From f12e425741f57b5dc82adeb392dc26ab23da6f31 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 8 Feb 2021 12:39:29 -0800 Subject: [PATCH 001/121] Added ecr_bootstrap --- samcli/lib/bootstrap/ecr_bootstrap.py | 89 +++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 samcli/lib/bootstrap/ecr_bootstrap.py diff --git a/samcli/lib/bootstrap/ecr_bootstrap.py b/samcli/lib/bootstrap/ecr_bootstrap.py new file mode 100644 index 0000000000..2ecec26694 --- /dev/null +++ b/samcli/lib/bootstrap/ecr_bootstrap.py @@ -0,0 +1,89 @@ +import re +from string import Template + +from samcli.lib.utils.hash import str_checksum +from samcli import __version__ as VERSION + +_STACK_TEMPLATE = Template(""" +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: AWS SAM CLI Managed ECR Repo Stack +Metadata: + SamCliInfo: $sam_cli_version + CompanionStackname: $companion_stack_name + +Resources: +$resources +Outputs: +$outputs +""") + +_REPO_TEMPLATE = """ + $repo_logical_id: + Type: AWS::ECR::Repository + Properties: + RepositoryName: $repo_name + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + - Key: AwsSamCliCompanionStack + Value: $companion_stack_name + + RepositoryPolicyText: + Version: "2012-10-17" + Statement: + - + Sid: AllowLambdaSLR + Effect: Allow + Principal: + Service: + - "lambda.amazonaws.com" + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:GetRepositoryPolicy" + - "ecr:BatchGetImage" +""" + +_OUTPUT_TEMPLATE = """ + $repo_output_logical_id: + Value: !Sub $${AWS::AccountId}.dkr.ecr.$${AWS::Region}.$${AWS::URLSuffix}/$${$repo_logical_id} +""" + +class CompanionStackBuilder: + def __init__(self, stack_name): + self._stack_name = stack_name + self._functions = dict() + + self._escaped_stack_name = re.sub(r"[^a-z0-9]", "", self._stack_name.lower()) + self._stack_hash = str_checksum(self._stack_name) + + def add_function(self, function_logical_id): + self._functions[function_logical_id] = 
self._get_repo_output_logical_id(function_logical_id) + + def build(self): + pass + + def get_output_logical_id_mapping(self): + return self._functions + + def _get_escaped_function_logical_id(self, function_logical_id): + return re.sub(r"[^a-z0-9]", "", function_logical_id.lower()) + + def _get_function_md5(self, function_logical_id): + return str_checksum(function_logical_id) + + def _get_repo_logical_id(self,function_logical_id): + return self._get_escaped_function_logical_id(function_logical_id)[:52] + self._get_function_md5(function_logical_id) + "Repo" + def _get_repo_output_logical_id(self,function_logical_id): + return self._get_escaped_function_logical_id(function_logical_id)[:52] + self._get_function_md5(function_logical_id) + "Out" + + def _get_repo_name(self, function_logical_id): + return self._escaped_stack_name + self._escaped_stack_name[:8] + "/" + self._get_escaped_function_logical_id(function_logical_id) + self._get_function_md5(function_logical_id)[:8] + "repo" + + def get_companion_stack_name(self): + return self._stack_name[:104] + "-" + self._stack_hash[:8] + "-CompanionStack" + + class Function: + def __init__(self, logical_id): + self.logical_id = logical_id + \ No newline at end of file From 650fe2c1966875e1d70a204306d7c5c36e8bf082 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 11 Feb 2021 14:26:06 -0800 Subject: [PATCH 002/121] Added companion_stack_manager --- .../lib/bootstrap/companion_stack_manager.py | 56 +++++++++++ samcli/lib/bootstrap/ecr_bootstrap.py | 93 ++++++++++++++----- 2 files changed, 124 insertions(+), 25 deletions(-) create mode 100644 samcli/lib/bootstrap/companion_stack_manager.py diff --git a/samcli/lib/bootstrap/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack_manager.py new file mode 100644 index 0000000000..33bea18572 --- /dev/null +++ b/samcli/lib/bootstrap/companion_stack_manager.py @@ -0,0 +1,56 @@ +import boto3 +from botocore.config import Config +from botocore.exceptions import 
ClientError, BotoCoreError, NoRegionError, NoCredentialsError +from samcli.commands.exceptions import UserException, CredentialsError, RegionError + +from samcli.lib.bootstrap.ecr_bootstrap import CompanionStackBuilder + + +class CompanionStackManager: + def __init__(self, stack_name, function_logical_ids, region): + self._builder = CompanionStackBuilder(stack_name) + self._companion_stack_name = self._builder.get_companion_stack_name() + + try: + self._cfn_client = boto3.client("cloudformation", config=Config(region_name=region if region else None)) + except NoCredentialsError as ex: + raise CredentialsError( + "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. " + "Please see their documentation for options to pass in credentials: " + "https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" + ) from ex + except NoRegionError as ex: + raise RegionError( + "Error Setting Up Managed Stack Client: Unable to resolve a region. " + "Please provide a region via the --region parameter or by the AWS_REGION environment variable." 
+ ) from ex + + for function_logical_id in function_logical_ids: + self._builder.add_function(function_logical_id) + + def update_companion_stack(self): + pass + + def list_current_repos(self): + repos = list() + stack = boto3.resource("cloudformation", "us-west-2").Stack(self._companion_stack_name) + resources = stack.resource_summaries.all() + for resource in resources: + if resource.resource_type == "AWS::ECR::Repository": + repos.append(resource.physical_resource_id) + return repos + + + def get_unreferenced_repos(self): + pass + + def does_companion_stack_exist(self): + try: + self._cfn_client.describe_stacks(StackName=self._companion_stack_name) + return True + except ClientError: + return False + + +manager = CompanionStackManager("test-ecr-stack", ["FuncA", "FuncB"], "us-west-2") +manager.list_current_repos() \ No newline at end of file diff --git a/samcli/lib/bootstrap/ecr_bootstrap.py b/samcli/lib/bootstrap/ecr_bootstrap.py index 2ecec26694..25e68a3464 100644 --- a/samcli/lib/bootstrap/ecr_bootstrap.py +++ b/samcli/lib/bootstrap/ecr_bootstrap.py @@ -4,7 +4,8 @@ from samcli.lib.utils.hash import str_checksum from samcli import __version__ as VERSION -_STACK_TEMPLATE = Template(""" +_STACK_TEMPLATE = Template( + """ AWSTemplateFormatVersion : '2010-09-09' Transform: AWS::Serverless-2016-10-31 Description: AWS SAM CLI Managed ECR Repo Stack @@ -16,10 +17,12 @@ $resources Outputs: $outputs -""") +""" +) -_REPO_TEMPLATE = """ - $repo_logical_id: +_REPO_TEMPLATE = Template( + """ + $repo_logical_id: Type: AWS::ECR::Repository Properties: RepositoryName: $repo_name @@ -31,39 +34,68 @@ RepositoryPolicyText: Version: "2012-10-17" - Statement: - - + Statement: + - Sid: AllowLambdaSLR Effect: Allow - Principal: - Service: + Principal: + Service: - "lambda.amazonaws.com" - Action: + Action: - "ecr:GetDownloadUrlForLayer" - "ecr:GetRepositoryPolicy" - "ecr:BatchGetImage" """ +) -_OUTPUT_TEMPLATE = """ +_OUTPUT_TEMPLATE = Template( + """ $repo_output_logical_id: Value: 
!Sub $${AWS::AccountId}.dkr.ecr.$${AWS::Region}.$${AWS::URLSuffix}/$${$repo_logical_id} """ +) + class CompanionStackBuilder: def __init__(self, stack_name): self._stack_name = stack_name self._functions = dict() - + self._escaped_stack_name = re.sub(r"[^a-z0-9]", "", self._stack_name.lower()) self._stack_hash = str_checksum(self._stack_name) def add_function(self, function_logical_id): - self._functions[function_logical_id] = self._get_repo_output_logical_id(function_logical_id) + self._functions[function_logical_id] = self._get_repo_logical_id(function_logical_id) def build(self): - pass + repo_templates = list() + repo_output_templates = list() + companion_stack_name = self.get_companion_stack_name() + for function_logical_id, repo_logical_id in self._functions.items(): + repo_name = self._get_repo_name(function_logical_id) + repo_template = _REPO_TEMPLATE.substitute( + repo_logical_id=repo_logical_id, repo_name=repo_name, companion_stack_name=companion_stack_name + ) + repo_templates.append(repo_template) + + repo_output_logical_id = self._get_repo_output_logical_id(function_logical_id) + repo_output_template = _OUTPUT_TEMPLATE.substitute( + repo_output_logical_id=repo_output_logical_id, repo_logical_id=repo_logical_id + ) + repo_output_templates.append(repo_output_template) + repo_templates_string = "".join(repo_templates) + repo_output_templates_string = "".join(repo_output_templates) + + stack_template_string = _STACK_TEMPLATE.substitute( + sam_cli_version=VERSION, + companion_stack_name=companion_stack_name, + resources=repo_templates_string, + outputs=repo_output_templates_string, + ) + + return stack_template_string - def get_output_logical_id_mapping(self): + def get_repo_logical_id_mapping(self): return self._functions def _get_escaped_function_logical_id(self, function_logical_id): @@ -72,18 +104,29 @@ def _get_escaped_function_logical_id(self, function_logical_id): def _get_function_md5(self, function_logical_id): return 
str_checksum(function_logical_id) - def _get_repo_logical_id(self,function_logical_id): - return self._get_escaped_function_logical_id(function_logical_id)[:52] + self._get_function_md5(function_logical_id) + "Repo" - def _get_repo_output_logical_id(self,function_logical_id): - return self._get_escaped_function_logical_id(function_logical_id)[:52] + self._get_function_md5(function_logical_id) + "Out" + def get_repo_logical_id(self, function_logical_id): + return ( + self._get_escaped_function_logical_id(function_logical_id)[:52] + + self._get_function_md5(function_logical_id) + + "Repo" + ) - def _get_repo_name(self, function_logical_id): - return self._escaped_stack_name + self._escaped_stack_name[:8] + "/" + self._get_escaped_function_logical_id(function_logical_id) + self._get_function_md5(function_logical_id)[:8] + "repo" + def get_repo_output_logical_id(self, function_logical_id): + return ( + self._get_escaped_function_logical_id(function_logical_id)[:52] + + self._get_function_md5(function_logical_id) + + "Out" + ) + + def get_repo_name(self, function_logical_id): + return ( + self._escaped_stack_name + + self._escaped_stack_name[:8] + + "/" + + self._get_escaped_function_logical_id(function_logical_id) + + self._get_function_md5(function_logical_id)[:8] + + "repo" + ) def get_companion_stack_name(self): return self._stack_name[:104] + "-" + self._stack_hash[:8] + "-CompanionStack" - - class Function: - def __init__(self, logical_id): - self.logical_id = logical_id - \ No newline at end of file From 539780367fd8aba30ba884228a313a8587bccdc8 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Tue, 16 Feb 2021 01:28:46 -0800 Subject: [PATCH 003/121] Added Companion Stack Manager --- .../lib/bootstrap/companion_stack/__init__.py | 0 .../companion_stack_manager.py | 74 +++++++++++++++ .../bootstrap/companion_stack/data_types.py | 91 +++++++++++++++++++ .../{ => companion_stack}/ecr_bootstrap.py | 70 ++++---------- .../lib/bootstrap/companion_stack_manager.py | 56 
------------ 5 files changed, 185 insertions(+), 106 deletions(-) create mode 100644 samcli/lib/bootstrap/companion_stack/__init__.py create mode 100644 samcli/lib/bootstrap/companion_stack/companion_stack_manager.py create mode 100644 samcli/lib/bootstrap/companion_stack/data_types.py rename samcli/lib/bootstrap/{ => companion_stack}/ecr_bootstrap.py (52%) delete mode 100644 samcli/lib/bootstrap/companion_stack_manager.py diff --git a/samcli/lib/bootstrap/companion_stack/__init__.py b/samcli/lib/bootstrap/companion_stack/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py new file mode 100644 index 0000000000..3e7d6fa9a3 --- /dev/null +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -0,0 +1,74 @@ +import boto3 + +from typing import List, Dict + +from botocore.config import Config +from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError +from samcli.commands.exceptions import CredentialsError, RegionError +from samcli.lib.bootstrap.companion_stack.ecr_bootstrap import CompanionStackBuilder +from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo + + +class CompanionStackManager: + def __init__(self, stack_name, function_logical_ids, region): + self._companion_stack = CompanionStack(stack_name) + self._builder = CompanionStackBuilder(self._companion_stack) + self._boto_config = Config(region_name=region if region else None) + try: + self._cfn_client = boto3.client("cloudformation", config=self._boto_config) + except NoCredentialsError as ex: + raise CredentialsError( + "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. 
" + "Please see their documentation for options to pass in credentials: " + "https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" + ) from ex + except NoRegionError as ex: + raise RegionError( + "Error Setting Up Managed Stack Client: Unable to resolve a region. " + "Please provide a region via the --region parameter or by the AWS_REGION environment variable." + ) from ex + + for function_logical_id in function_logical_ids: + self._builder.add_function(function_logical_id) + + def update_companion_stack(self): + self._cfn_client.update_stack(StackName=self._companion_stack.stack_name, TemplateBody=self._builder.build()) + + def list_deployed_repos(self) -> List[ECRRepo]: + """ + Not using create_change_set as it is slow + """ + repos:List[ECRRepo] = list() + stack = boto3.resource("cloudformation", config=self._boto_config).Stack(self._companion_stack.stack_name) + resources = stack.resource_summaries.all() + for resource in resources: + if resource.resource_type == "AWS::ECR::Repository": + repos.append(ECRRepo(logical_id=resource.logical_resource_id,physical_id=resource.physical_resource_id)) + return repos + + def get_unreferenced_repos(self) -> List[ECRRepo]: + deployed_repos:List[ECRRepo] = self.list_deployed_repos() + current_mapping = self._builder.repo_mapping + + unreferenced_repos:List[ECRRepo] = list() + for deployed_repo in deployed_repos: + found = False + for _, current_repo in current_mapping.items(): + if current_repo.logical_id == deployed_repo.logical_id: + found = True + break + if not found: + unreferenced_repos.append(deployed_repo) + return unreferenced_repos + + + def does_companion_stack_exist(self): + try: + self._cfn_client.describe_stacks(StackName=self._companion_stack.stack_name) + return True + except ClientError: + return False + + +manager = CompanionStackManager("test-ecr-stack", ["FuncA", "FuncB"], "us-west-2") +print(manager.get_unreferenced_repos()) diff --git 
a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py new file mode 100644 index 0000000000..4abeaa9ff0 --- /dev/null +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -0,0 +1,91 @@ + +import re +from samcli.lib.utils.hash import str_checksum + + +class CompanionStack: + _parent_stack_name: str + _escaped_parent_stack_name: str + _parent_stack_hash: str + _stack_name: str + + def __init__(self, parent_stack_name: str) -> None: + self._parent_stack_name = parent_stack_name + self._escaped_parent_stack_name = re.sub(r"[^a-z0-9]", "", self._parent_stack_name.lower()) + self._parent_stack_hash = str_checksum(self._parent_stack_name) + self._stack_name = self._parent_stack_name[:104] + "-" + self._parent_stack_hash[:8] + "-CompanionStack" + + @property + def parent_stack_name(self) -> str: + return self._parent_stack_name + + @property + def escaped_parent_stack_name(self) -> str: + return self._escaped_parent_stack_name + + @property + def parent_stack_hash(self) -> str: + return self._parent_stack_hash + + @property + def stack_name(self) -> str: + return self._stack_name + + +class ECRRepo: + _function_logical_id: str + _escaped_function_logical_id: str + _function_md5: str + _companion_stack: str + _logical_id: str + _physical_id: str + _output_logical_id: str + + def __init__( + self, + companion_stack: CompanionStack = None, + function_logical_id: str = None, + logical_id: str = None, + physical_id: str = None, + output_logical_id: str = None, + ): + self._function_logical_id = function_logical_id + self._escaped_function_logical_id = ( + re.sub(r"[^a-z0-9]", "", self._function_logical_id.lower()) + if self._function_logical_id is not None + else None + ) + self._function_md5 = str_checksum(function_logical_id) if self._function_logical_id is not None else None + self._companion_stack = companion_stack + + self._logical_id = logical_id + self._physical_id = physical_id + self._output_logical_id = 
output_logical_id + + @property + def logical_id(self) -> str: + if self._logical_id is None: + self._logical_id = ( + self._escaped_function_logical_id[:52] + self._function_md5 + "Repo" + ) + return self._logical_id + + @property + def physical_id(self) -> str: + if self._physical_id is None: + self._physical_id = ( + self._companion_stack.escaped_parent_stack_name + + self._companion_stack.parent_stack_hash[:8] + + "/" + + self._escaped_function_logical_id + + self._function_md5[:8] + + "repo" + ) + return self._physical_id + + @property + def output_logical_id(self) -> str: + if self._output_logical_id is None: + self._output_logical_id = ( + self._escaped_function_logical_id[:52] + self._function_md5 + "Out" + ) diff --git a/samcli/lib/bootstrap/ecr_bootstrap.py b/samcli/lib/bootstrap/companion_stack/ecr_bootstrap.py similarity index 52% rename from samcli/lib/bootstrap/ecr_bootstrap.py rename to samcli/lib/bootstrap/companion_stack/ecr_bootstrap.py index 25e68a3464..5904e30fe5 100644 --- a/samcli/lib/bootstrap/ecr_bootstrap.py +++ b/samcli/lib/bootstrap/companion_stack/ecr_bootstrap.py @@ -1,7 +1,7 @@ -import re +from typing import Dict from string import Template -from samcli.lib.utils.hash import str_checksum +from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo from samcli import __version__ as VERSION _STACK_TEMPLATE = Template( @@ -57,28 +57,30 @@ class CompanionStackBuilder: - def __init__(self, stack_name): - self._stack_name = stack_name - self._functions = dict() + _parent_stack_name: str + _companion_stack: CompanionStack + _repo_mapping: Dict[str, ECRRepo] - self._escaped_stack_name = re.sub(r"[^a-z0-9]", "", self._stack_name.lower()) - self._stack_hash = str_checksum(self._stack_name) + def __init__(self, companion_stack: CompanionStack) -> None: + self._companion_stack = companion_stack + self._repo_mapping:Dict[str, ECRRepo] = dict() - def add_function(self, function_logical_id): - 
self._functions[function_logical_id] = self._get_repo_logical_id(function_logical_id) + def add_function(self, function_logical_id: str) -> None: + self._repo_mapping[function_logical_id] = ECRRepo(self._companion_stack, function_logical_id) - def build(self): + def build(self) -> str: repo_templates = list() repo_output_templates = list() - companion_stack_name = self.get_companion_stack_name() - for function_logical_id, repo_logical_id in self._functions.items(): - repo_name = self._get_repo_name(function_logical_id) + companion_stack_name = self._companion_stack.stack_name + for _, ecr_repo in self._repo_mapping.items(): + repo_logical_id = ecr_repo.logical_id + repo_name = ecr_repo.physical_id + repo_output_logical_id = ecr_repo.output_logical_id + repo_template = _REPO_TEMPLATE.substitute( repo_logical_id=repo_logical_id, repo_name=repo_name, companion_stack_name=companion_stack_name ) repo_templates.append(repo_template) - - repo_output_logical_id = self._get_repo_output_logical_id(function_logical_id) repo_output_template = _OUTPUT_TEMPLATE.substitute( repo_output_logical_id=repo_output_logical_id, repo_logical_id=repo_logical_id ) @@ -94,39 +96,7 @@ def build(self): ) return stack_template_string - - def get_repo_logical_id_mapping(self): - return self._functions - - def _get_escaped_function_logical_id(self, function_logical_id): - return re.sub(r"[^a-z0-9]", "", function_logical_id.lower()) - - def _get_function_md5(self, function_logical_id): - return str_checksum(function_logical_id) - - def get_repo_logical_id(self, function_logical_id): - return ( - self._get_escaped_function_logical_id(function_logical_id)[:52] - + self._get_function_md5(function_logical_id) - + "Repo" - ) - - def get_repo_output_logical_id(self, function_logical_id): - return ( - self._get_escaped_function_logical_id(function_logical_id)[:52] - + self._get_function_md5(function_logical_id) - + "Out" - ) - - def get_repo_name(self, function_logical_id): - return ( - 
self._escaped_stack_name - + self._escaped_stack_name[:8] - + "/" - + self._get_escaped_function_logical_id(function_logical_id) - + self._get_function_md5(function_logical_id)[:8] - + "repo" - ) - def get_companion_stack_name(self): - return self._stack_name[:104] + "-" + self._stack_hash[:8] + "-CompanionStack" + @property + def repo_mapping(self) -> Dict[str, ECRRepo]: + return self._repo_mapping diff --git a/samcli/lib/bootstrap/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack_manager.py deleted file mode 100644 index 33bea18572..0000000000 --- a/samcli/lib/bootstrap/companion_stack_manager.py +++ /dev/null @@ -1,56 +0,0 @@ -import boto3 -from botocore.config import Config -from botocore.exceptions import ClientError, BotoCoreError, NoRegionError, NoCredentialsError -from samcli.commands.exceptions import UserException, CredentialsError, RegionError - -from samcli.lib.bootstrap.ecr_bootstrap import CompanionStackBuilder - - -class CompanionStackManager: - def __init__(self, stack_name, function_logical_ids, region): - self._builder = CompanionStackBuilder(stack_name) - self._companion_stack_name = self._builder.get_companion_stack_name() - - try: - self._cfn_client = boto3.client("cloudformation", config=Config(region_name=region if region else None)) - except NoCredentialsError as ex: - raise CredentialsError( - "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. " - "Please see their documentation for options to pass in credentials: " - "https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" - ) from ex - except NoRegionError as ex: - raise RegionError( - "Error Setting Up Managed Stack Client: Unable to resolve a region. " - "Please provide a region via the --region parameter or by the AWS_REGION environment variable." 
- ) from ex - - for function_logical_id in function_logical_ids: - self._builder.add_function(function_logical_id) - - def update_companion_stack(self): - pass - - def list_current_repos(self): - repos = list() - stack = boto3.resource("cloudformation", "us-west-2").Stack(self._companion_stack_name) - resources = stack.resource_summaries.all() - for resource in resources: - if resource.resource_type == "AWS::ECR::Repository": - repos.append(resource.physical_resource_id) - return repos - - - def get_unreferenced_repos(self): - pass - - def does_companion_stack_exist(self): - try: - self._cfn_client.describe_stacks(StackName=self._companion_stack_name) - return True - except ClientError: - return False - - -manager = CompanionStackManager("test-ecr-stack", ["FuncA", "FuncB"], "us-west-2") -manager.list_current_repos() \ No newline at end of file From 67519287a9ac63ab8dcd7fb2c8b323775ac559af Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 17 Feb 2021 15:35:42 -0800 Subject: [PATCH 004/121] Added update_companion_stack --- .../companion_stack_manager.py | 44 +++++++++++++++++-- .../bootstrap/companion_stack/data_types.py | 4 +- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 3e7d6fa9a3..1da0f41010 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -1,3 +1,7 @@ +from mypy_boto3_cloudformation.client import CloudFormationClient +from mypy_boto3_s3.client import S3Client +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.deploy.deployer import Deployer import boto3 from typing import List, Dict @@ -7,15 +11,26 @@ from samcli.commands.exceptions import CredentialsError, RegionError from samcli.lib.bootstrap.companion_stack.ecr_bootstrap import CompanionStackBuilder from 
samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo +from samcli.lib.package.artifact_exporter import mktempfile class CompanionStackManager: - def __init__(self, stack_name, function_logical_ids, region): + _companion_stack: str + _builder: CompanionStackBuilder + _boto_config: Config + _s3_bucket: str + _s3_prefix: str + _cfn_client: CloudFormationClient + _s3_client: S3Client + def __init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefix): self._companion_stack = CompanionStack(stack_name) self._builder = CompanionStackBuilder(self._companion_stack) self._boto_config = Config(region_name=region if region else None) + self._s3_bucket = s3_bucket + self._s3_prefix = s3_prefix try: self._cfn_client = boto3.client("cloudformation", config=self._boto_config) + self._s3_client = boto3.client("s3", config=self._boto_config) except NoCredentialsError as ex: raise CredentialsError( "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. 
" @@ -32,7 +47,30 @@ def __init__(self, stack_name, function_logical_ids, region): self._builder.add_function(function_logical_id) def update_companion_stack(self): - self._cfn_client.update_stack(StackName=self._companion_stack.stack_name, TemplateBody=self._builder.build()) + stack_name = self._companion_stack.stack_name + template = self._builder.build() + + with mktempfile() as temporary_file: + temporary_file.write(template) + temporary_file.flush() + + s3_uploader = S3Uploader(self._s3_client, bucket_name=self._s3_bucket, prefix=self._s3_prefix) + # TemplateUrl property requires S3 URL to be in path-style format + parts = S3Uploader.parse_s3_url( + s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + ) + + template_url = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) + waiter_config = {"Delay": 30, "MaxAttempts": 120} + if self.does_companion_stack_exist(): + self._cfn_client.update_stack(StackName=stack_name, TemplateURL=template_url) + waiter = self._cfn_client.get_waiter('stack_update_complete') + else: + self._cfn_client.create_stack(StackName=stack_name, TemplateURL=template_url) + waiter = self._cfn_client.get_waiter('stack_create_complete') + + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + def list_deployed_repos(self) -> List[ECRRepo]: """ @@ -70,5 +108,5 @@ def does_companion_stack_exist(self): return False -manager = CompanionStackManager("test-ecr-stack", ["FuncA", "FuncB"], "us-west-2") +manager = CompanionStackManager("Hello-World-Stack", ["TestFunction01", "AnotherTestFunction02"], "us-west-2") print(manager.get_unreferenced_repos()) diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 4abeaa9ff0..b3ab7d3f70 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -66,7 +66,7 @@ def __init__( def logical_id(self) -> str: if 
self._logical_id is None: self._logical_id = ( - self._escaped_function_logical_id[:52] + self._function_md5 + "Repo" + self._function_logical_id[:52] + self._function_md5[:8] + "Repo" ) return self._logical_id @@ -87,5 +87,5 @@ def physical_id(self) -> str: def output_logical_id(self) -> str: if self._output_logical_id is None: self._output_logical_id = ( - self._escaped_function_logical_id[:52] + self._function_md5 + "Out" + self._function_logical_id[:52] + self._function_md5[:8] + "Out" ) From 6c034ecce7ff2299a4482ff6a561ae885cd8d50f Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 17 Feb 2021 18:04:19 -0800 Subject: [PATCH 005/121] Updated companion_stack_builder File Name --- .../{ecr_bootstrap.py => companion_stack_builder.py} | 0 .../companion_stack/companion_stack_manager.py | 11 ++++++----- 2 files changed, 6 insertions(+), 5 deletions(-) rename samcli/lib/bootstrap/companion_stack/{ecr_bootstrap.py => companion_stack_builder.py} (100%) diff --git a/samcli/lib/bootstrap/companion_stack/ecr_bootstrap.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py similarity index 100% rename from samcli/lib/bootstrap/companion_stack/ecr_bootstrap.py rename to samcli/lib/bootstrap/companion_stack/companion_stack_builder.py diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 1da0f41010..b966af6da9 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -9,7 +9,7 @@ from botocore.config import Config from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError from samcli.commands.exceptions import CredentialsError, RegionError -from samcli.lib.bootstrap.companion_stack.ecr_bootstrap import CompanionStackBuilder +from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder from 
samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo from samcli.lib.package.artifact_exporter import mktempfile @@ -49,7 +49,7 @@ def __init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefi def update_companion_stack(self): stack_name = self._companion_stack.stack_name template = self._builder.build() - + with mktempfile() as temporary_file: temporary_file.write(template) temporary_file.flush() @@ -63,10 +63,10 @@ def update_companion_stack(self): template_url = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) waiter_config = {"Delay": 30, "MaxAttempts": 120} if self.does_companion_stack_exist(): - self._cfn_client.update_stack(StackName=stack_name, TemplateURL=template_url) + self._cfn_client.update_stack(StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"]) waiter = self._cfn_client.get_waiter('stack_update_complete') else: - self._cfn_client.create_stack(StackName=stack_name, TemplateURL=template_url) + self._cfn_client.create_stack(StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"]) waiter = self._cfn_client.get_waiter('stack_create_complete') waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) @@ -108,5 +108,6 @@ def does_companion_stack_exist(self): return False -manager = CompanionStackManager("Hello-World-Stack", ["TestFunction01", "AnotherTestFunction02"], "us-west-2") +manager = CompanionStackManager("Auto-ECR-Test-Stack", ["TestFunction01", "AnotherTestFunction03"], "us-west-2", "aws-sam-cli-managed-default-samclisourcebucket-9bu3m109ig6i", "Hello-World-Stack") print(manager.get_unreferenced_repos()) +print(manager.update_companion_stack()) From f6204af4b6d1f23b48431648fe0e5b092be1b734 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 18 Feb 2021 11:18:33 -0800 Subject: [PATCH 006/121] Formatted with Black --- .../companion_stack_manager.py | 33 ++++++++++++------- 1 file changed, 22 
insertions(+), 11 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index b966af6da9..ef84d97fa8 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -22,6 +22,7 @@ class CompanionStackManager: _s3_prefix: str _cfn_client: CloudFormationClient _s3_client: S3Client + def __init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefix): self._companion_stack = CompanionStack(stack_name) self._builder = CompanionStackBuilder(self._companion_stack) @@ -63,32 +64,37 @@ def update_companion_stack(self): template_url = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) waiter_config = {"Delay": 30, "MaxAttempts": 120} if self.does_companion_stack_exist(): - self._cfn_client.update_stack(StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"]) - waiter = self._cfn_client.get_waiter('stack_update_complete') + self._cfn_client.update_stack( + StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] + ) + waiter = self._cfn_client.get_waiter("stack_update_complete") else: - self._cfn_client.create_stack(StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"]) - waiter = self._cfn_client.get_waiter('stack_create_complete') + self._cfn_client.create_stack( + StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] + ) + waiter = self._cfn_client.get_waiter("stack_create_complete") waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) - def list_deployed_repos(self) -> List[ECRRepo]: """ Not using create_change_set as it is slow """ - repos:List[ECRRepo] = list() + repos: List[ECRRepo] = list() stack = boto3.resource("cloudformation", config=self._boto_config).Stack(self._companion_stack.stack_name) resources = 
stack.resource_summaries.all() for resource in resources: if resource.resource_type == "AWS::ECR::Repository": - repos.append(ECRRepo(logical_id=resource.logical_resource_id,physical_id=resource.physical_resource_id)) + repos.append( + ECRRepo(logical_id=resource.logical_resource_id, physical_id=resource.physical_resource_id) + ) return repos def get_unreferenced_repos(self) -> List[ECRRepo]: - deployed_repos:List[ECRRepo] = self.list_deployed_repos() + deployed_repos: List[ECRRepo] = self.list_deployed_repos() current_mapping = self._builder.repo_mapping - unreferenced_repos:List[ECRRepo] = list() + unreferenced_repos: List[ECRRepo] = list() for deployed_repo in deployed_repos: found = False for _, current_repo in current_mapping.items(): @@ -99,7 +105,6 @@ def get_unreferenced_repos(self) -> List[ECRRepo]: unreferenced_repos.append(deployed_repo) return unreferenced_repos - def does_companion_stack_exist(self): try: self._cfn_client.describe_stacks(StackName=self._companion_stack.stack_name) @@ -108,6 +113,12 @@ def does_companion_stack_exist(self): return False -manager = CompanionStackManager("Auto-ECR-Test-Stack", ["TestFunction01", "AnotherTestFunction03"], "us-west-2", "aws-sam-cli-managed-default-samclisourcebucket-9bu3m109ig6i", "Hello-World-Stack") +manager = CompanionStackManager( + "Auto-ECR-Test-Stack", + ["TestFunction01", "AnotherTestFunction03"], + "us-west-2", + "aws-sam-cli-managed-default-samclisourcebucket-9bu3m109ig6i", + "Hello-World-Stack", +) print(manager.get_unreferenced_repos()) print(manager.update_companion_stack()) From 13b425f52b8c536a4552ae92c01f9c0141d91943 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Sun, 28 Feb 2021 18:02:05 -0800 Subject: [PATCH 007/121] Updated get_unreferenced_repos --- .../bootstrap/companion_stack/companion_stack_manager.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py 
b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index ef84d97fa8..1e3270da38 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -96,12 +96,10 @@ def get_unreferenced_repos(self) -> List[ECRRepo]: unreferenced_repos: List[ECRRepo] = list() for deployed_repo in deployed_repos: - found = False for _, current_repo in current_mapping.items(): if current_repo.logical_id == deployed_repo.logical_id: - found = True break - if not found: + else: unreferenced_repos.append(deployed_repo) return unreferenced_repos @@ -115,7 +113,7 @@ def does_companion_stack_exist(self): manager = CompanionStackManager( "Auto-ECR-Test-Stack", - ["TestFunction01", "AnotherTestFunction03"], + ["TestFunction01", "AnotherTestFunction02"], "us-west-2", "aws-sam-cli-managed-default-samclisourcebucket-9bu3m109ig6i", "Hello-World-Stack", From 25d2dad38ae7b0a8e0a2a72035c270bf5ca259b2 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Sun, 28 Feb 2021 22:19:48 -0800 Subject: [PATCH 008/121] Updated guided_context to Use Companion Stack --- samcli/commands/deploy/guided_context.py | 97 +++++++++++++------ .../companion_stack_builder.py | 2 +- .../companion_stack_manager.py | 10 +- .../bootstrap/companion_stack/data_types.py | 12 +-- 4 files changed, 83 insertions(+), 38 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 891839e9a1..7d36b7c851 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -3,6 +3,7 @@ """ import logging +from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager from typing import Dict, Any, List import click @@ -125,7 +126,6 @@ def guided_prompts(self, parameter_override_keys): stacks = SamLocalStackProvider.get_stacks( self.template_file, parameter_overrides=sanitize_parameter_overrides(input_parameter_overrides) ) - 
image_repositories = self.prompt_image_repository(stacks) click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") confirm_changeset = confirm( @@ -165,6 +165,10 @@ def guided_prompts(self, parameter_override_keys): click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") + image_repositories = self.prompt_image_repository( + stack_name, stacks, self.image_repositories, region, s3_bucket, self.s3_prefix + ) + self.guided_stack_name = stack_name self.guided_s3_bucket = s3_bucket self.guided_image_repositories = image_repositories @@ -268,35 +272,72 @@ def prompt_parameters( _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": False} return _prompted_param_overrides - def prompt_image_repository(self, stacks: List[Stack]): - image_repositories = {} + def prompt_image_repository( + self, stack_name, image_repositories: Dict[str, str], stacks: List[Stack], region, s3_bucket, s3_prefix + ): artifacts_format = get_template_artifacts_format(template_file=self.template_file) - if IMAGE in artifacts_format: - self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) - function_resources = get_template_function_resource_ids(template_file=self.template_file, artifact=IMAGE) - for resource_id in function_resources: - image_repositories[resource_id] = prompt( - f"\t{self.start_bold}Image Repository for {resource_id}{self.end_bold}", - default=self.image_repositories.get(resource_id, "") - if isinstance(self.image_repositories, dict) - else "" or self.image_repository, + if IMAGE not in artifacts_format: + return {} + + image_repositories = image_repositories.copy() + + self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) + function_logical_ids = get_template_function_resource_ids(template_file=self.template_file, artifact=IMAGE) + missing_repo_functions = list() + if 
image_repositories: + for function_logical_id in function_logical_ids: + if function_logical_id not in self.image_repositories: + missing_repo_functions.append(function_logical_id) + else: + missing_repo_functions = function_logical_ids + + if not missing_repo_functions: + return {} + + if missing_repo_functions == function_logical_ids: + click.echo("\nImage repositories: Not found.") + create_all_repos = click.confirm("\nCreate managed ECR repositories for all functions?", default=True) + else: + functions_with_repo_count = len(function_logical_ids) - len(missing_repo_functions) + click.echo( + f"\nImage repositories: Found ({len(functions_with_repo_count)} of {len(function_logical_ids)}) #Different image repositories can be set in samconfig.toml" + ) + create_all_repos = click.confirm( + f"\nCreate managed ECR repositories for the {len(missing_repo_functions)} functions without?", + default=True, + ) + + companion_stack_manager = CompanionStackManager( + stack_name, missing_repo_functions, region, s3_bucket, s3_prefix + ) + + if create_all_repos: + companion_stack_manager.get_unreferenced_repos() + companion_stack_manager.update_companion_stack() + image_repositories.update(companion_stack_manager.get_repository_mapping()) + else: + for function_logical_id in missing_repo_functions: + image_uri = prompt( + f"\t{self.start_bold}ECR repository for {function_logical_id}:{self.end_bold}", + default=self.image_repository, ) - if not is_ecr_url(image_repositories.get(resource_id)): - raise GuidedDeployFailedError( - f"Invalid Image Repository ECR URI: {image_repositories.get(resource_id)}" - ) - for resource_id, function_prop in self.function_provider.functions.items(): - if function_prop.packagetype == IMAGE: - image = function_prop.imageuri - try: - tag = tag_translation(image) - except NonLocalImageException: - pass - except NoImageFoundException as ex: - raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex - else: - 
click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") - click.secho(nl=True) + if not is_ecr_url(image_uri): + raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") + + image_repositories[function_logical_id] = image_uri + + for resource_id, function_prop in self.function_provider.functions.items(): + if function_prop.packagetype == IMAGE: + image = function_prop.imageuri + try: + tag = tag_translation(image) + except NonLocalImageException: + pass + except NoImageFoundException as ex: + raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex + else: + click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") + click.secho(nl=True) return image_repositories diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py index 5904e30fe5..d0fda21411 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py @@ -63,7 +63,7 @@ class CompanionStackBuilder: def __init__(self, companion_stack: CompanionStack) -> None: self._companion_stack = companion_stack - self._repo_mapping:Dict[str, ECRRepo] = dict() + self._repo_mapping: Dict[str, ECRRepo] = dict() def add_function(self, function_logical_id: str) -> None: self._repo_mapping[function_logical_id] = ECRRepo(self._companion_stack, function_logical_id) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 1e3270da38..71ab78c140 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -110,6 +110,11 @@ def does_companion_stack_exist(self): except ClientError: return False + def get_repository_mapping(self): + account_id = 
boto3.client("sts").get_caller_identity().get("Account") + region_name = self._cfn_client.meta.region_name + return dict((k, v.get_repo_uri(account_id, region_name)) for (k, v) in self._builder.repo_mapping.items()) + manager = CompanionStackManager( "Auto-ECR-Test-Stack", @@ -118,5 +123,6 @@ def does_companion_stack_exist(self): "aws-sam-cli-managed-default-samclisourcebucket-9bu3m109ig6i", "Hello-World-Stack", ) -print(manager.get_unreferenced_repos()) -print(manager.update_companion_stack()) +# print(manager.get_unreferenced_repos()) +# print(manager.update_companion_stack()) +print(manager.get_repository_mapping()) diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index b3ab7d3f70..684c4127df 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -1,4 +1,3 @@ - import re from samcli.lib.utils.hash import str_checksum @@ -65,9 +64,7 @@ def __init__( @property def logical_id(self) -> str: if self._logical_id is None: - self._logical_id = ( - self._function_logical_id[:52] + self._function_md5[:8] + "Repo" - ) + self._logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Repo" return self._logical_id @property @@ -86,6 +83,7 @@ def physical_id(self) -> str: @property def output_logical_id(self) -> str: if self._output_logical_id is None: - self._output_logical_id = ( - self._function_logical_id[:52] + self._function_md5[:8] + "Out" - ) + self._output_logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Out" + + def get_repo_uri(self, account_id, region): + return f"{account_id}.dkr.ecr.{region}.amazonaws.com/{self.physical_id}" From fbe5a660aa4f209779974cc22da8e7fbb80af74c Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Sun, 28 Feb 2021 23:14:48 -0800 Subject: [PATCH 009/121] Added Delete Auto Create ECR Repo Prompt --- samcli/commands/deploy/guided_context.py | 31 ++++++++++++++++--- 
.../companion_stack_manager.py | 9 ++++-- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 7d36b7c851..3fd9c6a7d3 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -311,11 +311,7 @@ def prompt_image_repository( stack_name, missing_repo_functions, region, s3_bucket, s3_prefix ) - if create_all_repos: - companion_stack_manager.get_unreferenced_repos() - companion_stack_manager.update_companion_stack() - image_repositories.update(companion_stack_manager.get_repository_mapping()) - else: + if not create_all_repos: for function_logical_id in missing_repo_functions: image_uri = prompt( f"\t{self.start_bold}ECR repository for {function_logical_id}:{self.end_bold}", @@ -326,6 +322,31 @@ def prompt_image_repository( image_repositories[function_logical_id] = image_uri + unreferenced_repos = companion_stack_manager.get_unreferenced_repos() + if unreferenced_repos: + click.echo(f"Checking for unreferenced ECR repositories to clean-up: {len(unreferenced_repos)} found") + for repo in unreferenced_repos: + repo_uri = companion_stack_manager.get_repo_uri(repo) + click.echo(f"\n{repo_uri}") + delete_repos = click.confirm( + "\nDelete the unreferenced repositories listed above when deploying?", + default=False, + ) + if not delete_repos: + click.echo("\nDeployment aborted!") + click.echo( + """ + #The deployment was aborted to prevent unreferenced managed ECR repositories from being deleted. + #You may remove repositories from the SAMCLI managed stack to retain them and resolve this unreferenced check. 
+ https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html + """ + ) + raise GuidedDeployFailedError("Unreferenced auto created ECR repos must be deleted.") + + if create_all_repos: + companion_stack_manager.update_companion_stack() + image_repositories.update(companion_stack_manager.get_repository_mapping()) + for resource_id, function_prop in self.function_provider.functions.items(): if function_prop.packagetype == IMAGE: image = function_prop.imageuri diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 71ab78c140..26c7065873 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -32,6 +32,8 @@ def __init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefi try: self._cfn_client = boto3.client("cloudformation", config=self._boto_config) self._s3_client = boto3.client("s3", config=self._boto_config) + self._account_id = boto3.client("sts").get_caller_identity().get("Account") + self._region_name = self._cfn_client.meta.region_name except NoCredentialsError as ex: raise CredentialsError( "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. 
" @@ -111,9 +113,10 @@ def does_companion_stack_exist(self): return False def get_repository_mapping(self): - account_id = boto3.client("sts").get_caller_identity().get("Account") - region_name = self._cfn_client.meta.region_name - return dict((k, v.get_repo_uri(account_id, region_name)) for (k, v) in self._builder.repo_mapping.items()) + return dict((k, self.get_repo_uri(v)) for (k, v) in self._builder.repo_mapping.items()) + + def get_repo_uri(self, repo: ECRRepo): + return repo.get_repo_uri(self._account_id, self._region_name) manager = CompanionStackManager( From 0b9a8813dffd575f028331e1b31a79f8ad3b4d7c Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 1 Mar 2021 13:32:27 -0800 Subject: [PATCH 010/121] Updated prompt_image_repository Flow --- samcli/commands/deploy/guided_context.py | 67 +++++++++++-------- .../companion_stack_builder.py | 3 + .../companion_stack_manager.py | 38 ++++++----- 3 files changed, 61 insertions(+), 47 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 3fd9c6a7d3..600339463f 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -273,48 +273,57 @@ def prompt_parameters( return _prompted_param_overrides def prompt_image_repository( - self, stack_name, image_repositories: Dict[str, str], stacks: List[Stack], region, s3_bucket, s3_prefix + self, stack_name, stacks: List[Stack], image_repositories: Dict[str, str], region, s3_bucket, s3_prefix ): - artifacts_format = get_template_artifacts_format(template_file=self.template_file) - if IMAGE not in artifacts_format: - return {} + image_repositories = image_repositories.copy() if image_repositories is not None else {} - image_repositories = image_repositories.copy() + companion_stack_manager = CompanionStackManager(stack_name, region, s3_bucket, s3_prefix) + deployed_repos = companion_stack_manager.list_deployed_repos() + deployed_repo_uris = 
[companion_stack_manager.get_repo_uri(repo) for repo in deployed_repos] self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) function_logical_ids = get_template_function_resource_ids(template_file=self.template_file, artifact=IMAGE) missing_repo_functions = list() + auto_ecr_repo_functions = list() if image_repositories: for function_logical_id in function_logical_ids: if function_logical_id not in self.image_repositories: missing_repo_functions.append(function_logical_id) - else: - missing_repo_functions = function_logical_ids + continue - if not missing_repo_functions: - return {} + repo_uri = self.image_repositories[function_logical_id] + if repo_uri in deployed_repo_uris: + auto_ecr_repo_functions.append(function_logical_id) if missing_repo_functions == function_logical_ids: - click.echo("\nImage repositories: Not found.") - create_all_repos = click.confirm("\nCreate managed ECR repositories for all functions?", default=True) + click.echo("\n\t\tImage repositories: Not found.") + click.echo( + "\t\t#Managed repositories will be deleted when their functions are removed from the template and deployed" + ) + create_all_repos = click.confirm("\t\tCreate managed ECR repositories for all functions?", default=True) else: functions_with_repo_count = len(function_logical_ids) - len(missing_repo_functions) click.echo( - f"\nImage repositories: Found ({len(functions_with_repo_count)} of {len(function_logical_ids)}) #Different image repositories can be set in samconfig.toml" + f"\n\t\tImage repositories: Found ({functions_with_repo_count} of {len(function_logical_ids)}) #Different image repositories can be set in samconfig.toml" ) - create_all_repos = click.confirm( - f"\nCreate managed ECR repositories for the {len(missing_repo_functions)} functions without?", - default=True, + click.echo( + "\t\t#Managed repositories will be deleted when their functions are removed from the template and deployed" + ) + create_all_repos = ( + 
click.confirm( + f"\n\t\tCreate managed ECR repositories for the {len(missing_repo_functions)} functions without?", + default=True, + ) + if missing_repo_functions + else True ) - companion_stack_manager = CompanionStackManager( - stack_name, missing_repo_functions, region, s3_bucket, s3_prefix - ) + companion_stack_manager.set_functions(missing_repo_functions + auto_ecr_repo_functions) if not create_all_repos: for function_logical_id in missing_repo_functions: image_uri = prompt( - f"\t{self.start_bold}ECR repository for {function_logical_id}:{self.end_bold}", + f"\t\t{self.start_bold}ECR repository for {function_logical_id}:{self.end_bold}", default=self.image_repository, ) if not is_ecr_url(image_uri): @@ -324,24 +333,24 @@ def prompt_image_repository( unreferenced_repos = companion_stack_manager.get_unreferenced_repos() if unreferenced_repos: - click.echo(f"Checking for unreferenced ECR repositories to clean-up: {len(unreferenced_repos)} found") + click.echo(f"\t\tChecking for unreferenced ECR repositories to clean-up: {len(unreferenced_repos)} found") for repo in unreferenced_repos: repo_uri = companion_stack_manager.get_repo_uri(repo) - click.echo(f"\n{repo_uri}") + click.echo(f"\t\t {repo_uri}") delete_repos = click.confirm( - "\nDelete the unreferenced repositories listed above when deploying?", + "\t\tDelete the unreferenced repositories listed above when deploying?", default=False, ) if not delete_repos: - click.echo("\nDeployment aborted!") + click.echo("\t\tDeployment aborted!") click.echo( - """ - #The deployment was aborted to prevent unreferenced managed ECR repositories from being deleted. - #You may remove repositories from the SAMCLI managed stack to retain them and resolve this unreferenced check. - https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html - """ + "\t\t#The deployment was aborted to prevent unreferenced managed ECR repositories from being deleted." 
+ "\t\t#You may remove repositories from the SAMCLI managed stack to retain them and resolve this unreferenced check." + "\t\thttps://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html" ) - raise GuidedDeployFailedError("Unreferenced auto created ECR repos must be deleted.") + raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") + else: + companion_stack_manager.delete_unreferenced_repos() if create_all_repos: companion_stack_manager.update_companion_stack() diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py index d0fda21411..a47088609b 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py @@ -68,6 +68,9 @@ def __init__(self, companion_stack: CompanionStack) -> None: def add_function(self, function_logical_id: str) -> None: self._repo_mapping[function_logical_id] = ECRRepo(self._companion_stack, function_logical_id) + def clear_functions(self) -> None: + self._repo_mapping = dict() + def build(self) -> str: repo_templates = list() repo_output_templates = list() diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 26c7065873..8b5e8ba785 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -23,7 +23,7 @@ class CompanionStackManager: _cfn_client: CloudFormationClient _s3_client: S3Client - def __init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefix): + def __init__(self, stack_name, region, s3_bucket, s3_prefix): self._companion_stack = CompanionStack(stack_name) self._builder = CompanionStackBuilder(self._companion_stack) self._boto_config = Config(region_name=region if region else None) @@ -31,6 +31,7 @@ def 
__init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefi self._s3_prefix = s3_prefix try: self._cfn_client = boto3.client("cloudformation", config=self._boto_config) + self._ecr_client = boto3.client("ecr", config=self._boto_config) self._s3_client = boto3.client("s3", config=self._boto_config) self._account_id = boto3.client("sts").get_caller_identity().get("Account") self._region_name = self._cfn_client.meta.region_name @@ -46,10 +47,12 @@ def __init__(self, stack_name, function_logical_ids, region, s3_bucket, s3_prefi "Please provide a region via the --region parameter or by the AWS_REGION environment variable." ) from ex + def set_functions(self, function_logical_ids: List[str]) -> None: + self._builder.clear_functions() for function_logical_id in function_logical_ids: self._builder.add_function(function_logical_id) - def update_companion_stack(self): + def update_companion_stack(self) -> None: stack_name = self._companion_stack.stack_name template = self._builder.build() @@ -57,7 +60,9 @@ def update_companion_stack(self): temporary_file.write(template) temporary_file.flush() - s3_uploader = S3Uploader(self._s3_client, bucket_name=self._s3_bucket, prefix=self._s3_prefix) + s3_uploader = S3Uploader( + self._s3_client, bucket_name=self._s3_bucket, prefix=self._s3_prefix, no_progressbar=True + ) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" @@ -82,6 +87,8 @@ def list_deployed_repos(self) -> List[ECRRepo]: """ Not using create_change_set as it is slow """ + if not self.does_companion_stack_exist(): + return None repos: List[ECRRepo] = list() stack = boto3.resource("cloudformation", config=self._boto_config).Stack(self._companion_stack.stack_name) resources = stack.resource_summaries.all() @@ -93,6 +100,8 @@ def list_deployed_repos(self) -> List[ECRRepo]: return repos def get_unreferenced_repos(self) -> 
List[ECRRepo]: + if not self.does_companion_stack_exist(): + return [] deployed_repos: List[ECRRepo] = self.list_deployed_repos() current_mapping = self._builder.repo_mapping @@ -105,27 +114,20 @@ def get_unreferenced_repos(self) -> List[ECRRepo]: unreferenced_repos.append(deployed_repo) return unreferenced_repos - def does_companion_stack_exist(self): + def delete_unreferenced_repos(self) -> None: + repos = self.get_unreferenced_repos() + for repo in repos: + self._ecr_client.delete_repository(repositoryName=repo.physical_id, force=True) + + def does_companion_stack_exist(self) -> bool: try: self._cfn_client.describe_stacks(StackName=self._companion_stack.stack_name) return True except ClientError: return False - def get_repository_mapping(self): + def get_repository_mapping(self) -> Dict[str, str]: return dict((k, self.get_repo_uri(v)) for (k, v) in self._builder.repo_mapping.items()) - def get_repo_uri(self, repo: ECRRepo): + def get_repo_uri(self, repo: ECRRepo) -> str: return repo.get_repo_uri(self._account_id, self._region_name) - - -manager = CompanionStackManager( - "Auto-ECR-Test-Stack", - ["TestFunction01", "AnotherTestFunction02"], - "us-west-2", - "aws-sam-cli-managed-default-samclisourcebucket-9bu3m109ig6i", - "Hello-World-Stack", -) -# print(manager.get_unreferenced_repos()) -# print(manager.update_companion_stack()) -print(manager.get_repository_mapping()) From 676fa0118cb130c266fb0e0f2a0dc994d7fda7b5 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 3 Mar 2021 23:17:39 -0800 Subject: [PATCH 011/121] Added --resolve-image-repos --- samcli/commands/deploy/command.py | 35 +++++++-- samcli/commands/deploy/guided_config.py | 2 +- samcli/commands/deploy/guided_context.py | 72 +++++++++---------- .../companion_stack_manager.py | 39 ++++++++-- .../companion_stack_manager_helper.py | 41 +++++++++++ .../image_repository_validation.py | 8 ++- 6 files changed, 145 insertions(+), 52 deletions(-) create mode 100644 
samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 45e2c08499..9c0bf8ca37 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -2,6 +2,7 @@ CLI command for "deploy" command """ import logging +from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper import click @@ -156,6 +157,13 @@ help="Automatically resolve s3 bucket for non-guided deployments." "Do not use --s3-guided parameter with this option.", ) +@click.option( + "--resolve-image-repos", + required=False, + is_flag=True, + help="Automatically create ECR repos for image based functions in non-guided deployments." + "Auto created image repos will be deleted if the corresponding functions are removed.", +) @metadata_override_option @notification_arns_override_option @tags_override_option @@ -195,6 +203,7 @@ def cli( resolve_s3, config_file, config_env, + resolve_image_repos, ): # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing @@ -225,6 +234,7 @@ def cli( resolve_s3, config_file, config_env, + resolve_image_repos, ) # pragma: no cover @@ -255,6 +265,7 @@ def do_cli( resolve_s3, config_file, config_env, + resolve_image_repos, ): from samcli.commands.package.package_context import PackageContext from samcli.commands.deploy.deploy_context import DeployContext @@ -281,13 +292,23 @@ def do_cli( config_file=config_file, ) guided_context.run() - elif resolve_s3 and bool(s3_bucket): - raise DeployResolveS3AndS3SetError() - elif resolve_s3: - s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") - click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") - click.echo("\t\tOr by specifying --s3-bucket explicitly.") + else: + if resolve_s3 and bool(s3_bucket): + raise DeployResolveS3AndS3SetError() + elif resolve_s3: + s3_bucket = manage_stack(profile=profile, region=region) + click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") + click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") + click.echo("\t\tOr by specifying --s3-bucket explicitly.") + + if resolve_image_repos: + if image_repositories is None: + image_repositories = {} + manager_helper = CompanionStackManagerHelper( + stack_name, region, s3_bucket, s3_prefix, template_file, image_repositories + ) + image_repositories.update(manager_helper.manager.get_repository_mapping()) + manager_helper.manager.sync_repos() with osutils.tempfile_platform_independent() as output_template_file: diff --git a/samcli/commands/deploy/guided_config.py b/samcli/commands/deploy/guided_config.py index eef259af9c..ef2c156b54 100644 --- a/samcli/commands/deploy/guided_config.py +++ b/samcli/commands/deploy/guided_config.py @@ -102,7 +102,7 @@ def _save_parameter_overrides(self, cmd_names, config_env, parameter_overrides, samconfig.put(cmd_names, self.section, "parameter_overrides", " ".join(_params), env=config_env) def 
_save_image_repositories(self, cmd_names, config_env, samconfig, image_repositories): - if image_repositories: + if image_repositories is not None: _image_repositories = [f"{key}={value}" for key, value in image_repositories.items()] samconfig.put(cmd_names, self.section, "image_repositories", _image_repositories, env=config_env) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 600339463f..62e779e3a6 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -3,6 +3,8 @@ """ import logging +from re import template +from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager from typing import Dict, Any, List @@ -33,10 +35,12 @@ from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.image_utils import tag_translation, NonLocalImageException, NoImageFoundException from samcli.lib.providers.provider import Stack -from samcli.lib.providers.sam_function_provider import SamFunctionProvider from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.colors import Colored from samcli.lib.utils.packagetype import IMAGE +from samcli.commands.deploy.utils import sanitize_parameter_overrides +from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider +from samcli.lib.providers.sam_function_provider import SamFunctionProvider LOG = logging.getLogger(__name__) @@ -276,52 +280,43 @@ def prompt_image_repository( self, stack_name, stacks: List[Stack], image_repositories: Dict[str, str], region, s3_bucket, s3_prefix ): image_repositories = image_repositories.copy() if image_repositories is not None else {} + self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) - companion_stack_manager = CompanionStackManager(stack_name, region, 
s3_bucket, s3_prefix) - deployed_repos = companion_stack_manager.list_deployed_repos() - deployed_repo_uris = [companion_stack_manager.get_repo_uri(repo) for repo in deployed_repos] + manager_helper = CompanionStackManagerHelper( + stack_name, region, s3_bucket, s3_prefix, self.template_file, image_repositories + ) - self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) - function_logical_ids = get_template_function_resource_ids(template_file=self.template_file, artifact=IMAGE) - missing_repo_functions = list() - auto_ecr_repo_functions = list() - if image_repositories: - for function_logical_id in function_logical_ids: - if function_logical_id not in self.image_repositories: - missing_repo_functions.append(function_logical_id) - continue - - repo_uri = self.image_repositories[function_logical_id] - if repo_uri in deployed_repo_uris: - auto_ecr_repo_functions.append(function_logical_id) - - if missing_repo_functions == function_logical_ids: + # Prompt for auto create repos + if not manager_helper.missing_repo_functions: + create_all_repos = False + elif manager_helper.missing_repo_functions == manager_helper.function_logical_ids: click.echo("\n\t\tImage repositories: Not found.") click.echo( "\t\t#Managed repositories will be deleted when their functions are removed from the template and deployed" ) create_all_repos = click.confirm("\t\tCreate managed ECR repositories for all functions?", default=True) else: - functions_with_repo_count = len(function_logical_ids) - len(missing_repo_functions) + functions_with_repo_count = len(manager_helper.function_logical_ids) - len( + manager_helper.missing_repo_functions + ) click.echo( - f"\n\t\tImage repositories: Found ({functions_with_repo_count} of {len(function_logical_ids)}) #Different image repositories can be set in samconfig.toml" + f"\n\t\tImage repositories: Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)}) #Different image repositories can be set 
in samconfig.toml" ) click.echo( "\t\t#Managed repositories will be deleted when their functions are removed from the template and deployed" ) create_all_repos = ( click.confirm( - f"\n\t\tCreate managed ECR repositories for the {len(missing_repo_functions)} functions without?", + f"\n\t\tCreate managed ECR repositories for the {len(manager_helper.missing_repo_functions)} functions without?", default=True, ) - if missing_repo_functions + if manager_helper.missing_repo_functions else True ) - companion_stack_manager.set_functions(missing_repo_functions + auto_ecr_repo_functions) - + # Prompt for image repos if not create_all_repos: - for function_logical_id in missing_repo_functions: + for function_logical_id in manager_helper.missing_repo_functions: image_uri = prompt( f"\t\t{self.start_bold}ECR repository for {function_logical_id}:{self.end_bold}", default=self.image_repository, @@ -331,11 +326,17 @@ def prompt_image_repository( image_repositories[function_logical_id] = image_uri - unreferenced_repos = companion_stack_manager.get_unreferenced_repos() - if unreferenced_repos: - click.echo(f"\t\tChecking for unreferenced ECR repositories to clean-up: {len(unreferenced_repos)} found") - for repo in unreferenced_repos: - repo_uri = companion_stack_manager.get_repo_uri(repo) + manager_helper.update_sepcified_image_repos(image_repositories) + else: + image_repositories.update(manager_helper.manager.get_repository_mapping()) + + # Prompt for deleting referenced repos + if manager_helper.unreferenced_repos: + click.echo( + f"\t\tChecking for unreferenced ECR repositories to clean-up: {len(manager_helper.unreferenced_repos)} found" + ) + for repo in manager_helper.unreferenced_repos: + repo_uri = manager_helper.manager.get_repo_uri(repo) click.echo(f"\t\t {repo_uri}") delete_repos = click.confirm( "\t\tDelete the unreferenced repositories listed above when deploying?", @@ -349,13 +350,10 @@ def prompt_image_repository( 
"\t\thttps://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html" ) raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") - else: - companion_stack_manager.delete_unreferenced_repos() - if create_all_repos: - companion_stack_manager.update_companion_stack() - image_repositories.update(companion_stack_manager.get_repository_mapping()) + image_repositories = manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) + # Check images locally exist for resource_id, function_prop in self.function_provider.functions.items(): if function_prop.packagetype == IMAGE: image = function_prop.imageuri @@ -369,6 +367,8 @@ def prompt_image_repository( click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") click.secho(nl=True) + manager_helper.manager.sync_repos() + return image_repositories def run(self): diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 8b5e8ba785..163d0f7898 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -15,7 +15,7 @@ class CompanionStackManager: - _companion_stack: str + _companion_stack: CompanionStack _builder: CompanionStackBuilder _boto_config: Config _s3_bucket: str @@ -69,13 +69,15 @@ def update_companion_stack(self) -> None: ) template_url = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) - waiter_config = {"Delay": 30, "MaxAttempts": 120} - if self.does_companion_stack_exist(): + waiter_config = {"Delay": 10, "MaxAttempts": 120} + + exists = self.does_companion_stack_exist() + if exists: self._cfn_client.update_stack( StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] ) waiter = self._cfn_client.get_waiter("stack_update_complete") - else: + elif self._builder.repo_mapping: 
self._cfn_client.create_stack( StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] ) @@ -83,12 +85,19 @@ def update_companion_stack(self) -> None: waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + def delete_companion_stack(self): + stack_name = self._companion_stack.stack_name + waiter = self._cfn_client.get_waiter("stack_delete_complete") + waiter_config = {"Delay": 10, "MaxAttempts": 60} + self._cfn_client.delete_stack(StackName=stack_name) + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + def list_deployed_repos(self) -> List[ECRRepo]: """ Not using create_change_set as it is slow """ if not self.does_companion_stack_exist(): - return None + return [] repos: List[ECRRepo] = list() stack = boto3.resource("cloudformation", config=self._boto_config).Stack(self._companion_stack.stack_name) resources = stack.resource_summaries.all() @@ -117,7 +126,22 @@ def get_unreferenced_repos(self) -> List[ECRRepo]: def delete_unreferenced_repos(self) -> None: repos = self.get_unreferenced_repos() for repo in repos: - self._ecr_client.delete_repository(repositoryName=repo.physical_id, force=True) + try: + self._ecr_client.delete_repository(repositoryName=repo.physical_id, force=True) + except self._ecr_client.exceptions.RepositoryNotFoundException: + pass + + def sync_repos(self) -> None: + exists = self.does_companion_stack_exist() + has_repo = bool(self.get_repository_mapping()) + if exists: + self.delete_unreferenced_repos() + if has_repo: + self.update_companion_stack() + else: + self.delete_companion_stack() + elif not exists and has_repo: + self.update_companion_stack() def does_companion_stack_exist(self) -> bool: try: @@ -131,3 +155,6 @@ def get_repository_mapping(self) -> Dict[str, str]: def get_repo_uri(self, repo: ECRRepo) -> str: return repo.get_repo_uri(self._account_id, self._region_name) + + def is_repo_uri(self, repo_uri, function_logical_id) -> bool: + return repo_uri == 
self.get_repo_uri(ECRRepo(self._companion_stack, function_logical_id)) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py new file mode 100644 index 0000000000..7de352a175 --- /dev/null +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -0,0 +1,41 @@ +from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo +from samcli.commands._utils.template import ( + get_template_function_resource_ids, +) +from samcli.lib.utils.packagetype import IMAGE +from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager +from typing import Dict + + +class CompanionStackManagerHelper: + def __init__(self, stack_name, region, s3_bucket, s3_prefix, template_file, specified_image_repos): + self.function_logical_ids = get_template_function_resource_ids(template_file=template_file, artifact=IMAGE) + self.missing_repo_functions = list() + self.auto_ecr_repo_functions = list() + self.manager = CompanionStackManager(stack_name, region, s3_bucket, s3_prefix) + self.deployed_repos = self.manager.list_deployed_repos() + self.deployed_repo_uris = [self.manager.get_repo_uri(repo) for repo in self.deployed_repos] + self.update_sepcified_image_repos(specified_image_repos) + self.unreferenced_repos = self.manager.get_unreferenced_repos() + + def update_sepcified_image_repos(self, specified_image_repos): + self.missing_repo_functions.clear() + self.auto_ecr_repo_functions.clear() + for function_logical_id in self.function_logical_ids: + if not specified_image_repos or function_logical_id not in specified_image_repos: + self.missing_repo_functions.append(function_logical_id) + continue + + repo_uri = specified_image_repos[function_logical_id] + if self.manager.is_repo_uri(repo_uri, function_logical_id): + self.auto_ecr_repo_functions.append(function_logical_id) + self.manager.set_functions(self.missing_repo_functions + 
self.auto_ecr_repo_functions) + + def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, str]): + output_image_repositories = image_repositories.copy() + for function_logical_id, repo_uri in image_repositories.items(): + for repo in self.unreferenced_repos: + if self.manager.get_repo_uri(repo) == repo_uri: + del output_image_repositories[function_logical_id] + break + return output_image_repositories \ No newline at end of file diff --git a/samcli/lib/cli_validation/image_repository_validation.py b/samcli/lib/cli_validation/image_repository_validation.py index 329e855019..01377536bf 100644 --- a/samcli/lib/cli_validation/image_repository_validation.py +++ b/samcli/lib/cli_validation/image_repository_validation.py @@ -25,6 +25,7 @@ def wrapped(*args, **kwargs): guided = ctx.params.get("guided", False) or ctx.params.get("g", False) image_repository = ctx.params.get("image_repository", False) image_repositories = ctx.params.get("image_repositories", False) or {} + resolve_image_repos = ctx.params.get("resolve_image_repos", False) template_file = ( ctx.params.get("t", False) or ctx.params.get("template_file", False) or ctx.params.get("template", False) ) @@ -50,11 +51,13 @@ def wrapped(*args, **kwargs): ), ), Validator( - validation_function=lambda: not guided and not (image_repository or image_repositories) and required, + validation_function=lambda: not guided + and not (image_repository or image_repositories or resolve_image_repos) + and required, exception=click.BadOptionUsage( option_name="--image-repositories", ctx=ctx, - message="Missing option '--image-repository' or '--image-repositories'", + message="Missing option '--image-repository', '--image-repositories', or '--resolve_image_repos'", ), ), Validator( @@ -62,6 +65,7 @@ def wrapped(*args, **kwargs): and ( set(image_repositories.keys()) != set(get_template_function_resource_ids(template_file, IMAGE)) and image_repositories + and not resolve_image_repos ), 
exception=click.BadOptionUsage( option_name="--image-repositories", From 2e16290113fca247dc442ce9b69e075c6668b61b Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 3 Mar 2021 23:40:16 -0800 Subject: [PATCH 012/121] Addressed Some of Pylint Issues --- samcli/commands/deploy/command.py | 8 ++--- samcli/commands/deploy/guided_context.py | 30 ++++++++++--------- .../companion_stack_builder.py | 7 ++++- .../companion_stack_manager.py | 17 ++++++----- .../companion_stack_manager_helper.py | 9 ++++-- .../bootstrap/companion_stack/data_types.py | 3 ++ 6 files changed, 45 insertions(+), 29 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 9c0bf8ca37..085f470b5a 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -2,7 +2,6 @@ CLI command for "deploy" command """ import logging -from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper import click @@ -27,6 +26,7 @@ from samcli.lib.utils import osutils from samcli.lib.bootstrap.bootstrap import manage_stack from samcli.lib.utils.version_checker import check_newer_version +from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper SHORT_HELP = "Deploy an AWS SAM application." 
@@ -293,9 +293,9 @@ def do_cli( ) guided_context.run() else: - if resolve_s3 and bool(s3_bucket): - raise DeployResolveS3AndS3SetError() - elif resolve_s3: + if resolve_s3: + if bool(s3_bucket): + raise DeployResolveS3AndS3SetError() s3_bucket = manage_stack(profile=profile, region=region) click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 62e779e3a6..e4d6faeb24 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -3,9 +3,6 @@ """ import logging -from re import template -from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper -from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager from typing import Dict, Any, List import click @@ -17,8 +14,6 @@ from samcli.commands._utils.options import _space_separated_list_func_type from samcli.commands._utils.template import ( get_template_parameters, - get_template_artifacts_format, - get_template_function_resource_ids, ) from samcli.commands.deploy.code_signer_utils import ( signer_config_per_function, @@ -38,9 +33,8 @@ from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.colors import Colored from samcli.lib.utils.packagetype import IMAGE -from samcli.commands.deploy.utils import sanitize_parameter_overrides -from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.providers.sam_function_provider import SamFunctionProvider +from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper LOG = logging.getLogger(__name__) @@ -292,7 +286,8 @@ def prompt_image_repository( elif manager_helper.missing_repo_functions == manager_helper.function_logical_ids: click.echo("\n\t\tImage repositories: 
Not found.") click.echo( - "\t\t#Managed repositories will be deleted when their functions are removed from the template and deployed" + "\t\t#Managed repositories will be deleted when " + "their functions are removed from the template and deployed" ) create_all_repos = click.confirm("\t\tCreate managed ECR repositories for all functions?", default=True) else: @@ -300,14 +295,18 @@ def prompt_image_repository( manager_helper.missing_repo_functions ) click.echo( - f"\n\t\tImage repositories: Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)}) #Different image repositories can be set in samconfig.toml" + "\n\t\tImage repositories: " + f"Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)})" + "#Different image repositories can be set in samconfig.toml" ) click.echo( - "\t\t#Managed repositories will be deleted when their functions are removed from the template and deployed" + "\t\t#Managed repositories will be deleted when their functions are " + "removed from the template and deployed" ) create_all_repos = ( click.confirm( - f"\n\t\tCreate managed ECR repositories for the {len(manager_helper.missing_repo_functions)} functions without?", + "\n\t\tCreate managed ECR repositories for the " + f"{len(manager_helper.missing_repo_functions)} functions without?", default=True, ) if manager_helper.missing_repo_functions @@ -333,7 +332,8 @@ def prompt_image_repository( # Prompt for deleting referenced repos if manager_helper.unreferenced_repos: click.echo( - f"\t\tChecking for unreferenced ECR repositories to clean-up: {len(manager_helper.unreferenced_repos)} found" + "\t\tChecking for unreferenced ECR repositories to clean-up: " + f"{len(manager_helper.unreferenced_repos)} found" ) for repo in manager_helper.unreferenced_repos: repo_uri = manager_helper.manager.get_repo_uri(repo) @@ -345,8 +345,10 @@ def prompt_image_repository( if not delete_repos: click.echo("\t\tDeployment aborted!") click.echo( - "\t\t#The 
deployment was aborted to prevent unreferenced managed ECR repositories from being deleted." - "\t\t#You may remove repositories from the SAMCLI managed stack to retain them and resolve this unreferenced check." + "\t\t#The deployment was aborted to prevent " + "unreferenced managed ECR repositories from being deleted." + "\t\t#You may remove repositories from the SAMCLI " + "managed stack to retain them and resolve this unreferenced check." "\t\thttps://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html" ) raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py index a47088609b..deb4650a98 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py @@ -1,4 +1,9 @@ +""" + Companion stack template builder +""" from typing import Dict + +# pylint: disable=W0402 from string import Template from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo @@ -32,7 +37,7 @@ - Key: AwsSamCliCompanionStack Value: $companion_stack_name - RepositoryPolicyText: + RepositoryPolicyText: Version: "2012-10-17" Statement: - diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 163d0f7898..b39e65148c 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -1,17 +1,19 @@ -from mypy_boto3_cloudformation.client import CloudFormationClient -from mypy_boto3_s3.client import S3Client -from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.deploy.deployer import Deployer -import boto3 - +""" + Companion stack manager +""" from typing import List, Dict +import boto3 +from 
mypy_boto3_cloudformation.client import CloudFormationClient +from mypy_boto3_s3.client import S3Client from botocore.config import Config from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError + from samcli.commands.exceptions import CredentialsError, RegionError from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo from samcli.lib.package.artifact_exporter import mktempfile +from samcli.lib.package.s3_uploader import S3Uploader class CompanionStackManager: @@ -37,7 +39,8 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix): self._region_name = self._cfn_client.meta.region_name except NoCredentialsError as ex: raise CredentialsError( - "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. " + "Error Setting Up Managed Stack Client: Unable to resolve " + "credentials for the AWS SDK for Python client. 
" "Please see their documentation for options to pass in credentials: " "https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" ) from ex diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 7de352a175..59750b36a9 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -1,10 +1,13 @@ -from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo +""" + Help class to bridge CLI functions and CompanionStackManager +""" +from typing import Dict + from samcli.commands._utils.template import ( get_template_function_resource_ids, ) from samcli.lib.utils.packagetype import IMAGE from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager -from typing import Dict class CompanionStackManagerHelper: @@ -38,4 +41,4 @@ def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, s if self.manager.get_repo_uri(repo) == repo_uri: del output_image_repositories[function_logical_id] break - return output_image_repositories \ No newline at end of file + return output_image_repositories diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 684c4127df..351d778df0 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -1,3 +1,6 @@ +""" + Date type classes for companion stacks +""" import re from samcli.lib.utils.hash import str_checksum From 739b5501fbc1507ee3a49c189a15ca09d38bc58a Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Tue, 13 Apr 2021 21:37:30 -0700 Subject: [PATCH 013/121] Updated Helper Text --- samcli/commands/deploy/command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/commands/deploy/command.py 
b/samcli/commands/deploy/command.py index 085f470b5a..d26525b049 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -161,7 +161,7 @@ "--resolve-image-repos", required=False, is_flag=True, - help="Automatically create ECR repos for image based functions in non-guided deployments." + help="Automatically create and delete ECR repos for image based functions in non-guided deployments." "Auto created image repos will be deleted if the corresponding functions are removed.", ) @metadata_override_option From 701932cd3d2d470323626f45c557cd2715b068cf Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 14 Apr 2021 01:41:59 -0700 Subject: [PATCH 014/121] Updated Comments --- samcli/commands/deploy/guided_context.py | 216 +++++++++++++----- .../companion_stack_builder.py | 20 ++ .../companion_stack_manager.py | 91 +++++++- .../companion_stack_manager_helper.py | 28 ++- .../bootstrap/companion_stack/data_types.py | 22 ++ 5 files changed, 316 insertions(+), 61 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 344ffd076a..37de1ecb21 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -29,7 +29,7 @@ from samcli.lib.bootstrap.bootstrap import manage_stack from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.image_utils import tag_translation, NonLocalImageException, NoImageFoundException -from samcli.lib.providers.provider import Stack +from samcli.lib.providers.provider import Function, Stack from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.colors import Colored from samcli.lib.utils.packagetype import IMAGE @@ -288,20 +288,41 @@ def prompt_parameters( return _prompted_param_overrides def prompt_image_repository( - self, stack_name, stacks: List[Stack], image_repositories: Dict[str, str], region, s3_bucket, s3_prefix - ): + self, + stack_name, + stacks: 
List[Stack], + image_repositories: Dict[str, str], + region: str, + s3_bucket: str, + s3_prefix: str, + ) -> Dict[str, str]: """ Prompt for the image repository to push the images. For each image function found in build artifacts, it will prompt for an image repository. Parameters ---------- + stack_name : List[Stack] + Name of the stack to be deployed. + stacks : List[Stack] List of stacks to look for image functions. + image_repositories: Dict[str, str] + Dictionary with function logical ID as key and image repo URI as value. + + region: str + Region for the image repos. + + s3_bucket: str + s3 bucket URI to be used for uploading companion stack template + + s3_prefix: str + s3 prefix to be used for uploading companion stack template + Returns ------- - Dict + Dict[str, str] A dictionary contains image function logical ID as key, image repository as value. """ image_repositories = image_repositories.copy() if image_repositories is not None else {} @@ -311,7 +332,71 @@ def prompt_image_repository( stack_name, region, s3_bucket, s3_prefix, self.template_file, image_repositories ) - # Prompt for auto create repos + create_all_repos = GuidedContext.prompt_create_all_repos(manager_helper) + if create_all_repos: + image_repositories.update(manager_helper.manager.get_repository_mapping()) + else: + image_repositories = GuidedContext.prompt_specify_repos( + manager_helper, image_repositories, self.image_repository + ) + manager_helper.update_sepcified_image_repos(image_repositories) + + image_repositories = GuidedContext.prompt_delete_unreferenced_repos(manager_helper, image_repositories) + GuidedContext.verify_images_exist_locally(self.function_provider.functions, image_repositories) + + manager_helper.manager.sync_repos() + return image_repositories + + @staticmethod + def prompt_specify_repos( + manager_helper: CompanionStackManagerHelper, + image_repositories: Dict[str, str], + default_image_repo, + ) -> Dict[str, str]: + """ + Show prompts for each function that 
isn't associated with a image repo + + Parameters + ---------- + manager_helper: CompanionStackManagerHelper + Instance of CompanionStackManagerHelper + + image_repositories: Dict[str, str] + Current image repo dictionary with function logical ID as key and image repo URI as value. + + default_image_repo: str + Default image repo URI to be shown for each function prompt. + + Returns + ------- + Dict[str, str] + Updated image repo dictionary with values(image repo URIs) filled by user input + """ + image_repositories = image_repositories.copy() + for function_logical_id in manager_helper.missing_repo_functions: + image_uri = prompt(f"\t\tECR repository for {function_logical_id}:", default=default_image_repo) + if not is_ecr_url(image_uri): + raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") + + image_repositories[function_logical_id] = image_uri + + return image_repositories + + @staticmethod + def prompt_create_all_repos(manager_helper: CompanionStackManagerHelper) -> bool: + """ + Prompt whether to create all repos + + Parameters + ---------- + manager_helper: CompanionStackManagerHelper + Instance of CompanionStackManagerHelper + + Returns + ------- + Boolean + Returns False if there is no missing function or denied by prompt + """ if not manager_helper.missing_repo_functions: create_all_repos = False elif manager_helper.missing_repo_functions == manager_helper.function_logical_ids: @@ -343,66 +428,83 @@ def prompt_image_repository( if manager_helper.missing_repo_functions else True ) + return create_all_repos - # Prompt for image repos - if not create_all_repos: - for function_logical_id in manager_helper.missing_repo_functions: - image_uri = prompt( - f"\t\t{self.start_bold}ECR repository for {function_logical_id}:{self.end_bold}", - default=self.image_repository, - ) - if not is_ecr_url(image_uri): - raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") + @staticmethod + def 
prompt_delete_unreferenced_repos( + manager_helper: CompanionStackManagerHelper, image_repositories: Dict[str, str] + ) -> Dict[str, str]: + """ + Prompt user for deleting unreferenced companion stack image repos. + Throws GuidedDeployFailedError if delete repos has been denied by the user. + This function does not actually remove the functions from the stack. - image_repositories[function_logical_id] = image_uri + Parameters + ---------- + manager_helper: CompanionStackManagerHelper + Instance of CompanionStackManagerHelper - manager_helper.update_sepcified_image_repos(image_repositories) - else: - image_repositories.update(manager_helper.manager.get_repository_mapping()) + image_repositories: Dict[str, str] + Current image repo dictionary with function logical ID as key and image repo URI as value. + + Returns + ------- + Dict[str, str] + Updated image repo dictionary with unreferenced repos removed + """ + if not manager_helper.unreferenced_repos: + return image_repositories - # Prompt for deleting referenced repos - if manager_helper.unreferenced_repos: + click.echo( + "\t\tChecking for unreferenced ECR repositories to clean-up: " + f"{len(manager_helper.unreferenced_repos)} found" + ) + for repo in manager_helper.unreferenced_repos: + repo_uri = manager_helper.manager.get_repo_uri(repo) + click.echo(f"\t\t {repo_uri}") + delete_repos = click.confirm( + "\t\tDelete the unreferenced repositories listed above when deploying?", + default=False, + ) + if not delete_repos: + click.echo("\t\tDeployment aborted!") click.echo( - "\t\tChecking for unreferenced ECR repositories to clean-up: " - f"{len(manager_helper.unreferenced_repos)} found" + "\t\t#The deployment was aborted to prevent " + "unreferenced managed ECR repositories from being deleted." + "\t\t#You may remove repositories from the SAMCLI " + "managed stack to retain them and resolve this unreferenced check." 
+ "\t\thttps://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html" ) - for repo in manager_helper.unreferenced_repos: - repo_uri = manager_helper.manager.get_repo_uri(repo) - click.echo(f"\t\t {repo_uri}") - delete_repos = click.confirm( - "\t\tDelete the unreferenced repositories listed above when deploying?", - default=False, - ) - if not delete_repos: - click.echo("\t\tDeployment aborted!") - click.echo( - "\t\t#The deployment was aborted to prevent " - "unreferenced managed ECR repositories from being deleted." - "\t\t#You may remove repositories from the SAMCLI " - "managed stack to retain them and resolve this unreferenced check." - "\t\thttps://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html" - ) - raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") - - image_repositories = manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) - - # Check images locally exist - for resource_id, function_prop in self.function_provider.functions.items(): - if function_prop.packagetype == IMAGE: - image = function_prop.imageuri - try: - tag = tag_translation(image) - except NonLocalImageException: - pass - except NoImageFoundException as ex: - raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex - else: - click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") - click.secho(nl=True) + raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") - manager_helper.manager.sync_repos() + return manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) - return image_repositories + @staticmethod + def verify_images_exist_locally(functions: Dict[str, Function], image_repositories: Dict[str, str]) -> None: + """ + Verify all images associated with deploying functions exist locally. 
+ + Parameters + ---------- + functions: Dict[str, Function] + Dictionary of functions in the stack to be deployed with key as their logical ID. + + image_repositories: Dict[str, str] + Image repo dictionary with function logical ID as key and image repo URI as value. + """ + for resource_id, function_prop in functions.items(): + if function_prop.packagetype != IMAGE: + continue + image = function_prop.imageuri + try: + tag = tag_translation(image) + except NonLocalImageException: + pass + except NoImageFoundException as ex: + raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex + else: + click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") + click.secho(nl=True) def run(self): diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py index deb4650a98..539288bc1c 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py @@ -62,6 +62,10 @@ class CompanionStackBuilder: + """ + CFN template builder for the companion stack + """ + _parent_stack_name: str _companion_stack: CompanionStack _repo_mapping: Dict[str, ECRRepo] @@ -71,12 +75,25 @@ def __init__(self, companion_stack: CompanionStack) -> None: self._repo_mapping: Dict[str, ECRRepo] = dict() def add_function(self, function_logical_id: str) -> None: + """ + Add an ECR repo associated with the function to the companion stack template + """ self._repo_mapping[function_logical_id] = ECRRepo(self._companion_stack, function_logical_id) def clear_functions(self) -> None: + """ + Remove all functions that need ECR repos + """ self._repo_mapping = dict() def build(self) -> str: + """ + Build companion stack CFN template with current functions + Returns + ------- + str + CFN template for companions stack + """ repo_templates = list() repo_output_templates = list() 
companion_stack_name = self._companion_stack.stack_name @@ -107,4 +124,7 @@ def build(self) -> str: @property def repo_mapping(self) -> Dict[str, ECRRepo]: + """ + Repo mapping dictionary with key as function logical ID and value as ECRRepo object + """ return self._repo_mapping diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index b39e65148c..c05dd5ff93 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -17,6 +17,11 @@ class CompanionStackManager: + """ + Manager class for a companion stack + Used to create/update the remote stack + """ + _companion_stack: CompanionStack _builder: CompanionStackBuilder _boto_config: Config @@ -51,11 +56,23 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix): ) from ex def set_functions(self, function_logical_ids: List[str]) -> None: + """ + Sets functions that need to have ECR repos created + + Parameters + ---------- + function_logical_ids: List[str] + Function logical IDs that need to have ECR repos created + """ self._builder.clear_functions() for function_logical_id in function_logical_ids: self._builder.add_function(function_logical_id) def update_companion_stack(self) -> None: + """ + Blocking call to create or update the companion stack based on current functions + Companion stack template will be updated to the s3 bucket first before deployment + """ stack_name = self._companion_stack.stack_name template = self._builder.build() @@ -89,6 +106,9 @@ def update_companion_stack(self) -> None: waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) def delete_companion_stack(self): + """ + Blocking call to delte the companion stack + """ stack_name = self._companion_stack.stack_name waiter = self._cfn_client.get_waiter("stack_delete_complete") waiter_config = {"Delay": 10, "MaxAttempts": 60} @@ -97,7 +117,14 @@ def 
delete_companion_stack(self): def list_deployed_repos(self) -> List[ECRRepo]: """ - Not using create_change_set as it is slow + List deployed ECR repos for this companion stack + Not using create_change_set as it is slow. + + Returns + ------- + List[ECRRepo] + List of ECR repos deployed for this companion stack + Returns empty list if companion stack does not exist """ if not self.does_companion_stack_exist(): return [] @@ -112,6 +139,15 @@ def list_deployed_repos(self) -> List[ECRRepo]: return repos def get_unreferenced_repos(self) -> List[ECRRepo]: + """ + List deployed ECR repos that is not referenced by current list of functions + + Returns + ------- + List[ECRRepo] + List of deployed ECR repos that is not referenced by current list of functions + Returns empty list if companion stack does not exist + """ if not self.does_companion_stack_exist(): return [] deployed_repos: List[ECRRepo] = self.list_deployed_repos() @@ -127,6 +163,9 @@ def get_unreferenced_repos(self) -> List[ECRRepo]: return unreferenced_repos def delete_unreferenced_repos(self) -> None: + """ + Blocking call to delete all deployed ECR repos that are unreferenced by a function + """ repos = self.get_unreferenced_repos() for repo in repos: try: @@ -135,6 +174,12 @@ def delete_unreferenced_repos(self) -> None: pass def sync_repos(self) -> None: + """ + Blocking call to sync companion stack with the following actions + Create/Update companion stack. + Deletes unreferenced repos. + Deletes companion stack if there isn't any repo left. 
+ """ exists = self.does_companion_stack_exist() has_repo = bool(self.get_repository_mapping()) if exists: @@ -147,6 +192,14 @@ def sync_repos(self) -> None: self.update_companion_stack() def does_companion_stack_exist(self) -> bool: + """ + Does companion stack exist + + Returns + ------- + bool + Returns True if companion stack exists + """ try: self._cfn_client.describe_stacks(StackName=self._companion_stack.stack_name) return True @@ -154,10 +207,46 @@ def does_companion_stack_exist(self) -> bool: return False def get_repository_mapping(self) -> Dict[str, str]: + """ + Get current function to repo mapping + + Returns + ------- + Dict[str, str] + Dictionary with key as function logical ID and value as ECR repo URI. + """ return dict((k, self.get_repo_uri(v)) for (k, v) in self._builder.repo_mapping.items()) def get_repo_uri(self, repo: ECRRepo) -> str: + """ + Get repo URI for a ECR repo + + Parameters + ---------- + repo: ECRRepo + + Returns + ------- + str + ECR repo URI based on account ID and region. + """ return repo.get_repo_uri(self._account_id, self._region_name) def is_repo_uri(self, repo_uri, function_logical_id) -> bool: + """ + Check whether repo URI is a companion stack repo + + Parameters + ---------- + repo_uri: str + Repo URI to be checked. + + function_logical_id: str + Function logical ID associated with the image repo. + + Returns + ------- + bool + Returns True if repo_uri is a companion stack repo. 
+ """ return repo_uri == self.get_repo_uri(ECRRepo(self._companion_stack, function_logical_id)) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 59750b36a9..09b5f17d69 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -1,7 +1,7 @@ """ Help class to bridge CLI functions and CompanionStackManager """ -from typing import Dict +from typing import Dict, List from samcli.commands._utils.template import ( get_template_function_resource_ids, @@ -21,7 +21,16 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix, template_file, spec self.update_sepcified_image_repos(specified_image_repos) self.unreferenced_repos = self.manager.get_unreferenced_repos() - def update_sepcified_image_repos(self, specified_image_repos): + def update_sepcified_image_repos(self, specified_image_repos: Dict[str, str]) -> None: + """ + Update list of image repos specified for each function. + updates missing_repo_functions and auto_ecr_repo_functions accordingly. 
+ + Parameters + ---------- + specified_image_repos: Dict[str, str] + Dictionary of image repo URIs with key as function logical ID and value as image repo URI + """ self.missing_repo_functions.clear() self.auto_ecr_repo_functions.clear() for function_logical_id in self.function_logical_ids: @@ -34,7 +43,20 @@ def update_sepcified_image_repos(self, specified_image_repos): self.auto_ecr_repo_functions.append(function_logical_id) self.manager.set_functions(self.missing_repo_functions + self.auto_ecr_repo_functions) - def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, str]): + def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, str]) -> None: + """ + Removes image repos that are not referenced by a function + + Parameters + ---------- + image_repositories: Dict[str, str] + Dictionary of image repo URIs with key as function logical ID and value as image repo URI + + Returns + ---------- + Dict[str, str] + Copy of image_repositories that have unreferenced image repos removed + """ output_image_repositories = image_repositories.copy() for function_logical_id, repo_uri in image_repositories.items(): for repo in self.unreferenced_repos: diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 351d778df0..44df5a17b6 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -6,6 +6,11 @@ class CompanionStack: + """ + Abstraction class for the companion stack + Companion stack name will be generated by this class. 
+ """ + _parent_stack_name: str _escaped_parent_stack_name: str _parent_stack_hash: str @@ -19,22 +24,39 @@ def __init__(self, parent_stack_name: str) -> None: @property def parent_stack_name(self) -> str: + """ + Parent stack name + """ return self._parent_stack_name @property def escaped_parent_stack_name(self) -> str: + """ + Parent stack name with only alpha numerica characters + """ return self._escaped_parent_stack_name @property def parent_stack_hash(self) -> str: + """ + MD5 hash of parent stack name + """ return self._parent_stack_hash @property def stack_name(self) -> str: + """ + Companion stack stack name + """ return self._stack_name class ECRRepo: + """ + Abstraction class for ECR repos in companion stacks + Logical ID, Physical ID, and Repo URI will be generated with this class. + """ + _function_logical_id: str _escaped_function_logical_id: str _function_md5: str From f1f9f451c063647caa2993908b5a1d6bbe77981d Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 14 Apr 2021 02:04:13 -0700 Subject: [PATCH 015/121] Fixed Typing --- samcli/commands/deploy/guided_context.py | 4 +- .../companion_stack_manager.py | 4 +- .../companion_stack_manager_helper.py | 4 +- .../bootstrap/companion_stack/data_types.py | 53 ++++++++++++------- 4 files changed, 39 insertions(+), 26 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 37de1ecb21..91d66c9c9e 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -3,7 +3,7 @@ """ import logging -from typing import Dict, Any, List +from typing import Dict, Any, List, Optional import click from botocore.session import get_session @@ -291,7 +291,7 @@ def prompt_image_repository( self, stack_name, stacks: List[Stack], - image_repositories: Dict[str, str], + image_repositories: Optional[Dict[str, str]], region: str, s3_bucket: str, s3_prefix: str, diff --git 
a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index c05dd5ff93..bbaa769167 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -103,7 +103,7 @@ def update_companion_stack(self) -> None: ) waiter = self._cfn_client.get_waiter("stack_create_complete") - waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) # type: ignore def delete_companion_stack(self): """ @@ -232,7 +232,7 @@ def get_repo_uri(self, repo: ECRRepo) -> str: """ return repo.get_repo_uri(self._account_id, self._region_name) - def is_repo_uri(self, repo_uri, function_logical_id) -> bool: + def is_repo_uri(self, repo_uri: str, function_logical_id: str) -> bool: """ Check whether repo URI is a companion stack repo diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 09b5f17d69..46c2143c6a 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -1,7 +1,7 @@ """ Help class to bridge CLI functions and CompanionStackManager """ -from typing import Dict, List +from typing import Dict from samcli.commands._utils.template import ( get_template_function_resource_ids, @@ -43,7 +43,7 @@ def update_sepcified_image_repos(self, specified_image_repos: Dict[str, str]) -> self.auto_ecr_repo_functions.append(function_logical_id) self.manager.set_functions(self.missing_repo_functions + self.auto_ecr_repo_functions) - def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, str]) -> None: + def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, str]) -> Dict[str, str]: """ Removes image repos that are not 
referenced by a function diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 44df5a17b6..c0388e2f19 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -2,6 +2,7 @@ Date type classes for companion stacks """ import re +from typing import Optional, cast from samcli.lib.utils.hash import str_checksum @@ -57,29 +58,35 @@ class ECRRepo: Logical ID, Physical ID, and Repo URI will be generated with this class. """ - _function_logical_id: str - _escaped_function_logical_id: str - _function_md5: str - _companion_stack: str - _logical_id: str - _physical_id: str - _output_logical_id: str + _function_logical_id: Optional[str] + _escaped_function_logical_id: Optional[str] + _function_md5: Optional[str] + _companion_stack: Optional[CompanionStack] + _logical_id: Optional[str] + _physical_id: Optional[str] + _output_logical_id: Optional[str] def __init__( self, - companion_stack: CompanionStack = None, - function_logical_id: str = None, - logical_id: str = None, - physical_id: str = None, - output_logical_id: str = None, + companion_stack: Optional[CompanionStack] = None, + function_logical_id: Optional[str] = None, + logical_id: Optional[str] = None, + physical_id: Optional[str] = None, + output_logical_id: Optional[str] = None, ): + """ + Must be specified either with + companion_stack and function_logical_id + or + logical_id, physical_id, and output_logical_id + """ self._function_logical_id = function_logical_id self._escaped_function_logical_id = ( re.sub(r"[^a-z0-9]", "", self._function_logical_id.lower()) if self._function_logical_id is not None else None ) - self._function_md5 = str_checksum(function_logical_id) if self._function_logical_id is not None else None + self._function_md5 = str_checksum(self._function_logical_id) if self._function_logical_id is not None else None self._companion_stack = companion_stack 
self._logical_id = logical_id @@ -87,14 +94,19 @@ def __init__( self._output_logical_id = output_logical_id @property - def logical_id(self) -> str: - if self._logical_id is None: + def logical_id(self) -> Optional[str]: + if self._logical_id is None and self._function_logical_id and self._function_md5: self._logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Repo" return self._logical_id @property - def physical_id(self) -> str: - if self._physical_id is None: + def physical_id(self) -> Optional[str]: + if ( + self._physical_id is None + and self._companion_stack + and self._function_md5 + and self._escaped_function_logical_id + ): self._physical_id = ( self._companion_stack.escaped_parent_stack_name + self._companion_stack.parent_stack_hash[:8] @@ -106,9 +118,10 @@ def physical_id(self) -> str: return self._physical_id @property - def output_logical_id(self) -> str: - if self._output_logical_id is None: + def output_logical_id(self) -> Optional[str]: + if self._output_logical_id is None and self._function_logical_id and self._function_md5: self._output_logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Out" + return self._output_logical_id - def get_repo_uri(self, account_id, region): + def get_repo_uri(self, account_id, region) -> str: return f"{account_id}.dkr.ecr.{region}.amazonaws.com/{self.physical_id}" From b82d544636a0c4ccc47a233cbf75d1e5a944bf6a Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Wed, 14 Apr 2021 02:04:48 -0700 Subject: [PATCH 016/121] Removed Unused Imports --- samcli/lib/bootstrap/companion_stack/data_types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index c0388e2f19..311ff1326c 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -2,7 +2,7 @@ Date type classes for companion stacks """ import re -from 
typing import Optional, cast +from typing import Optional from samcli.lib.utils.hash import str_checksum From 2d2cf5bf7d6ab6b6787ad0ee77c1d30ccb8290d9 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 15 Apr 2021 00:59:06 -0700 Subject: [PATCH 017/121] Updated Unit Tests --- samcli/commands/deploy/guided_context.py | 6 +- .../companion_stack_manager.py | 10 +- tests/unit/commands/deploy/test_command.py | 94 +++++--- .../test_companion_stack_builder.py | 93 ++++++++ .../test_companion_stack_manager.py | 207 ++++++++++++++++++ 5 files changed, 370 insertions(+), 40 deletions(-) create mode 100644 tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py create mode 100644 tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 91d66c9c9e..35f6364240 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -405,7 +405,7 @@ def prompt_create_all_repos(manager_helper: CompanionStackManagerHelper) -> bool "\t\t#Managed repositories will be deleted when " "their functions are removed from the template and deployed" ) - create_all_repos = click.confirm("\t\tCreate managed ECR repositories for all functions?", default=True) + create_all_repos = confirm("\t\tCreate managed ECR repositories for all functions?", default=True) else: functions_with_repo_count = len(manager_helper.function_logical_ids) - len( manager_helper.missing_repo_functions @@ -420,7 +420,7 @@ def prompt_create_all_repos(manager_helper: CompanionStackManagerHelper) -> bool "removed from the template and deployed" ) create_all_repos = ( - click.confirm( + confirm( "\n\t\tCreate managed ECR repositories for the " f"{len(manager_helper.missing_repo_functions)} functions without?", default=True, @@ -462,7 +462,7 @@ def prompt_delete_unreferenced_repos( for repo in manager_helper.unreferenced_repos: repo_uri = 
manager_helper.manager.get_repo_uri(repo) click.echo(f"\t\t {repo_uri}") - delete_repos = click.confirm( + delete_repos = confirm( "\t\tDelete the unreferenced repositories listed above when deploying?", default=False, ) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index bbaa769167..546a9917e8 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -2,10 +2,9 @@ Companion stack manager """ from typing import List, Dict +import typing import boto3 -from mypy_boto3_cloudformation.client import CloudFormationClient -from mypy_boto3_s3.client import S3Client from botocore.config import Config from botocore.exceptions import ClientError, NoRegionError, NoCredentialsError @@ -15,6 +14,13 @@ from samcli.lib.package.artifact_exporter import mktempfile from samcli.lib.package.s3_uploader import S3Uploader +if typing.TYPE_CHECKING: + from mypy_boto3_cloudformation.client import CloudFormationClient + from mypy_boto3_s3.client import S3Client +else: + CloudFormationClient = object + S3Client = object + class CompanionStackManager: """ diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 6781972a58..44adbd014a 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -47,6 +47,7 @@ def setUp(self): self.config_env = "mock-default-env" self.config_file = "mock-default-filename" self.signing_profiles = None + self.resolve_image_repos = False MOCK_SAM_CONFIG.reset_mock() @patch("samcli.commands.package.command.click") @@ -85,6 +86,7 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, + resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( 
@@ -121,8 +123,8 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @patch("samcli.commands.deploy.guided_context.prompt") @@ -132,8 +134,8 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( mock_confirm, mock_prompt, mock_signer_config_per_function, + mock_companion_stack_manager_helper, mock_sam_function_provider, - mock_get_template_artifacts_format, mock_get_buildable_stacks, mock_get_template_parameters, mockauth_per_resource, @@ -145,7 +147,6 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_sam_function_provider.return_value = {} - mock_get_template_artifacts_format.return_value = [ZIP] context_mock = Mock() mockauth_per_resource.return_value = [("HelloWorldResource1", False), ("HelloWorldResource2", False)] mock_deploy_context.return_value.__enter__.return_value = context_mock @@ -197,6 +198,7 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, + resolve_image_repos=self.resolve_image_repos, ) @patch("samcli.commands.package.command.click") @@ -207,9 +209,8 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( @patch("samcli.commands.deploy.guided_context.auth_per_resource") 
@patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @patch("samcli.commands.deploy.guided_context.prompt") @@ -221,9 +222,8 @@ def test_all_args_guided( mock_confirm, mock_prompt, mock_signer_config_per_function, + mock_companion_stack_manager_helper, mock_sam_function_provider, - mock_get_template_function_resource_ids, - mock_get_template_artifacts_format, mock_get_buildable_stacks, mock_get_template_parameters, mockauth_per_resource, @@ -235,22 +235,28 @@ def test_all_args_guided( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_get_template_artifacts_format.return_value = [IMAGE] + mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] + mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + 
mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_deploy_context.return_value.__enter__.return_value = context_mock - mock_confirm.side_effect = [True, False, True, True] + mock_confirm.side_effect = [True, False, True, True, True, True] mock_prompt.side_effect = [ "sam-app", "us-east-1", "guidedParameter", "secure", - "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env", @@ -293,6 +299,7 @@ def test_all_args_guided( resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, + resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -347,9 +354,8 @@ def test_all_args_guided( @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.get_template_parameters") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object( GuidedConfig, @@ -365,9 +371,8 @@ def test_all_args_guided_no_save_echo_param_to_config( mock_confirm, mock_prompt, mock_signer_config_per_function, + mock_companion_stack_manager_helper, mock_sam_function_provider, - mock_get_template_artifacts_format, - mock_get_template_function_resource_ids, mock_get_template_parameters, mock_get_buildable_stacks, mockauth_per_resource, @@ -379,13 +384,20 @@ def test_all_args_guided_no_save_echo_param_to_config( ): 
mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_get_template_artifacts_format.return_value = [IMAGE] + mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] + mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = { "Myparameter": {"Type": "String"}, @@ -399,12 +411,11 @@ def test_all_args_guided_no_save_echo_param_to_config( "guidedParameter", "guided parameter with spaces", "secure", - "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env", ] - mock_confirm.side_effect = [True, False, True, True] + mock_confirm.side_effect = [True, False, True, True, True, True] mock_managed_stack.return_value = "managed-s3-bucket" mock_signer_config_per_function.return_value = ({}, {}) @@ -436,6 +447,7 @@ def test_all_args_guided_no_save_echo_param_to_config( resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, + resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -505,9 +517,8 @@ def 
test_all_args_guided_no_save_echo_param_to_config( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch.object( GuidedConfig, "get_config_ctx", @@ -525,9 +536,8 @@ def test_all_args_guided_no_params_save_config( mock_sam_config, mock_confirm, mock_prompt, + mock_companion_stack_manager_helper, mock_sam_function_provider, - mock_get_template_function_resource_ids, - mock_get_template_artifacts_format, mock_signer_config_per_function, mock_get_template_parameters, mock_managed_stack, @@ -540,13 +550,20 @@ def test_all_args_guided_no_params_save_config( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_get_template_artifacts_format.return_value = [IMAGE] + mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] + mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + 
mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = {} @@ -554,12 +571,11 @@ def test_all_args_guided_no_params_save_config( mock_prompt.side_effect = [ "sam-app", "us-east-1", - "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env", ] - mock_confirm.side_effect = [True, False, True, True] + mock_confirm.side_effect = [True, False, True, True, True, True] mock_get_cmd_names.return_value = ["deploy"] mock_managed_stack.return_value = "managed-s3-bucket" mock_signer_config_per_function.return_value = ({}, {}) @@ -591,6 +607,7 @@ def test_all_args_guided_no_params_save_config( config_env=self.config_env, config_file=self.config_file, signing_profiles=self.signing_profiles, + resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -649,9 +666,8 @@ def test_all_args_guided_no_params_save_config( @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.get_template_parameters") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @patch("samcli.commands.deploy.guided_context.prompt") @@ -663,9 +679,8 @@ def test_all_args_guided_no_params_no_save_config( mock_confirm, mock_prompt, 
mock_signer_config_per_function, + mock_companion_stack_manager_helper, mock_sam_function_provider, - mock_get_template_artifacts_format, - mock_get_template_function_resource_ids, mock_get_template_parameters, mock_get_buildable_stacks, mockauth_per_resource, @@ -677,23 +692,29 @@ def test_all_args_guided_no_params_no_save_config( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_get_template_artifacts_format.return_value = [IMAGE] + mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] + mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] + mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = {} mock_deploy_context.return_value.__enter__.return_value = context_mock mock_prompt.side_effect = [ "sam-app", "us-east-1", - "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), ] - mock_confirm.side_effect = [True, False, True, False] + mock_confirm.side_effect = [True, False, True, False, True, True] mock_managed_stack.return_value = "managed-s3-bucket" mock_signer_config_per_function.return_value = ({}, {}) @@ -727,6 +748,7 @@ def 
test_all_args_guided_no_params_no_save_config( config_file=self.config_file, config_env=self.config_env, signing_profiles=self.signing_profiles, + resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -796,6 +818,7 @@ def test_all_args_resolve_s3( config_file=self.config_file, config_env=self.config_env, signing_profiles=self.signing_profiles, + resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -853,4 +876,5 @@ def test_resolve_s3_and_s3_bucket_both_set(self): config_file=self.config_file, config_env=self.config_env, signing_profiles=self.signing_profiles, + resolve_image_repos=self.resolve_image_repos, ) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py new file mode 100644 index 0000000000..a5173e4b6f --- /dev/null +++ b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py @@ -0,0 +1,93 @@ +from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder +from unittest import TestCase +from unittest.mock import Mock, patch + + +class TestCompanionStackBuilder(TestCase): + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") + def test_building_single_function(self, ecr_repo_mock): + companion_stack_name = "CompanionStackA" + function_a = "FunctionA" + + repo_logical_id = "RepoLogicalIDA" + repo_physical_id = "RepoPhysicalIDA" + repo_output_id = "RepoOutputIDA" + + ecr_repo_instance = ecr_repo_mock.return_value + ecr_repo_instance.logical_id = repo_logical_id + ecr_repo_instance.physical_id = repo_physical_id + ecr_repo_instance.output_logical_id = repo_output_id + + companion_stack = Mock() + companion_stack.stack_name = companion_stack_name + builder = CompanionStackBuilder(companion_stack) + + builder.add_function(function_a) + template = builder.build() + self.assertIn(f"{repo_logical_id}:", 
template) + self.assertIn(f"RepositoryName: {repo_physical_id}", template) + self.assertIn(f"{repo_output_id}:", template) + + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") + def test_building_multiple_functions(self, ecr_repo_mock): + companion_stack_name = "CompanionStackA" + function_prefix = "Function" + function_names = ["A", "B", "C", "D", "E", "F"] + + repo_logical_id_prefix = "RepoLogicalID" + repo_physical_id_prefix = "RepoPhysicalID" + repo_output_id_prefix = "RepoOutputID" + + ecr_repo_instances = list() + for function_name in function_names: + ecr_repo_instance = Mock() + ecr_repo_instance.logical_id = repo_logical_id_prefix + function_name + ecr_repo_instance.physical_id = repo_physical_id_prefix + function_name + ecr_repo_instance.output_logical_id = repo_output_id_prefix + function_name + ecr_repo_instances.append(ecr_repo_instance) + + ecr_repo_mock.side_effect = ecr_repo_instances + + companion_stack = Mock() + companion_stack.stack_name = companion_stack_name + builder = CompanionStackBuilder(companion_stack) + + for function_name in function_names: + builder.add_function(function_prefix + function_name) + template = builder.build() + for function_name in function_names: + self.assertIn(f"{repo_logical_id_prefix + function_name}:", template) + self.assertIn(f"RepositoryName: {repo_physical_id_prefix + function_name}", template) + self.assertIn(f"{repo_output_id_prefix + function_name}:", template) + + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") + def test_mapping_multiple_functions(self, ecr_repo_mock): + companion_stack_name = "CompanionStackA" + function_prefix = "Function" + function_names = ["A", "B", "C", "D", "E", "F"] + + repo_logical_id_prefix = "RepoLogicalID" + repo_physical_id_prefix = "RepoPhysicalID" + repo_output_id_prefix = "RepoOutputID" + + ecr_repo_instances = list() + for function_name in function_names: + ecr_repo_instance = Mock() + 
ecr_repo_instance.logical_id = repo_logical_id_prefix + function_name + ecr_repo_instance.physical_id = repo_physical_id_prefix + function_name + ecr_repo_instance.output_logical_id = repo_output_id_prefix + function_name + ecr_repo_instances.append(ecr_repo_instance) + + ecr_repo_mock.side_effect = ecr_repo_instances + + companion_stack = Mock() + companion_stack.stack_name = companion_stack_name + builder = CompanionStackBuilder(companion_stack) + + for function_name in function_names: + builder.add_function(function_prefix + function_name) + for function_name in function_names: + self.assertIn( + (function_prefix + function_name, ecr_repo_instances[function_names.index(function_name)]), + builder.repo_mapping.items(), + ) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py new file mode 100644 index 0000000000..6be004356d --- /dev/null +++ b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py @@ -0,0 +1,207 @@ +from botocore.exceptions import ClientError +from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager +from unittest import TestCase +from unittest.mock import ANY, Mock, patch + + +class TestCompanionStackManager(TestCase): + def setUp(self): + self.stack_name = "StackA" + self.companion_stack_name = "CompanionStackA" + + self.boto3_client_patch = patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.boto3.client") + self.boto3_client_mock = self.boto3_client_patch.start() + + self.companion_stack_patch = patch( + "samcli.lib.bootstrap.companion_stack.companion_stack_manager.CompanionStack" + ) + self.companion_stack_mock = self.companion_stack_patch.start() + + self.companion_stack_builder_patch = patch( + "samcli.lib.bootstrap.companion_stack.companion_stack_manager.CompanionStackBuilder" + ) + self.companion_stack_builder_mock = self.companion_stack_builder_patch.start() + 
+ self.cfn_client = Mock() + self.ecr_client = Mock() + self.s3_client = Mock() + self.sts_client = Mock() + + self.companion_stack_mock.return_value.stack_name = self.companion_stack_name + self.boto3_client_mock.side_effect = [self.cfn_client, self.ecr_client, self.s3_client, self.sts_client] + self.manager = CompanionStackManager(self.stack_name, "region", "s3_bucket", "s3_prefix") + + def tearDown(self): + self.boto3_client_patch.stop() + self.companion_stack_patch.stop() + self.companion_stack_builder_patch.stop() + + def test_set_functions(self): + function_a = "FunctionA" + function_b = "FunctionB" + + self.manager.set_functions([function_a, function_b]) + + self.companion_stack_builder_mock.return_value.clear_functions.assert_called_once() + self.companion_stack_builder_mock.return_value.add_function.assert_any_call(function_a) + self.companion_stack_builder_mock.return_value.add_function.assert_any_call(function_b) + + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile") + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader") + def test_create_companion_stack( + self, + s3_uploader_mock, + mktempfile_mock, + ): + cfn_waiter = Mock() + self.cfn_client.get_waiter.return_value = cfn_waiter + + self.manager.does_companion_stack_exist = lambda: False + + self.manager.update_companion_stack() + + self.companion_stack_builder_mock.return_value.build.assert_called_once() + s3_uploader_mock.return_value.upload_with_dedup.assert_called_once() + self.cfn_client.create_stack.assert_called_once_with( + StackName=self.companion_stack_name, TemplateURL=ANY, Capabilities=ANY + ) + self.cfn_client.get_waiter.assert_called_once_with("stack_create_complete") + cfn_waiter.wait.assert_called_once_with(StackName=self.companion_stack_name, WaiterConfig=ANY) + + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile") + 
@patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader") + def test_update_companion_stack( + self, + s3_uploader_mock, + mktempfile_mock, + ): + cfn_waiter = Mock() + self.cfn_client.get_waiter.return_value = cfn_waiter + + self.manager.does_companion_stack_exist = lambda: True + + self.manager.update_companion_stack() + + self.companion_stack_builder_mock.return_value.build.assert_called_once() + s3_uploader_mock.return_value.upload_with_dedup.assert_called_once() + self.cfn_client.update_stack.assert_called_once_with( + StackName=self.companion_stack_name, TemplateURL=ANY, Capabilities=ANY + ) + self.cfn_client.get_waiter.assert_called_once_with("stack_update_complete") + cfn_waiter.wait.assert_called_once_with(StackName=self.companion_stack_name, WaiterConfig=ANY) + + def test_delete_companion_stack(self): + cfn_waiter = Mock() + self.cfn_client.get_waiter.return_value = cfn_waiter + + self.manager.delete_companion_stack() + + self.cfn_client.delete_stack.assert_called_once_with(StackName=self.companion_stack_name) + self.cfn_client.get_waiter.assert_called_once_with("stack_delete_complete") + cfn_waiter.wait.assert_called_once_with(StackName=self.companion_stack_name, WaiterConfig=ANY) + + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.boto3.resource") + def test_list_deployed_repos(self, boto3_resource_mock, ecr_repo_mock): + repo_a = "ECRRepoA" + repo_b = "ECRRepoB" + + resource_a = Mock() + resource_a.resource_type = "AWS::ECR::Repository" + resource_a.logical_resource_id = repo_a + resource_b = Mock() + resource_b.resource_type = "AWS::ECR::Repository" + resource_b.logical_resource_id = repo_b + resource_c = Mock() + resource_c.resource_type = "RandomResource" + resources = [resource_a, resource_b, resource_c] + boto3_resource_mock.return_value.Stack.return_value.resource_summaries.all.return_value = resources + + repos = 
self.manager.list_deployed_repos() + self.assertTrue(len(repos) == 2) + ecr_repo_mock.assert_any_call(logical_id=repo_a, physical_id=ANY) + ecr_repo_mock.assert_any_call(logical_id=repo_b, physical_id=ANY) + + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") + def test_get_unreferenced_repos(self, ecr_repo_mock): + repo_a_id = "ECRRepoA" + repo_b_id = "ECRRepoB" + + current_repo_a = Mock() + current_repo_a.logical_id = repo_a_id + current_repos = {"FunctionA": current_repo_a} + + repo_a = Mock() + repo_a.logical_id = repo_a_id + repo_b = Mock() + repo_b.logical_id = repo_b_id + deployed_repos = [repo_a, repo_b] + + self.manager.does_companion_stack_exist = lambda: True + self.manager.list_deployed_repos = lambda: deployed_repos + self.companion_stack_builder_mock.return_value.repo_mapping = current_repos + + unreferenced_repos = self.manager.get_unreferenced_repos() + self.assertEqual(len(unreferenced_repos), 1) + self.assertEqual(unreferenced_repos[0].logical_id, repo_b_id) + + def test_delete_unreferenced_repos(self): + repo_a_id = "ECRRepoA" + repo_b_id = "ECRRepoB" + + repo_a = Mock() + repo_a.physical_id = repo_a_id + repo_b = Mock() + repo_b.physical_id = repo_b_id + unreferenced_repos = [repo_a, repo_b] + + self.manager.get_unreferenced_repos = lambda: unreferenced_repos + + self.manager.delete_unreferenced_repos() + + self.ecr_client.delete_repository.assert_any_call(repositoryName=repo_a_id, force=True) + self.ecr_client.delete_repository.assert_any_call(repositoryName=repo_b_id, force=True) + + def test_sync_repos_exists(self): + self.manager.does_companion_stack_exist = lambda: True + self.manager.get_repository_mapping = lambda: {"a": ""} + self.manager.delete_unreferenced_repos = Mock() + self.manager.update_companion_stack = Mock() + self.manager.delete_companion_stack = Mock() + + self.manager.sync_repos() + self.manager.delete_unreferenced_repos.assert_called_once() + 
self.manager.delete_companion_stack.assert_not_called() + self.manager.update_companion_stack.assert_called_once() + + def test_sync_repos_exists_with_no_repo(self): + self.manager.does_companion_stack_exist = lambda: True + self.manager.get_repository_mapping = lambda: {} + self.manager.delete_unreferenced_repos = Mock() + self.manager.update_companion_stack = Mock() + self.manager.delete_companion_stack = Mock() + + self.manager.sync_repos() + self.manager.delete_unreferenced_repos.assert_called_once() + self.manager.delete_companion_stack.assert_called_once() + self.manager.update_companion_stack.assert_not_called() + + def test_sync_repos_does_not_exist(self): + self.manager.does_companion_stack_exist = lambda: False + self.manager.get_repository_mapping = lambda: {"a": ""} + self.manager.delete_unreferenced_repos = Mock() + self.manager.update_companion_stack = Mock() + self.manager.delete_companion_stack = Mock() + + self.manager.sync_repos() + self.manager.delete_unreferenced_repos.assert_not_called() + self.manager.delete_companion_stack.assert_not_called() + self.manager.update_companion_stack.assert_called_once() + + def test_does_companion_stack_exist_true(self): + self.cfn_client.describe_stacks.return_value = {"a": "a"} + self.assertTrue(self.manager.does_companion_stack_exist()) + + def test_does_companion_stack_exist_false(self): + self.cfn_client.describe_stacks.side_effect = ClientError({}, Mock()) + self.assertFalse(self.manager.does_companion_stack_exist()) From 9fb143e217448541dec63c8bd7dafb764f99a27b Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 15 Apr 2021 15:48:21 -0700 Subject: [PATCH 018/121] Updated UX and Fixed Windows ANSI --- samcli/commands/deploy/command.py | 6 +- samcli/commands/deploy/guided_context.py | 80 ++++++++++--------- samcli/commands/package/command.py | 3 +- samcli/lib/package/stream_cursor_utils.py | 8 ++ .../lib/utils/managed_cloudformation_stack.py | 3 +- 5 files changed, 52 insertions(+), 48 deletions(-) diff 
--git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 70f47e852f..c1133e3427 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -151,11 +151,7 @@ "the output AWS CloudFormation template. YAML is used by default.", ) @click.option( - "--resolve-s3", - required=False, - is_flag=True, - help="Automatically resolve s3 bucket for non-guided deployments." - "Do not use --s3-guided parameter with this option.", + "--resolve-s3", required=False, is_flag=True, help="Automatically resolve s3 bucket for non-guided deployments." ) @click.option( "--resolve-image-repos", diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 35f6364240..8333ddcba5 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -167,9 +167,10 @@ def guided_prompts(self, parameter_override_keys): type=click.STRING, ) + click.echo("\n\tLooking for resources needed for deployment:") s3_bucket = manage_stack(profile=self.profile, region=region) - click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") - click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") + click.echo(f"\t Managed S3 bucket: {s3_bucket}") + click.echo("\t A different default S3 bucket can be set in samconfig.toml") image_repositories = self.prompt_image_repository( stack_name, stacks, self.image_repositories, region, s3_bucket, self.s3_prefix @@ -332,23 +333,21 @@ def prompt_image_repository( stack_name, region, s3_bucket, s3_prefix, self.template_file, image_repositories ) - create_all_repos = GuidedContext.prompt_create_all_repos(manager_helper) + create_all_repos = self.prompt_create_all_repos(manager_helper) if create_all_repos: image_repositories.update(manager_helper.manager.get_repository_mapping()) else: - image_repositories = GuidedContext.prompt_specify_repos( - manager_helper, image_repositories, self.image_repository - ) + image_repositories = 
self.prompt_specify_repos(manager_helper, image_repositories, self.image_repository) manager_helper.update_sepcified_image_repos(image_repositories) - image_repositories = GuidedContext.prompt_delete_unreferenced_repos(manager_helper, image_repositories) + image_repositories = self.prompt_delete_unreferenced_repos(manager_helper, image_repositories) GuidedContext.verify_images_exist_locally(self.function_provider.functions, image_repositories) manager_helper.manager.sync_repos() return image_repositories - @staticmethod def prompt_specify_repos( + self, manager_helper: CompanionStackManagerHelper, image_repositories: Dict[str, str], default_image_repo, @@ -374,7 +373,10 @@ def prompt_specify_repos( """ image_repositories = image_repositories.copy() for function_logical_id in manager_helper.missing_repo_functions: - image_uri = prompt(f"\t\tECR repository for {function_logical_id}:", default=default_image_repo) + image_uri = prompt( + f"\t {self.start_bold}ECR repository for {function_logical_id}{self.end_bold}", + default=default_image_repo, + ) if not is_ecr_url(image_uri): raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") @@ -382,8 +384,7 @@ def prompt_specify_repos( return image_repositories - @staticmethod - def prompt_create_all_repos(manager_helper: CompanionStackManagerHelper) -> bool: + def prompt_create_all_repos(self, manager_helper: CompanionStackManagerHelper) -> bool: """ Prompt whether to create all repos @@ -397,42 +398,47 @@ def prompt_create_all_repos(manager_helper: CompanionStackManagerHelper) -> bool Boolean Returns False if there is no missing function or denied by prompt """ - if not manager_helper.missing_repo_functions: - create_all_repos = False - elif manager_helper.missing_repo_functions == manager_helper.function_logical_ids: - click.echo("\n\t\tImage repositories: Not found.") + if not manager_helper.function_logical_ids: + return False + + if manager_helper.missing_repo_functions == 
manager_helper.function_logical_ids: + click.echo("\t Image repositories: Not found.") click.echo( - "\t\t#Managed repositories will be deleted when " + "\t #Managed repositories will be deleted when " "their functions are removed from the template and deployed" ) - create_all_repos = confirm("\t\tCreate managed ECR repositories for all functions?", default=True) + return confirm( + f"\t {self.start_bold}Create managed ECR repositories for all functions?{self.end_bold}", default=True + ) else: functions_with_repo_count = len(manager_helper.function_logical_ids) - len( manager_helper.missing_repo_functions ) click.echo( - "\n\t\tImage repositories: " + "\t Image repositories: " f"Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)})" - "#Different image repositories can be set in samconfig.toml" + " #Different image repositories can be set in samconfig.toml" ) + + if not manager_helper.missing_repo_functions: + return False + click.echo( - "\t\t#Managed repositories will be deleted when their functions are " + "\t #Managed repositories will be deleted when their functions are " "removed from the template and deployed" ) - create_all_repos = ( + return ( confirm( - "\n\t\tCreate managed ECR repositories for the " - f"{len(manager_helper.missing_repo_functions)} functions without?", + f"\n\t {self.start_bold}Create managed ECR repositories for the " + f"{len(manager_helper.missing_repo_functions)} functions without?{self.end_bold}", default=True, ) if manager_helper.missing_repo_functions else True ) - return create_all_repos - @staticmethod def prompt_delete_unreferenced_repos( - manager_helper: CompanionStackManagerHelper, image_repositories: Dict[str, str] + self, manager_helper: CompanionStackManagerHelper, image_repositories: Dict[str, str] ) -> Dict[str, str]: """ Prompt user for deleting unreferenced companion stack image repos. 
@@ -456,24 +462,23 @@ def prompt_delete_unreferenced_repos( return image_repositories click.echo( - "\t\tChecking for unreferenced ECR repositories to clean-up: " + "\t Checking for unreferenced ECR repositories to clean-up: " f"{len(manager_helper.unreferenced_repos)} found" ) for repo in manager_helper.unreferenced_repos: repo_uri = manager_helper.manager.get_repo_uri(repo) - click.echo(f"\t\t {repo_uri}") + click.echo(f"\t {repo_uri}") delete_repos = confirm( - "\t\tDelete the unreferenced repositories listed above when deploying?", + f"\t {self.start_bold}Delete the unreferenced repositories listed above when deploying?{self.end_bold}", default=False, ) if not delete_repos: - click.echo("\t\tDeployment aborted!") + click.echo("\t Deployment aborted!") click.echo( - "\t\t#The deployment was aborted to prevent " - "unreferenced managed ECR repositories from being deleted." - "\t\t#You may remove repositories from the SAMCLI " + "\t #The deployment was aborted to prevent " + "unreferenced managed ECR repositories from being deleted.\n" + "\t #You may remove repositories from the SAMCLI " "managed stack to retain them and resolve this unreferenced check." - "\t\thttps://docs.aws.amazon.com/serverless-application-model/latest/developerguide/.html" ) raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") @@ -492,19 +497,16 @@ def verify_images_exist_locally(functions: Dict[str, Function], image_repositori image_repositories: Dict[str, str] Image repo dictionary with function logical ID as key and image repo URI as value. 
""" - for resource_id, function_prop in functions.items(): + for _, function_prop in functions.items(): if function_prop.packagetype != IMAGE: continue image = function_prop.imageuri try: - tag = tag_translation(image) + tag_translation(image) except NonLocalImageException: pass except NoImageFoundException as ex: raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex - else: - click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") - click.secho(nl=True) def run(self): diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index cab68b6d88..869b0a1d00 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -121,8 +121,7 @@ def resources_and_properties_help_string(): exc_set=PackageResolveS3AndS3SetError, exc_not_set=PackageResolveS3AndS3NotSetError, ), - help="Automatically resolve s3 bucket for non-guided deployments." - "Do not use --s3-guided parameter with this option.", + help="Automatically resolve s3 bucket for non-guided deployments.", ) @metadata_override_option @signing_profiles_option diff --git a/samcli/lib/package/stream_cursor_utils.py b/samcli/lib/package/stream_cursor_utils.py index 908293c317..ef4dc149c0 100644 --- a/samcli/lib/package/stream_cursor_utils.py +++ b/samcli/lib/package/stream_cursor_utils.py @@ -1,11 +1,19 @@ """ Stream cursor utilities for moving cursor in the terminal. """ +import os # NOTE: ANSI escape codes. # NOTE: Still needs investigation on non terminal environments. 
ESC = "\u001B[" +# Enables ANSI escape codes on Windows +if os.name == "nt": + try: + os.system("color") + except Exception: + pass + def cursor_up(count=1): return ESC + str(count) + "A" diff --git a/samcli/lib/utils/managed_cloudformation_stack.py b/samcli/lib/utils/managed_cloudformation_stack.py index 25973fbc8b..493a4fb48e 100644 --- a/samcli/lib/utils/managed_cloudformation_stack.py +++ b/samcli/lib/utils/managed_cloudformation_stack.py @@ -59,11 +59,10 @@ def _create_or_get_stack(cloudformation_client, stack_name, template_body): ds_resp = cloudformation_client.describe_stacks(StackName=stack_name) stacks = ds_resp["Stacks"] stack = stacks[0] - click.echo("\n\tLooking for resources needed for deployment: Found!") _check_sanity_of_stack(stack, stack_name) return stack["Outputs"] except ClientError: - click.echo("\n\tLooking for resources needed for deployment: Not found.") + pass try: stack = _create_stack( From f01582953ba5eec01d7a4c0dd3843b2a5fe3cd2f Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 15 Apr 2021 18:56:56 -0700 Subject: [PATCH 019/121] Updated Unit Tests --- samcli/commands/deploy/guided_context.py | 46 +- .../companion_stack_manager.py | 4 +- .../companion_stack_manager_helper.py | 23 +- .../bootstrap/companion_stack/data_types.py | 2 +- .../image_repository_validation.py | 2 +- tests/unit/commands/deploy/test_command.py | 65 +-- .../commands/deploy/test_guided_context.py | 395 ++++++++++++++---- .../unit/commands/samconfig/test_samconfig.py | 2 + .../test_companion_stack_manager.py | 46 ++ .../test_companion_stack_manager_helper.py | 51 +++ .../companion_stack/test_data_types.py | 63 +++ .../test_image_repository_validation.py | 5 +- 12 files changed, 540 insertions(+), 164 deletions(-) create mode 100644 tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py create mode 100644 tests/unit/lib/bootstrap/companion_stack/test_data_types.py diff --git a/samcli/commands/deploy/guided_context.py 
b/samcli/commands/deploy/guided_context.py index 8333ddcba5..ecaef332c8 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -376,6 +376,7 @@ def prompt_specify_repos( image_uri = prompt( f"\t {self.start_bold}ECR repository for {function_logical_id}{self.end_bold}", default=default_image_repo, + type=click.STRING, ) if not is_ecr_url(image_uri): raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") @@ -410,32 +411,31 @@ def prompt_create_all_repos(self, manager_helper: CompanionStackManagerHelper) - return confirm( f"\t {self.start_bold}Create managed ECR repositories for all functions?{self.end_bold}", default=True ) - else: - functions_with_repo_count = len(manager_helper.function_logical_ids) - len( - manager_helper.missing_repo_functions - ) - click.echo( - "\t Image repositories: " - f"Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)})" - " #Different image repositories can be set in samconfig.toml" - ) + functions_with_repo_count = len(manager_helper.function_logical_ids) - len( + manager_helper.missing_repo_functions + ) + click.echo( + "\t Image repositories: " + f"Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)})" + " #Different image repositories can be set in samconfig.toml" + ) - if not manager_helper.missing_repo_functions: - return False + if not manager_helper.missing_repo_functions: + return False - click.echo( - "\t #Managed repositories will be deleted when their functions are " - "removed from the template and deployed" - ) - return ( - confirm( - f"\n\t {self.start_bold}Create managed ECR repositories for the " - f"{len(manager_helper.missing_repo_functions)} functions without?{self.end_bold}", - default=True, - ) - if manager_helper.missing_repo_functions - else True + click.echo( + "\t #Managed repositories will be deleted when their functions are " + "removed from the template and deployed" + ) + return 
( + confirm( + f"\t {self.start_bold}Create managed ECR repositories for the " + f"{len(manager_helper.missing_repo_functions)} functions without?{self.end_bold}", + default=True, ) + if manager_helper.missing_repo_functions + else True + ) def prompt_delete_unreferenced_repos( self, manager_helper: CompanionStackManagerHelper, image_repositories: Dict[str, str] diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 546a9917e8..4898e41c01 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -14,10 +14,10 @@ from samcli.lib.package.artifact_exporter import mktempfile from samcli.lib.package.s3_uploader import S3Uploader -if typing.TYPE_CHECKING: +if typing.TYPE_CHECKING: # pragma: no cover from mypy_boto3_cloudformation.client import CloudFormationClient from mypy_boto3_s3.client import S3Client -else: +else: # pragma: no cover CloudFormationClient = object S3Client = object diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 46c2143c6a..612ef611bd 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -1,17 +1,30 @@ """ Help class to bridge CLI functions and CompanionStackManager """ -from typing import Dict +from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo +from typing import Dict, List -from samcli.commands._utils.template import ( - get_template_function_resource_ids, -) +from samcli.commands._utils.template import get_template_function_resource_ids from samcli.lib.utils.packagetype import IMAGE from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager class CompanionStackManagerHelper: - def 
__init__(self, stack_name, region, s3_bucket, s3_prefix, template_file, specified_image_repos): + missing_repo_functions: List[str] + auto_ecr_repo_functions: List[str] + deployed_repos: List[ECRRepo] + deployed_repo_uris: List[str] + unreferenced_repos: List[ECRRepo] + + def __init__( + self, + stack_name: str, + region: str, + s3_bucket: str, + s3_prefix: str, + template_file: str, + specified_image_repos: Dict[str, str], + ): self.function_logical_ids = get_template_function_resource_ids(template_file=template_file, artifact=IMAGE) self.missing_repo_functions = list() self.auto_ecr_repo_functions = list() diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 311ff1326c..d9843d9d67 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -21,7 +21,7 @@ def __init__(self, parent_stack_name: str) -> None: self._parent_stack_name = parent_stack_name self._escaped_parent_stack_name = re.sub(r"[^a-z0-9]", "", self._parent_stack_name.lower()) self._parent_stack_hash = str_checksum(self._parent_stack_name) - self._stack_name = self._parent_stack_name[:104] + "-" + self._parent_stack_hash[:8] + "-CompanionStack" + self._stack_name = f"{self._parent_stack_name[:104]}-{self._parent_stack_hash[:8]}-CompanionStack" @property def parent_stack_name(self) -> str: diff --git a/samcli/lib/cli_validation/image_repository_validation.py b/samcli/lib/cli_validation/image_repository_validation.py index 01377536bf..3d48aac9e8 100644 --- a/samcli/lib/cli_validation/image_repository_validation.py +++ b/samcli/lib/cli_validation/image_repository_validation.py @@ -57,7 +57,7 @@ def wrapped(*args, **kwargs): exception=click.BadOptionUsage( option_name="--image-repositories", ctx=ctx, - message="Missing option '--image-repository', '--image-repositories', or '--resolve_image_repos'", + message="Missing option '--image-repository', '--image-repositories', or 
'--resolve-image-repos'", ), ), Validator( diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index 44adbd014a..d2547cecad 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -50,6 +50,23 @@ def setUp(self): self.resolve_image_repos = False MOCK_SAM_CONFIG.reset_mock() + self.companion_stack_manager_helper_patch = patch( + "samcli.commands.deploy.guided_context.CompanionStackManagerHelper" + ) + self.companion_stack_manager_helper_mock = self.companion_stack_manager_helper_patch.start() + self.companion_stack_manager_helper_mock.return_value.missing_repo_functions = ["HelloWorldFunction"] + self.companion_stack_manager_helper_mock.return_value.function_logical_ids = ["HelloWorldFunction"] + self.companion_stack_manager_helper_mock.return_value.unreferenced_repos = ["HelloWorldFunctionB"] + self.companion_stack_manager_helper_mock.return_value.get_repository_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + self.companion_stack_manager_helper_mock.return_value.remove_unreferenced_repos_from_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + + def tearDown(self): + self.companion_stack_manager_helper_patch.stop() + @patch("samcli.commands.package.command.click") @patch("samcli.commands.package.package_context.PackageContext") @patch("samcli.commands.deploy.command.click") @@ -124,7 +141,6 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") 
@patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @patch("samcli.commands.deploy.guided_context.prompt") @@ -134,7 +150,6 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( mock_confirm, mock_prompt, mock_signer_config_per_function, - mock_companion_stack_manager_helper, mock_sam_function_provider, mock_get_buildable_stacks, mock_get_template_parameters, @@ -146,7 +161,7 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( mock_package_click, ): mock_get_buildable_stacks.return_value = (Mock(), []) - mock_sam_function_provider.return_value = {} + mock_sam_function_provider.return_value.functions = {} context_mock = Mock() mockauth_per_resource.return_value = [("HelloWorldResource1", False), ("HelloWorldResource2", False)] mock_deploy_context.return_value.__enter__.return_value = context_mock @@ -210,7 +225,6 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @patch("samcli.commands.deploy.guided_context.prompt") @@ -222,7 +236,6 @@ def test_all_args_guided( mock_confirm, mock_prompt, mock_signer_config_per_function, - mock_companion_stack_manager_helper, mock_sam_function_provider, mock_get_buildable_stacks, mock_get_template_parameters, @@ -240,15 +253,6 @@ def test_all_args_guided( mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_companion_stack_manager_helper.return_value.missing_repo_functions 
= ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] - mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_deploy_context.return_value.__enter__.return_value = context_mock mock_confirm.side_effect = [True, False, True, True, True, True] @@ -355,7 +359,6 @@ def test_all_args_guided( @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object( GuidedConfig, @@ -371,7 +374,6 @@ def test_all_args_guided_no_save_echo_param_to_config( mock_confirm, mock_prompt, mock_signer_config_per_function, - mock_companion_stack_manager_helper, mock_sam_function_provider, mock_get_template_parameters, mock_get_buildable_stacks, @@ -389,15 +391,6 @@ def test_all_args_guided_no_save_echo_param_to_config( mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.unreferenced_repos = 
["HelloWorldFunctionB"] - mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = { "Myparameter": {"Type": "String"}, @@ -518,7 +511,6 @@ def test_all_args_guided_no_save_echo_param_to_config( @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch.object( GuidedConfig, "get_config_ctx", @@ -536,7 +528,6 @@ def test_all_args_guided_no_params_save_config( mock_sam_config, mock_confirm, mock_prompt, - mock_companion_stack_manager_helper, mock_sam_function_provider, mock_signer_config_per_function, mock_get_template_parameters, @@ -555,15 +546,6 @@ def test_all_args_guided_no_params_save_config( mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] - mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { - "HelloWorldFunction": 
"123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = {} @@ -667,7 +649,6 @@ def test_all_args_guided_no_params_save_config( @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.CompanionStackManagerHelper") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @patch("samcli.commands.deploy.guided_context.prompt") @@ -679,7 +660,6 @@ def test_all_args_guided_no_params_no_save_config( mock_confirm, mock_prompt, mock_signer_config_per_function, - mock_companion_stack_manager_helper, mock_sam_function_provider, mock_get_template_parameters, mock_get_buildable_stacks, @@ -697,15 +677,6 @@ def test_all_args_guided_no_params_no_save_config( mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - mock_companion_stack_manager_helper.return_value.missing_repo_functions = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.function_logical_ids = ["HelloWorldFunction"] - mock_companion_stack_manager_helper.return_value.unreferenced_repos = ["HelloWorldFunctionB"] - mock_companion_stack_manager_helper.return_value.get_repository_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - mock_companion_stack_manager_helper.return_value.remove_unreferenced_repos_from_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } mockauth_per_resource.return_value = [("HelloWorldResource", False)] 
mock_get_template_parameters.return_value = {} mock_deploy_context.return_value.__enter__.return_value = context_mock diff --git a/tests/unit/commands/deploy/test_guided_context.py b/tests/unit/commands/deploy/test_guided_context.py index 14ddcc3d98..bfe61b76ab 100644 --- a/tests/unit/commands/deploy/test_guided_context.py +++ b/tests/unit/commands/deploy/test_guided_context.py @@ -21,34 +21,54 @@ def setUp(self): image_repository=None, image_repositories={"HelloWorldFunction": "image-repo"}, ) + self.companion_stack_manager_helper_patch = patch( + "samcli.commands.deploy.guided_context.CompanionStackManagerHelper" + ) + self.companion_stack_manager_helper_mock = self.companion_stack_manager_helper_patch.start() + self.companion_stack_manager_helper_mock.return_value.missing_repo_functions = ["HelloWorldFunction"] + self.companion_stack_manager_helper_mock.return_value.function_logical_ids = ["HelloWorldFunction"] + self.companion_stack_manager_helper_mock.return_value.unreferenced_repos = ["HelloWorldFunctionB"] + self.companion_stack_manager_helper_mock.return_value.get_repository_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + self.companion_stack_manager_helper_mock.return_value.remove_unreferenced_repos_from_mapping.return_value = { + "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" + } + self.companion_stack_manager_helper_mock.return_value.missing_repo_functions = ["HelloWorldFunction"] + + self.verify_image_patch = patch( + "samcli.commands.deploy.guided_context.GuidedContext.verify_images_exist_locally" + ) + self.verify_image_mock = self.verify_image_patch.start() + + def tearDown(self): + self.companion_stack_manager_helper_patch.stop() + self.verify_image_patch.stop() @patch("samcli.commands.deploy.guided_context.prompt") @patch("samcli.commands.deploy.guided_context.confirm") @patch("samcli.commands.deploy.guided_context.manage_stack") 
@patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_defaults_non_public_resources_zips( self, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, - patchedauth_per_resource, + patched_auth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.functions = {} patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. 
- patchedauth_per_resource.return_value = [ + patched_auth_per_resource.return_value = [ ("HelloWorldFunction", True), ] - patched_confirm.side_effect = [True, False, "", True] + patched_confirm.side_effect = [True, False, "", True, True, True] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) self.gc.guided_prompts(parameter_override_keys=None) @@ -57,6 +77,14 @@ def test_guided_prompts_check_defaults_non_public_resources_zips( call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -73,14 +101,12 @@ def test_guided_prompts_check_defaults_non_public_resources_zips( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_defaults_public_resources_zips( self, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -88,12 +114,11 @@ def test_guided_prompts_check_defaults_public_resources_zips( 
patched_prompt, ): patched_signer_config_per_function.return_value = (None, None) - patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.functions = {} patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, True, True] patched_manage_stack.return_value = "managed_s3_stack" self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. @@ -105,6 +130,14 @@ def test_guided_prompts_check_defaults_public_resources_zips( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -121,8 +154,6 @@ def test_guided_prompts_check_defaults_public_resources_zips( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.tag_translation") @@ -133,8 +164,6 @@ def 
test_guided_prompts_check_defaults_public_resources_images( patched_tag_translation, patched_click_secho, patched_sam_function_provider, - patched_get_template_artifacts_format, - mock_get_template_function_resource_ids, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -142,23 +171,20 @@ def test_guided_prompts_check_defaults_public_resources_images( patched_prompt, ): - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] patched_signer_config_per_function.return_value = (None, None) patched_tag_translation.return_value = "helloworld-123456-v1" patched_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) - patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) patched_prompt.side_effect = [ "sam-app", "region", - "123456789012.dkr.ecr.region.amazonaws.com/myrepo", "CAPABILITY_IAM", ] # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, True, True] patched_manage_stack.return_value = "managed_s3_stack" self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. 
@@ -170,6 +196,14 @@ def test_guided_prompts_check_defaults_public_resources_images( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -177,10 +211,6 @@ def test_guided_prompts_check_defaults_public_resources_images( expected_prompt_calls = [ call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), - call( - f"\t{self.gc.start_bold}Image Repository for HelloWorldFunction{self.gc.end_bold}", - default="image-repo", - ), call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), ] self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) @@ -188,10 +218,6 @@ def test_guided_prompts_check_defaults_public_resources_images( print(expected_prompt_calls) print(patched_prompt.call_args_list) expected_click_secho_calls = [ - call( - f"\t helloworld:v1 to be pushed to 123456789012.dkr.ecr.region.amazonaws.com/myrepo:helloworld-123456-v1" - ), - call(nl=True), call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), ] @@ -202,8 +228,6 @@ def test_guided_prompts_check_defaults_public_resources_images( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - 
@patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @@ -212,16 +236,12 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( patched_signer_config_per_function, patched_click_secho, patched_sam_function_provider, - mock_get_template_function_resource_ids, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] - patched_sam_function_provider.return_value = MagicMock( functions={ "HelloWorldFunction": MagicMock( @@ -229,17 +249,16 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( ) } ) - patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) patched_prompt.side_effect = [ "sam-app", "region", - "123456789012.dkr.ecr.region.amazonaws.com/myrepo", "CAPABILITY_IAM", + "abc", ] # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, True, True] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) self.gc.guided_prompts(parameter_override_keys=None) @@ -252,6 +271,14 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -259,16 +286,125 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( expected_prompt_calls = [ call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), + call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), + ] + self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) + # Now to check click secho outputs and no references to images pushed. 
+ expected_click_secho_calls = [ + call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), + call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), + ] + self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) + + @patch("samcli.commands.deploy.guided_context.prompt") + @patch("samcli.commands.deploy.guided_context.confirm") + @patch("samcli.commands.deploy.guided_context.manage_stack") + @patch("samcli.commands.deploy.guided_context.auth_per_resource") + @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.click.secho") + @patch("samcli.commands.deploy.guided_context.signer_config_per_function") + def test_guided_prompts_images_illegal_image_uri( + self, + patched_signer_config_per_function, + patched_click_secho, + patched_sam_function_provider, + patched_get_buildable_stacks, + patchedauth_per_resource, + patched_manage_stack, + patched_confirm, + patched_prompt, + ): + + # Set ImageUri to be None, the sam app was never built. + patched_sam_function_provider.return_value = MagicMock( + functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} + ) + patched_get_buildable_stacks.return_value = (Mock(), []) + patched_prompt.side_effect = [ + "sam-app", + "region", + "CAPABILITY_IAM", + "illegaluri", + ] + # Series of inputs to confirmations so that full range of questions are asked. 
+ patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] + patched_confirm.side_effect = [True, False, True, False, False, True] + patched_manage_stack.return_value = "managed_s3_stack" + patched_signer_config_per_function.return_value = ({}, {}) + with self.assertRaises(GuidedDeployFailedError): + self.gc.guided_prompts(parameter_override_keys=None) + + @patch("samcli.commands.deploy.guided_context.prompt") + @patch("samcli.commands.deploy.guided_context.confirm") + @patch("samcli.commands.deploy.guided_context.manage_stack") + @patch("samcli.commands.deploy.guided_context.auth_per_resource") + @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.click.secho") + @patch("samcli.commands.deploy.guided_context.signer_config_per_function") + def test_guided_prompts_images_missing_repo( + self, + patched_signer_config_per_function, + patched_click_secho, + patched_sam_function_provider, + patched_get_buildable_stacks, + patchedauth_per_resource, + patched_manage_stack, + patched_confirm, + patched_prompt, + ): + + self.companion_stack_manager_helper_mock.return_value.function_logical_ids = [ + "HelloWorldFunction", + "GoodbyeWorldFunction", + ] + # Set ImageUri to be None, the sam app was never built. + patched_sam_function_provider.return_value = MagicMock( + functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} + ) + patched_get_buildable_stacks.return_value = (Mock(), []) + patched_prompt.side_effect = [ + "sam-app", + "region", + "CAPABILITY_IAM", + ] + # Series of inputs to confirmations so that full range of questions are asked. 
+ patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] + patched_confirm.side_effect = [True, False, True, False, True, True] + patched_manage_stack.return_value = "managed_s3_stack" + patched_signer_config_per_function.return_value = ({}, {}) + + self.gc.guided_prompts(parameter_override_keys=None) + # Now to check for all the defaults on confirmations. + expected_confirmation_calls = [ + call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), + call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), + call( + f"\t{self.gc.start_bold}HelloWorldFunction may not have authorization defined, Is this okay?{self.gc.end_bold}", + default=False, + ), + call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for the 1 functions without?{self.gc.end_bold}", + default=True, + ), call( - f"\t{self.gc.start_bold}Image Repository for HelloWorldFunction{self.gc.end_bold}", - default="image-repo", + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, ), + ] + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + + # Now to check for all the defaults on prompts. + expected_prompt_calls = [ + call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), + call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), ] self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) # Now to check click secho outputs and no references to images pushed. 
expected_click_secho_calls = [ - call(nl=True), call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), ] @@ -279,41 +415,107 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - def test_guided_prompts_images_no_image_uri( + def test_guided_prompts_images_no_repo( self, patched_signer_config_per_function, patched_click_secho, patched_sam_function_provider, - mock_get_template_function_resource_ids, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] + self.companion_stack_manager_helper_mock.return_value.function_logical_ids = [] # Set ImageUri to be None, the sam app was never built. patched_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} ) - patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) patched_prompt.side_effect = [ "sam-app", "region", + "CAPABILITY_IAM", "123456789012.dkr.ecr.region.amazonaws.com/myrepo", + ] + # Series of inputs to confirmations so that full range of questions are asked. 
+ patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] + patched_confirm.side_effect = [True, False, True, False, True, True] + patched_manage_stack.return_value = "managed_s3_stack" + patched_signer_config_per_function.return_value = ({}, {}) + + self.gc.guided_prompts(parameter_override_keys=None) + # Now to check for all the defaults on confirmations. + expected_confirmation_calls = [ + call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), + call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), + call( + f"\t{self.gc.start_bold}HelloWorldFunction may not have authorization defined, Is this okay?{self.gc.end_bold}", + default=False, + ), + call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), + ] + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + + # Now to check for all the defaults on prompts. + expected_prompt_calls = [ + call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), + call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), + call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), + call( + f"\t {self.gc.start_bold}ECR repository for HelloWorldFunction{self.gc.end_bold}", + default=None, + type=click.STRING, + ), + ] + self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) + # Now to check click secho outputs and no references to images pushed. 
+ expected_click_secho_calls = [ + call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), + call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), + ] + self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) + + @patch("samcli.commands.deploy.guided_context.prompt") + @patch("samcli.commands.deploy.guided_context.confirm") + @patch("samcli.commands.deploy.guided_context.manage_stack") + @patch("samcli.commands.deploy.guided_context.auth_per_resource") + @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") + @patch("samcli.commands.deploy.guided_context.click.secho") + @patch("samcli.commands.deploy.guided_context.signer_config_per_function") + def test_guided_prompts_images_deny_deletion( + self, + patched_signer_config_per_function, + patched_click_secho, + patched_sam_function_provider, + patched_get_buildable_stacks, + patchedauth_per_resource, + patched_manage_stack, + patched_confirm, + patched_prompt, + ): + # Set ImageUri to be None, the sam app was never built. + patched_sam_function_provider.return_value = MagicMock( + functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} + ) + patched_get_buildable_stacks.return_value = (Mock(), []) + patched_prompt.side_effect = [ + "sam-app", + "region", "CAPABILITY_IAM", ] # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, True, False] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) with self.assertRaises(GuidedDeployFailedError): @@ -324,8 +526,6 @@ def test_guided_prompts_images_no_image_uri( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") - @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @@ -334,30 +534,26 @@ def test_guided_prompts_images_blank_image_repository( patched_signer_config_per_function, patched_click_secho, patched_sam_function_provider, - mock_get_template_function_resource_ids, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] - patched_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="mysamapp:v1")} ) - patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) # set Image repository to be blank. patched_prompt.side_effect = [ "sam-app", "region", "", + "", ] # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, False, True] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) with self.assertRaises(GuidedDeployFailedError): @@ -382,7 +578,6 @@ def test_guided_prompts_images_blank_image_repository( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_with_given_capabilities( @@ -390,7 +585,6 @@ def test_guided_prompts_with_given_capabilities( given_capabilities, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -401,13 +595,21 @@ def test_guided_prompts_with_given_capabilities( patched_get_buildable_stacks.return_value = (Mock(), []) self.gc.capabilities = given_capabilities # Series of inputs to confirmations so that full range of questions are asked. - patched_confirm.side_effect = [True, False, "", True] + patched_confirm.side_effect = [True, False, "", True, True, True] self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. 
expected_confirmation_calls = [ call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -425,27 +627,24 @@ def test_guided_prompts_with_given_capabilities( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_configuration_file_prompt_calls( self, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.fucntions = {} patched_get_buildable_stacks.return_value = (Mock(), []) patched_signer_config_per_function.return_value = ({}, {}) # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, True, ""] + patched_confirm.side_effect = [True, False, True, True, True, True] patched_manage_stack.return_value = "managed_s3_stack" self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. @@ -457,6 +656,14 @@ def test_guided_prompts_check_configuration_file_prompt_calls( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -482,26 +689,23 @@ def test_guided_prompts_check_configuration_file_prompt_calls( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_parameter_from_template( self, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.functions = {} patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full 
range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, True, True] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) parameter_override_from_template = {"MyTestKey": {"Default": "MyTemplateDefaultVal"}} @@ -516,6 +720,14 @@ def test_guided_prompts_check_parameter_from_template( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -536,26 +748,23 @@ def test_guided_prompts_check_parameter_from_template( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_parameter_from_cmd_or_config( self, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.functions = {} patched_get_buildable_stacks.return_value = 
(Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, ""] + patched_confirm.side_effect = [True, False, True, False, True, True] patched_signer_config_per_function.return_value = ({}, {}) patched_manage_stack.return_value = "managed_s3_stack" parameter_override_from_template = {"MyTestKey": {"Default": "MyTemplateDefaultVal"}} @@ -570,6 +779,14 @@ def test_guided_prompts_check_parameter_from_cmd_or_config( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -601,14 +818,12 @@ def test_guided_prompts_check_parameter_from_cmd_or_config( @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") def test_guided_prompts_with_code_signing( self, given_sign_packages_flag, given_code_signing_configs, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_signer_config_per_function, patched_get_buildable_stacks, patchedauth_per_resource, @@ -619,12 +834,11 @@ def test_guided_prompts_with_code_signing( ): # given_sign_packages_flag = True # given_code_signing_configs = ({"MyFunction1"}, {"MyLayer1": {"MyFunction1"}, "MyLayer2": {"MyFunction1"}}) - 
patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.functions = {} patched_signer_config_per_function.return_value = given_code_signing_configs patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. - patched_confirm.side_effect = [True, False, given_sign_packages_flag, "", True] + patched_confirm.side_effect = [True, False, given_sign_packages_flag, "", True, True, True] self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. expected_confirmation_calls = [ @@ -635,6 +849,14 @@ def test_guided_prompts_with_code_signing( default=True, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -669,14 +891,12 @@ def test_guided_prompts_with_code_signing( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_default_config_region( self, patched_signer_config_per_function, patched_sam_function_provider, - patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -684,12 +904,11 @@ def 
test_guided_prompts_check_default_config_region( patched_prompt, patched_get_session, ): - patched_sam_function_provider.return_value = {} - patched_get_template_artifacts_format.return_value = [ZIP] + patched_sam_function_provider.return_value.functions = {} patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, True, ""] + patched_confirm.side_effect = [True, False, True, True, True, True] patched_signer_config_per_function.return_value = ({}, {}) patched_manage_stack.return_value = "managed_s3_stack" patched_get_session.return_value.get_config_variable.return_value = "default_config_region" @@ -705,6 +924,14 @@ def test_guided_prompts_check_default_config_region( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), + call( + f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", + default=True, + ), + call( + f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", + default=False, + ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index 369156f014..ce0d13c702 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -595,6 +595,7 @@ def test_deploy(self, do_cli_mock, get_template_artifacts_format_mock): False, "samconfig.toml", "default", + False, ) @patch("samcli.commands.deploy.command.do_cli") @@ -703,6 +704,7 @@ def test_deploy_different_parameter_override_format(self, do_cli_mock, get_templ False, "samconfig.toml", "default", + False, ) @patch("samcli.commands.logs.command.do_cli") diff --git 
a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py index 6be004356d..71afef6467 100644 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py +++ b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py @@ -117,11 +117,35 @@ def test_list_deployed_repos(self, boto3_resource_mock, ecr_repo_mock): resources = [resource_a, resource_b, resource_c] boto3_resource_mock.return_value.Stack.return_value.resource_summaries.all.return_value = resources + self.manager.does_companion_stack_exist = lambda: True + repos = self.manager.list_deployed_repos() self.assertTrue(len(repos) == 2) ecr_repo_mock.assert_any_call(logical_id=repo_a, physical_id=ANY) ecr_repo_mock.assert_any_call(logical_id=repo_b, physical_id=ANY) + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.boto3.resource") + def test_list_deployed_repos_does_not_exist(self, boto3_resource_mock, ecr_repo_mock): + repo_a = "ECRRepoA" + repo_b = "ECRRepoB" + + resource_a = Mock() + resource_a.resource_type = "AWS::ECR::Repository" + resource_a.logical_resource_id = repo_a + resource_b = Mock() + resource_b.resource_type = "AWS::ECR::Repository" + resource_b.logical_resource_id = repo_b + resource_c = Mock() + resource_c.resource_type = "RandomResource" + resources = [resource_a, resource_b, resource_c] + boto3_resource_mock.return_value.Stack.return_value.resource_summaries.all.return_value = resources + + self.manager.does_companion_stack_exist = lambda: False + + repos = self.manager.list_deployed_repos() + self.assertEqual(repos, []) + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") def test_get_unreferenced_repos(self, ecr_repo_mock): repo_a_id = "ECRRepoA" @@ -145,6 +169,28 @@ def test_get_unreferenced_repos(self, ecr_repo_mock): 
self.assertEqual(len(unreferenced_repos), 1) self.assertEqual(unreferenced_repos[0].logical_id, repo_b_id) + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") + def test_get_unreferenced_repos_does_not_exist(self, ecr_repo_mock): + repo_a_id = "ECRRepoA" + repo_b_id = "ECRRepoB" + + current_repo_a = Mock() + current_repo_a.logical_id = repo_a_id + current_repos = {"FunctionA": current_repo_a} + + repo_a = Mock() + repo_a.logical_id = repo_a_id + repo_b = Mock() + repo_b.logical_id = repo_b_id + deployed_repos = [repo_a, repo_b] + + self.manager.does_companion_stack_exist = lambda: False + self.manager.list_deployed_repos = lambda: deployed_repos + self.companion_stack_builder_mock.return_value.repo_mapping = current_repos + + unreferenced_repos = self.manager.get_unreferenced_repos() + self.assertEqual(unreferenced_repos, []) + def test_delete_unreferenced_repos(self): repo_a_id = "ECRRepoA" repo_b_id = "ECRRepoB" diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py new file mode 100644 index 0000000000..332f0a2b21 --- /dev/null +++ b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py @@ -0,0 +1,51 @@ +from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper +from unittest import TestCase +from unittest.mock import Mock, patch + + +class TestCompanionStackManagerHelper(TestCase): + def setUp(self): + self.stack_name = "stackname" + self.function_a_id = "FunctionA" + self.function_b_id = "FunctionB" + self.function_c_id = "FunctionC" + self.get_template_function_resource_ids_patch = patch( + "samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper.get_template_function_resource_ids" + ) + self.get_template_function_resource_ids_mock = self.get_template_function_resource_ids_patch.start() + 
self.get_template_function_resource_ids_mock.return_value = [self.function_a_id, self.function_b_id] + + self.companion_stack_manager_patch = patch( + "samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper.CompanionStackManager" + ) + self.companion_stack_manager_mock = self.companion_stack_manager_patch.start().return_value + self.companion_stack_manager_mock.list_deployed_repos.return_value = [] + self.companion_stack_manager_mock.get_repo_uri.return_value = "" + self.companion_stack_manager_mock.is_repo_uri.return_value = True + self.companion_stack_manager_mock.get_unreferenced_repos.return_value = [Mock()] + + self.manager_helper = CompanionStackManagerHelper( + self.stack_name, "region", "s3_bucket", "s3_prefix", "template_file", {} + ) + + def tearDown(self): + self.get_template_function_resource_ids_patch.stop() + self.companion_stack_manager_patch.stop() + + def test_init(self): + self.assertEqual(self.manager_helper.missing_repo_functions, [self.function_a_id, self.function_b_id]) + self.assertEqual(len(self.manager_helper.unreferenced_repos), 1) + + def test_update_sepcified_image_repos(self): + self.manager_helper.update_sepcified_image_repos({"FunctionA": "abc"}) + self.assertEqual(self.manager_helper.missing_repo_functions, [self.function_b_id]) + self.assertEqual(len(self.manager_helper.unreferenced_repos), 1) + + def test_remove_unreferenced_repos_from_mapping(self): + self.companion_stack_manager_mock.get_repo_uri = lambda x: "repo_uri" + + image_repositories = {self.function_a_id: "a", self.function_b_id: "b", self.function_c_id: "repo_uri"} + init_image_repositories = image_repositories.copy() + output_image_repositories = self.manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) + self.assertEqual(init_image_repositories, image_repositories) + self.assertEqual(output_image_repositories, {self.function_a_id: "a", self.function_b_id: "b"}) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_data_types.py 
b/tests/unit/lib/bootstrap/companion_stack/test_data_types.py new file mode 100644 index 0000000000..4e6e6fd954 --- /dev/null +++ b/tests/unit/lib/bootstrap/companion_stack/test_data_types.py @@ -0,0 +1,63 @@ +from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo +from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder +from unittest import TestCase +from unittest.mock import Mock, patch + + +class TestCompanionStack(TestCase): + def setUp(self): + self.check_sum = "checksum" + self.parent_stack_name = "Parent-Stack" + self.check_sum_patch = patch("samcli.lib.bootstrap.companion_stack.data_types.str_checksum") + self.check_sum_mock = self.check_sum_patch.start() + self.check_sum_mock.return_value = self.check_sum + self.companion_stack = CompanionStack(self.parent_stack_name) + + def tearDown(self): + self.check_sum_patch.stop() + + def test_parent_stack_name(self): + self.assertEqual(self.companion_stack.parent_stack_name, self.parent_stack_name) + + def test_escaped_parent_stack_name(self): + self.assertEqual(self.companion_stack.escaped_parent_stack_name, "parentstack") + + def test_parent_stack_hash(self): + self.assertEqual(self.companion_stack.parent_stack_hash, "checksum") + + def test_stack_name(self): + self.assertEqual(self.companion_stack.stack_name, "Parent-Stack-checksum-CompanionStack") + + +class TestECRRepo(TestCase): + def setUp(self): + self.check_sum = "qwertyuiop" + self.parent_stack_name = "Parent-Stack" + self.function_id = "FunctionA" + + self.check_sum_patch = patch("samcli.lib.bootstrap.companion_stack.data_types.str_checksum") + self.check_sum_mock = self.check_sum_patch.start() + self.check_sum_mock.return_value = self.check_sum + + self.companion_stack_mock = Mock() + self.companion_stack_mock.escaped_parent_stack_name = "parentstackname" + self.companion_stack_mock.parent_stack_hash = "abcdefghijklmn" + self.ecr_repo = ECRRepo(companion_stack=self.companion_stack_mock, 
function_logical_id=self.function_id) + + def tearDown(self): + self.check_sum_patch.stop() + + def test_logical_id(self): + self.assertEqual(self.ecr_repo.logical_id, "FunctionAqwertyuiRepo") + + def test_physical_id(self): + self.assertEqual(self.ecr_repo.physical_id, "parentstacknameabcdefgh/functionaqwertyuirepo") + + def test_output_logical_id(self): + self.assertEqual(self.ecr_repo.output_logical_id, "FunctionAqwertyuiOut") + + def test_get_repo_uri(self): + self.assertEqual( + self.ecr_repo.get_repo_uri("12345", "us-west-2"), + "12345.dkr.ecr.us-west-2.amazonaws.com/parentstacknameabcdefgh/functionaqwertyuirepo", + ) diff --git a/tests/unit/lib/cli_validation/test_image_repository_validation.py b/tests/unit/lib/cli_validation/test_image_repository_validation.py index 9773cbc9d0..9df0e83727 100644 --- a/tests/unit/lib/cli_validation/test_image_repository_validation.py +++ b/tests/unit/lib/cli_validation/test_image_repository_validation.py @@ -131,7 +131,10 @@ def test_image_repository_validation_failure_IMAGE_missing_image_repositories( with self.assertRaises(click.BadOptionUsage) as ex: self.foobar() - self.assertIn("Missing option '--image-repository' or '--image-repositories'", ex.exception.message) + self.assertIn( + "Missing option '--image-repository', '--image-repositories', or '--resolve-image-repos'", + ex.exception.message, + ) @patch("samcli.lib.cli_validation.image_repository_validation.click") @patch("samcli.lib.cli_validation.image_repository_validation.get_template_function_resource_ids") From cdef3a61167d88b044776544221eda4eb3360309 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 15 Apr 2021 18:57:18 -0700 Subject: [PATCH 020/121] Fixed Import Order --- .../companion_stack/companion_stack_manager_helper.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 
612ef611bd..993df2fb6b 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -1,9 +1,10 @@ """ Help class to bridge CLI functions and CompanionStackManager """ -from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo from typing import Dict, List +from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo + from samcli.commands._utils.template import get_template_function_resource_ids from samcli.lib.utils.packagetype import IMAGE from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager From 6dd8c6716e139f59068c517e22d221b3d702cba6 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 15 Apr 2021 19:13:35 -0700 Subject: [PATCH 021/121] Added Ignore Import Check --- .../lib/bootstrap/companion_stack/companion_stack_manager.py | 1 + tests/regression/deploy/regression_deploy_base.py | 3 +++ 2 files changed, 4 insertions(+) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 4898e41c01..281824972c 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -14,6 +14,7 @@ from samcli.lib.package.artifact_exporter import mktempfile from samcli.lib.package.s3_uploader import S3Uploader +# pylint: disable=E0401 if typing.TYPE_CHECKING: # pragma: no cover from mypy_boto3_cloudformation.client import CloudFormationClient from mypy_boto3_s3.client import S3Client diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py index 9c482d7a3c..2154ad6910 100644 --- a/tests/regression/deploy/regression_deploy_base.py +++ b/tests/regression/deploy/regression_deploy_base.py @@ -42,6 +42,7 @@ def get_deploy_command_list( tags=None, profile=None, region=None, + 
resolve_image_repos=False, ): command_list = self.base_command(base=base) @@ -79,6 +80,8 @@ def get_deploy_command_list( command_list = command_list + ["--region", str(region)] if profile: command_list = command_list + ["--profile", str(profile)] + if resolve_image_repos: + command_list = command_list + ["--resolve-image-repos"] return command_list From addd5a16f7704785b0d6d3faff165053d517199f Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Thu, 15 Apr 2021 22:35:00 -0700 Subject: [PATCH 022/121] Added Integration Tests --- tests/integration/deploy/deploy_integ_base.py | 3 + .../integration/deploy/test_deploy_command.py | 88 ++++++++++++++++++- 2 files changed, 87 insertions(+), 4 deletions(-) diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py index 234463a30b..5fc2a56664 100644 --- a/tests/integration/deploy/deploy_integ_base.py +++ b/tests/integration/deploy/deploy_integ_base.py @@ -45,6 +45,7 @@ def get_deploy_command_list( resolve_s3=False, config_file=None, signing_profiles=None, + resolve_image_repos=False, ): command_list = [self.base_command(), "deploy"] @@ -98,6 +99,8 @@ def get_deploy_command_list( command_list = command_list + ["--config-file", str(config_file)] if signing_profiles: command_list = command_list + ["--signing-profiles", str(signing_profiles)] + if resolve_image_repos: + command_list = command_list + ["--resolve-image-repos"] return command_list diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 9653aefd57..dc4df95368 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -1,4 +1,5 @@ import os +from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack import shutil import tempfile import uuid @@ -6,6 +7,7 @@ from unittest import skipIf import boto3 +from botocore.exceptions import ClientError import docker from parameterized import 
parameterized @@ -46,7 +48,8 @@ def setUpClass(cls): DeployIntegBase.setUpClass() def setUp(self): - self.cf_client = boto3.client("cloudformation") + self.cfn_client = boto3.client("cloudformation") + self.ecr_client = boto3.client("ecr") self.sns_arn = os.environ.get("AWS_SNS") self.stack_names = [] time.sleep(CFN_SLEEP) @@ -57,7 +60,8 @@ def tearDown(self): for stack_name in self.stack_names: # because of the termination protection, do not delete aws-sam-cli-managed-default stack if stack_name != SAM_CLI_STACK_NAME: - self.cf_client.delete_stack(StackName=stack_name) + self._delete_companion_stack(self._stack_name_to_companion_stack(stack_name)) + self.cfn_client.delete_stack(StackName=stack_name) super().tearDown() @parameterized.expand(["aws-serverless-function.yaml"]) @@ -189,6 +193,33 @@ def test_no_package_and_deploy_with_s3_bucket_all_args_image_repositories(self, deploy_process_execute = run_command(deploy_command_list) self.assertEqual(deploy_process_execute.process.returncode, 0) + @parameterized.expand(["aws-serverless-function-image.yaml"]) + def test_no_package_and_deploy_with_s3_bucket_all_args_resolve_image_repos(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + resolve_image_repos=True, + ) + + deploy_process_execute = run_command(deploy_command_list) + self.assertEqual(deploy_process_execute.process.returncode, 0) + @parameterized.expand(["aws-serverless-function.yaml"]) def test_no_package_and_deploy_with_s3_bucket_and_no_confirm_changeset(self, template_file): template_path = self.test_data_path.joinpath(template_file) @@ -543,7 +574,7 @@ def test_deploy_guided_zip(self, template_file): os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @parameterized.expand(["aws-serverless-function-image.yaml"]) - def test_deploy_guided_image(self, template_file): + def test_deploy_guided_image_auto(self, template_file): template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) @@ -553,7 +584,7 @@ def test_deploy_guided_image(self, template_file): deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) deploy_process_execute = run_command_with_input( - deploy_command_list, f"{stack_name}\n\n{self.ecr_repo_name}\n\n\ny\n\n\n\n\n\n".encode() + deploy_command_list, f"{stack_name}\n\n\n\ny\n\n\ny\n\n\n\n".encode() ) # Deploy should succeed with a managed stack @@ -562,6 +593,34 @@ def test_deploy_guided_image(self, template_file): # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + @parameterized.expand(["aws-serverless-function-image.yaml"]) + def test_deploy_guided_image_specify(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + 
stack_name = self._method_to_stack_name(self.id()) + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = run_command_with_input( + deploy_command_list, f"{stack_name}\n\n\n\ny\n\n\n\nn\n{self.ecr_repo_name}\n\n\n\n".encode() + ) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.process.returncode, 0) + # Verify companion stack does not exist + try: + self.cfn_client.describe_stacks(StackName=self._stack_name_to_companion_stack(stack_name)) + except ClientError: + pass + else: + self.fail("Companion stack was created. This should not happen with specifying image repos.") + + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + @parameterized.expand(["aws-serverless-function.yaml"]) def test_deploy_guided_set_parameter(self, template_file): template_path = self.test_data_path.joinpath(template_file) @@ -764,3 +823,24 @@ def _method_to_stack_name(self, method_name): """Method expects method name which can be a full path. 
Eg: test.integration.test_deploy_command.method_name""" method_name = method_name.split(".")[-1] return f"{method_name.replace('_', '-')}-{CFN_PYTHON_VERSION_SUFFIX}" + + def _stack_name_to_companion_stack(self, stack_name): + return CompanionStack(stack_name).stack_name + + def _delete_companion_stack(self, companion_stack_name): + repos = list() + try: + self.cfn_client.describe_stacks(StackName=companion_stack_name) + except ClientError: + return + stack = boto3.resource("cloudformation").Stack(companion_stack_name) + resources = stack.resource_summaries.all() + for resource in resources: + if resource.resource_type == "AWS::ECR::Repository": + repos.append(resource.physical_resource_id) + for repo in repos: + try: + self.ecr_client.delete_repository(repositoryName=repo, force=True) + except self.ecr_client.exceptions.RepositoryNotFoundException: + pass + self.cfn_client.delete_stack(StackName=companion_stack_name) From edf778da89d46af9a26bcc89ed72c970f3913ec0 Mon Sep 17 00:00:00 2001 From: Cosh_ Date: Fri, 16 Apr 2021 12:52:39 -0700 Subject: [PATCH 023/121] Updated help text. Co-authored-by: Chris Rehn --- samcli/commands/deploy/command.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index c1133e3427..aa57011757 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -151,14 +151,14 @@ "the output AWS CloudFormation template. YAML is used by default.", ) @click.option( - "--resolve-s3", required=False, is_flag=True, help="Automatically resolve s3 bucket for non-guided deployments." + "--resolve-s3", required=False, is_flag=True, help="Automatically resolve S3 bucket for non-guided deployments." ) @click.option( "--resolve-image-repos", required=False, is_flag=True, - help="Automatically create and delete ECR repos for image based functions in non-guided deployments." 
- "Auto created image repos will be deleted if the corresponding functions are removed.", + help="Automatically create and delete ECR repositories for image-based functions in non-guided deployments. " + "Automatically created image repositories will be deleted if the corresponding functions are removed.", ) @metadata_override_option @notification_arns_override_option From f8244a45e0f4e2087c953e3c888c0b2e8539ec1b Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Fri, 16 Apr 2021 14:22:48 -0700 Subject: [PATCH 024/121] Added Comments for Name Generation --- samcli/lib/bootstrap/companion_stack/data_types.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index d9843d9d67..1a3c682130 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -21,6 +21,9 @@ def __init__(self, parent_stack_name: str) -> None: self._parent_stack_name = parent_stack_name self._escaped_parent_stack_name = re.sub(r"[^a-z0-9]", "", self._parent_stack_name.lower()) self._parent_stack_hash = str_checksum(self._parent_stack_name) + # There is max 128 characters limit on the length of stack name. 
+ # Using MD5 to avoid collision after truncating
+ # 104 + 1 + 8 + 15 = 128 max char
 self._stack_name = f"{self._parent_stack_name[:104]}-{self._parent_stack_hash[:8]}-CompanionStack"

 @property
@@ -96,6 +99,9 @@ def __init__(
 @property
 def logical_id(self) -> Optional[str]:
 if self._logical_id is None and self._function_logical_id and self._function_md5:
+ # MD5 is used to avoid two different Lambda Functions having the same escaped name
+ # For example: Helloworld and HELLO-WORLD
+ # 52 + 8 + 4 = 64 max char
 self._logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Repo"
 return self._logical_id

@@ -107,6 +113,7 @@ def physical_id(self) -> Optional[str]:
 and self._function_md5
 and self._escaped_function_logical_id
 ):
+ # 128 + 8 + 1 + 64 + 8 + 4 = 213 max char
 self._physical_id = (
 self._companion_stack.escaped_parent_stack_name
 + self._companion_stack.parent_stack_hash[:8]

From 0214950b833640346a6320c8b956db198af9db19 Mon Sep 17 00:00:00 2001
From: Wilton Wang
Date: Mon, 19 Apr 2021 10:27:12 -0700
Subject: [PATCH 025/121] Updated Image Option Validator

---
 .../image_repository_validation.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/samcli/lib/cli_validation/image_repository_validation.py b/samcli/lib/cli_validation/image_repository_validation.py
index 3d48aac9e8..20db8301a6 100644
--- a/samcli/lib/cli_validation/image_repository_validation.py
+++ b/samcli/lib/cli_validation/image_repository_validation.py
@@ -12,7 +12,7 @@ def image_repository_validation(func):
 """
 Wrapper Validation function that will run last after the all cli parmaters have been loaded
- to check for conditions surrounding `--image-repository` and `--image-repositories`. The
+ to check for conditions surrounding `--image-repository`, `--image-repositories`, and `--resolve-image-repos`. The
 reason they are done last instead of in callback functions, is because the options
 depend on each other, and this breaks cyclic dependencies.
@@ -30,7 +30,7 @@ def wrapped(*args, **kwargs): ctx.params.get("t", False) or ctx.params.get("template_file", False) or ctx.params.get("template", False) ) - # Check if `--image-repository` or `--image-repositories` are required by + # Check if `--image-repository`, `--image-repositories`, or `--resolve-image-repos` are required by # looking for resources that have an IMAGE based packagetype. required = any( @@ -50,6 +50,15 @@ def wrapped(*args, **kwargs): "Do you have both specified in the command or in a configuration file?", ), ), + Validator( + validation_function=lambda: image_repository and resolve_image_repos, + exception=click.BadOptionUsage( + option_name="--resolve-image-repos", + ctx=ctx, + message="Both '--resolve-image-repos' and '--image-repository' cannot be provided. " + "Do you have both specified in the command or in a configuration file?", + ), + ), Validator( validation_function=lambda: not guided and not (image_repository or image_repositories or resolve_image_repos) @@ -70,7 +79,8 @@ def wrapped(*args, **kwargs): exception=click.BadOptionUsage( option_name="--image-repositories", ctx=ctx, - message="Incomplete list of function logical ids specified for '--image-repositories'", + message="Incomplete list of function logical ids specified for '--image-repositories'. 
" + "You can also add --resolve-image-repos to automatically create missing repositories.", ), ), ] From 7b5a46c6991d2d0627c725953d6be5aa397ff2c2 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 19 Apr 2021 13:22:01 -0700 Subject: [PATCH 026/121] Updated CompanionStackBuilder to Use Dict instead of String --- .../companion_stack_builder.py | 160 +++++++++--------- .../test_companion_stack_builder.py | 12 +- 2 files changed, 86 insertions(+), 86 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py index 539288bc1c..85280c2513 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py @@ -1,65 +1,13 @@ """ Companion stack template builder """ -from typing import Dict +import json -# pylint: disable=W0402 -from string import Template +from typing import Dict from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo from samcli import __version__ as VERSION -_STACK_TEMPLATE = Template( - """ -AWSTemplateFormatVersion : '2010-09-09' -Transform: AWS::Serverless-2016-10-31 -Description: AWS SAM CLI Managed ECR Repo Stack -Metadata: - SamCliInfo: $sam_cli_version - CompanionStackname: $companion_stack_name - -Resources: -$resources -Outputs: -$outputs -""" -) - -_REPO_TEMPLATE = Template( - """ - $repo_logical_id: - Type: AWS::ECR::Repository - Properties: - RepositoryName: $repo_name - Tags: - - Key: ManagedStackSource - Value: AwsSamCli - - Key: AwsSamCliCompanionStack - Value: $companion_stack_name - - RepositoryPolicyText: - Version: "2012-10-17" - Statement: - - - Sid: AllowLambdaSLR - Effect: Allow - Principal: - Service: - - "lambda.amazonaws.com" - Action: - - "ecr:GetDownloadUrlForLayer" - - "ecr:GetRepositoryPolicy" - - "ecr:BatchGetImage" -""" -) - -_OUTPUT_TEMPLATE = Template( - """ - $repo_output_logical_id: - Value: !Sub 
$${AWS::AccountId}.dkr.ecr.$${AWS::Region}.$${AWS::URLSuffix}/$${$repo_logical_id} -""" -) - class CompanionStackBuilder: """ @@ -94,33 +42,85 @@ def build(self) -> str: str CFN template for companions stack """ - repo_templates = list() - repo_output_templates = list() - companion_stack_name = self._companion_stack.stack_name + template_dict = self._build_template_dict() for _, ecr_repo in self._repo_mapping.items(): - repo_logical_id = ecr_repo.logical_id - repo_name = ecr_repo.physical_id - repo_output_logical_id = ecr_repo.output_logical_id - - repo_template = _REPO_TEMPLATE.substitute( - repo_logical_id=repo_logical_id, repo_name=repo_name, companion_stack_name=companion_stack_name - ) - repo_templates.append(repo_template) - repo_output_template = _OUTPUT_TEMPLATE.substitute( - repo_output_logical_id=repo_output_logical_id, repo_logical_id=repo_logical_id - ) - repo_output_templates.append(repo_output_template) - repo_templates_string = "".join(repo_templates) - repo_output_templates_string = "".join(repo_output_templates) - - stack_template_string = _STACK_TEMPLATE.substitute( - sam_cli_version=VERSION, - companion_stack_name=companion_stack_name, - resources=repo_templates_string, - outputs=repo_output_templates_string, - ) - - return stack_template_string + template_dict["Resources"][ecr_repo.logical_id] = self._build_repo_dict(ecr_repo) + template_dict["Outputs"][ecr_repo.output_logical_id] = CompanionStackBuilder._build_output_dict(ecr_repo) + + return json.dumps(template_dict) + + def _build_template_dict(self) -> Dict: + """ + Build Companion stack template dictionary with Resources and Outputs not filled + Returns + ------- + dict + Companion stack template dictionary + """ + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Transform": "AWS::Serverless-2016-10-31", + "Description": "AWS SAM CLI Managed ECR Repo Stack", + "Metadata": {"SamCliInfo": VERSION, "CompanionStackname": self._companion_stack.stack_name}, + "Resources": {}, + 
"Outputs": {}, + } + return template + + def _build_repo_dict(self, repo: ECRRepo) -> Dict: + """ + Build a single ECR repo resource dictionary + + Parameters + ---------- + repo + ECR repo that will be turned into CFN resource + + Returns + ------- + dict + ECR repo resource dictionary + """ + return { + "Type": "AWS::ECR::Repository", + "Properties": { + "RepositoryName": repo.physical_id, + "Tags": [ + {"Key": "ManagedStackSource", "Value": "AwsSamCli"}, + {"Key": "AwsSamCliCompanionStack", "Value": self._companion_stack.stack_name}, + ], + "RepositoryPolicyText": { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowLambdaSLR", + "Effect": "Allow", + "Principal": {"Service": ["lambda.amazonaws.com"]}, + "Action": ["ecr:GetDownloadUrlForLayer", "ecr:GetRepositoryPolicy", "ecr:BatchGetImage"], + } + ], + }, + }, + } + + @staticmethod + def _build_output_dict(repo: ECRRepo) -> Dict: + """ + Build a single ECR repo output resource dictionary + + Parameters + ---------- + repo + ECR repo that will be turned into CFN output resource + + Returns + ------- + dict + ECR repo output resource dictionary + """ + return { + "Value": f"!Sub ${{AWS::AccountId}}.dkr.ecr.${{AWS::Region}}.${{AWS::URLSuffix}}/${{{repo.logical_id}}}" + } @property def repo_mapping(self) -> Dict[str, ECRRepo]: diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py index a5173e4b6f..f395ebee95 100644 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py +++ b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py @@ -24,9 +24,9 @@ def test_building_single_function(self, ecr_repo_mock): builder.add_function(function_a) template = builder.build() - self.assertIn(f"{repo_logical_id}:", template) - self.assertIn(f"RepositoryName: {repo_physical_id}", template) - self.assertIn(f"{repo_output_id}:", template) + 
self.assertIn(f'"{repo_logical_id}":', template) + self.assertIn(f'"RepositoryName": "{repo_physical_id}"', template) + self.assertIn(f'"{repo_output_id}":', template) @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") def test_building_multiple_functions(self, ecr_repo_mock): @@ -56,9 +56,9 @@ def test_building_multiple_functions(self, ecr_repo_mock): builder.add_function(function_prefix + function_name) template = builder.build() for function_name in function_names: - self.assertIn(f"{repo_logical_id_prefix + function_name}:", template) - self.assertIn(f"RepositoryName: {repo_physical_id_prefix + function_name}", template) - self.assertIn(f"{repo_output_id_prefix + function_name}:", template) + self.assertIn(f'"{repo_logical_id_prefix + function_name}":', template) + self.assertIn(f'"RepositoryName": "{repo_physical_id_prefix + function_name}"', template) + self.assertIn(f'"{repo_output_id_prefix + function_name}":', template) @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") def test_mapping_multiple_functions(self, ecr_repo_mock): From 5edd4951331cb75d01ea2ddfd66e862060326a01 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Fri, 23 Apr 2021 13:02:02 -0700 Subject: [PATCH 027/121] Fixed Argument Ordering --- samcli/commands/deploy/command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index aa57011757..f05f011100 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -198,9 +198,9 @@ def cli( confirm_changeset, signing_profiles, resolve_s3, + resolve_image_repos, config_file, config_env, - resolve_image_repos, ): """ `sam deploy` command entry point From 9ffd4bd83db0fd0f6a69a8979155f300ebce35c2 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 12:36:31 -0700 Subject: [PATCH 028/121] Added Mapping Information to Help Text --- samcli/commands/deploy/command.py | 1 + 1 
file changed, 1 insertion(+) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index f05f011100..6905d8b528 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -158,6 +158,7 @@ required=False, is_flag=True, help="Automatically create and delete ECR repositories for image-based functions in non-guided deployments. " + "A companion stack containing ECR repos for each function will be deployed along with the template stack. " "Automatically created image repositories will be deleted if the corresponding functions are removed.", ) @metadata_override_option From 79ef739a27d03b63ec6dd41d6ab18f94c7f601ee Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 13:11:39 -0700 Subject: [PATCH 029/121] Updated delete_unreferenced_repos Doc String --- samcli/lib/bootstrap/companion_stack/companion_stack_manager.py | 1 + 1 file changed, 1 insertion(+) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 281824972c..c860150fd6 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -172,6 +172,7 @@ def get_unreferenced_repos(self) -> List[ECRRepo]: def delete_unreferenced_repos(self) -> None: """ Blocking call to delete all deployed ECR repos that are unreferenced by a function + If repo does not exist, this will simply skip it. 
""" repos = self.get_unreferenced_repos() for repo in repos: From 90b5b4c3729a537b3e45270aeed466905d232b35 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 13:14:00 -0700 Subject: [PATCH 030/121] Updated sync_repos Doc String --- .../lib/bootstrap/companion_stack/companion_stack_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index c860150fd6..69473932c5 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -184,8 +184,8 @@ def delete_unreferenced_repos(self) -> None: def sync_repos(self) -> None: """ Blocking call to sync companion stack with the following actions - Create/Update companion stack. - Deletes unreferenced repos. + Creates the stack if it does not exist, and updates it if it does. + Deletes unreferenced repos if they exist. Deletes companion stack if there isn't any repo left. 
""" exists = self.does_companion_stack_exist() From 4157cb91b02dbde401328c1724bd8c3faa61d67d Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 13:21:36 -0700 Subject: [PATCH 031/121] Added Justification for ECR Repo Physical ID --- samcli/lib/bootstrap/companion_stack/data_types.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 1a3c682130..162acf9ce0 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -113,6 +113,8 @@ def physical_id(self) -> Optional[str]: and self._function_md5 and self._escaped_function_logical_id ): + # The physical ID is constructed with escaped_stack_name + stack_md5[:8] as prefix/path and + # followed by escaped_lambda_logical_id + function_md5[:8] + "repo" to show linkage between the function and the repo # 128 + 8 + 1 + 64 + 8 + 4 = 213 max char self._physical_id = ( self._companion_stack.escaped_parent_stack_name From 5d84813dab6e1a90a4140320e0de52ac80722e36 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 13:42:12 -0700 Subject: [PATCH 032/121] Refactored to be Less Coupled --- samcli/commands/deploy/guided_context.py | 68 ++++++++++-------------- 1 file changed, 27 insertions(+), 41 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index ecaef332c8..c1e7e4e5b4 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -333,15 +333,21 @@ def prompt_image_repository( stack_name, region, s3_bucket, s3_prefix, self.template_file, image_repositories ) - create_all_repos = self.prompt_create_all_repos(manager_helper) + create_all_repos = self.prompt_create_all_repos( + manager_helper.function_logical_ids, manager_helper.missing_repo_functions + ) if create_all_repos: 
image_repositories.update(manager_helper.manager.get_repository_mapping()) else: image_repositories = self.prompt_specify_repos(manager_helper, image_repositories, self.image_repository) manager_helper.update_sepcified_image_repos(image_repositories) - image_repositories = self.prompt_delete_unreferenced_repos(manager_helper, image_repositories) - GuidedContext.verify_images_exist_locally(self.function_provider.functions, image_repositories) + self.prompt_delete_unreferenced_repos( + [manager_helper.manager.get_repo_uri(repo) for repo in manager_helper.unreferenced_repos] + ) + + image_repositories = manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) + GuidedContext.verify_images_exist_locally(self.function_provider.functions) manager_helper.manager.sync_repos() return image_repositories @@ -385,24 +391,26 @@ def prompt_specify_repos( return image_repositories - def prompt_create_all_repos(self, manager_helper: CompanionStackManagerHelper) -> bool: + def prompt_create_all_repos(self, functions: List[str], functions_without_repo: List[str]) -> bool: """ Prompt whether to create all repos Parameters ---------- - manager_helper: CompanionStackManagerHelper - Instance of CompanionStackManagerHelper + functions: List[str] + List of function logical IDs that are image based + functions_without_repo: List[str] + List of function logical IDs that do not have an ECR image repo specified Returns ------- Boolean Returns False if there is no missing function or denied by prompt """ - if not manager_helper.function_logical_ids: + if not functions: return False - if manager_helper.missing_repo_functions == manager_helper.function_logical_ids: + if functions == functions_without_repo: click.echo("\t Image repositories: Not found.") click.echo( "\t #Managed repositories will be deleted when " @@ -411,16 +419,14 @@ def prompt_create_all_repos(self, manager_helper: CompanionStackManagerHelper) - return confirm( f"\t {self.start_bold}Create managed ECR 
repositories for all functions?{self.end_bold}", default=True ) - functions_with_repo_count = len(manager_helper.function_logical_ids) - len( - manager_helper.missing_repo_functions - ) + functions_with_repo_count = len(functions) - len(functions_without_repo) click.echo( "\t Image repositories: " - f"Found ({functions_with_repo_count} of {len(manager_helper.function_logical_ids)})" + f"Found ({functions_with_repo_count} of {len(functions)})" " #Different image repositories can be set in samconfig.toml" ) - if not manager_helper.missing_repo_functions: + if not functions_without_repo: return False click.echo( @@ -430,16 +436,14 @@ def prompt_create_all_repos(self, manager_helper: CompanionStackManagerHelper) - return ( confirm( f"\t {self.start_bold}Create managed ECR repositories for the " - f"{len(manager_helper.missing_repo_functions)} functions without?{self.end_bold}", + f"{len(functions_without_repo)} functions without?{self.end_bold}", default=True, ) - if manager_helper.missing_repo_functions + if functions_without_repo else True ) - def prompt_delete_unreferenced_repos( - self, manager_helper: CompanionStackManagerHelper, image_repositories: Dict[str, str] - ) -> Dict[str, str]: + def prompt_delete_unreferenced_repos(self, unreferenced_repo_uris: List[str]) -> None: """ Prompt user for deleting unreferenced companion stack image repos. Throws GuidedDeployFailedError if delete repos has been denied by the user. @@ -447,26 +451,13 @@ def prompt_delete_unreferenced_repos( Parameters ---------- - manager_helper: CompanionStackManagerHelper - Instance of CompanionStackManagerHelper - - image_repositories: Dict[str, str] - Current image repo dictionary with function logical ID as key and image repo URI as value. - Returns - ------- - Dict[str, str] - Updated image repo dictionary with unreferenced repos removed + unreferenced_repo_uris: List[str] + List of unreferenced image repos that need to be deleted. 
""" - if not manager_helper.unreferenced_repos: - return image_repositories - click.echo( - "\t Checking for unreferenced ECR repositories to clean-up: " - f"{len(manager_helper.unreferenced_repos)} found" - ) - for repo in manager_helper.unreferenced_repos: - repo_uri = manager_helper.manager.get_repo_uri(repo) + click.echo("\t Checking for unreferenced ECR repositories to clean-up: " f"{len(unreferenced_repo_uris)} found") + for repo_uri in unreferenced_repo_uris: click.echo(f"\t {repo_uri}") delete_repos = confirm( f"\t {self.start_bold}Delete the unreferenced repositories listed above when deploying?{self.end_bold}", @@ -482,10 +473,8 @@ def prompt_delete_unreferenced_repos( ) raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") - return manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) - @staticmethod - def verify_images_exist_locally(functions: Dict[str, Function], image_repositories: Dict[str, str]) -> None: + def verify_images_exist_locally(functions: Dict[str, Function]) -> None: """ Verify all images associated with deploying functions exist locally. @@ -493,9 +482,6 @@ def verify_images_exist_locally(functions: Dict[str, Function], image_repositori ---------- functions: Dict[str, Function] Dictionary of functions in the stack to be deployed with key as their logical ID. - - image_repositories: Dict[str, str] - Image repo dictionary with function logical ID as key and image repo URI as value. 
""" for _, function_prop in functions.items(): if function_prop.packagetype != IMAGE: From ce355cfabb4987b4314b2e7f52669d6edc24901a Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 13:46:38 -0700 Subject: [PATCH 033/121] Refactored for prompt_specify_repos --- samcli/commands/deploy/guided_context.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index c1e7e4e5b4..21d5f838af 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -339,7 +339,7 @@ def prompt_image_repository( if create_all_repos: image_repositories.update(manager_helper.manager.get_repository_mapping()) else: - image_repositories = self.prompt_specify_repos(manager_helper, image_repositories, self.image_repository) + image_repositories = self.prompt_specify_repos(manager_helper.missing_repo_functions, image_repositories) manager_helper.update_sepcified_image_repos(image_repositories) self.prompt_delete_unreferenced_repos( @@ -354,34 +354,29 @@ def prompt_image_repository( def prompt_specify_repos( self, - manager_helper: CompanionStackManagerHelper, + functions_without_repos: List[str], image_repositories: Dict[str, str], - default_image_repo, ) -> Dict[str, str]: """ Show prompts for each function that isn't associated with a image repo Parameters ---------- - manager_helper: CompanionStackManagerHelper - Instance of CompanionStackManagerHelper + functions_without_repos: List[str] + List of functions without associating repos image_repositories: Dict[str, str] Current image repo dictionary with function logical ID as key and image repo URI as value. - default_image_repo: str - Default image repo URI to be shown for each function prompt. 
- Returns ------- Dict[str, str] Updated image repo dictionary with values(image repo URIs) filled by user input """ image_repositories = image_repositories.copy() - for function_logical_id in manager_helper.missing_repo_functions: + for function_logical_id in functions_without_repos: image_uri = prompt( f"\t {self.start_bold}ECR repository for {function_logical_id}{self.end_bold}", - default=default_image_repo, type=click.STRING, ) if not is_ecr_url(image_uri): From 366b7f7d28148a12614e251e263a9bf855dae00f Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 13:50:04 -0700 Subject: [PATCH 034/121] Fixed Unit Test --- tests/unit/commands/deploy/test_guided_context.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/commands/deploy/test_guided_context.py b/tests/unit/commands/deploy/test_guided_context.py index bfe61b76ab..f7f8aaf681 100644 --- a/tests/unit/commands/deploy/test_guided_context.py +++ b/tests/unit/commands/deploy/test_guided_context.py @@ -472,7 +472,6 @@ def test_guided_prompts_images_no_repo( call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), call( f"\t {self.gc.start_bold}ECR repository for HelloWorldFunction{self.gc.end_bold}", - default=None, type=click.STRING, ), ] From c653dff1efc1e85a593b7029844b0b79df5988b7 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 14:06:25 -0700 Subject: [PATCH 035/121] Moved WaiterConfig Out of Methods --- .../bootstrap/companion_stack/companion_stack_manager.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 69473932c5..edceb82958 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -41,6 +41,8 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix): 
self._companion_stack = CompanionStack(stack_name) self._builder = CompanionStackBuilder(self._companion_stack) self._boto_config = Config(region_name=region if region else None) + self._update_stack_waiter_config = {"Delay": 5, "MaxAttempts": 240} + self._delete_stack_waiter_config = {"Delay": 5, "MaxAttempts": 120} self._s3_bucket = s3_bucket self._s3_prefix = s3_prefix try: @@ -96,7 +98,6 @@ def update_companion_stack(self) -> None: ) template_url = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) - waiter_config = {"Delay": 10, "MaxAttempts": 120} exists = self.does_companion_stack_exist() if exists: @@ -110,7 +111,7 @@ def update_companion_stack(self) -> None: ) waiter = self._cfn_client.get_waiter("stack_create_complete") - waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) # type: ignore + waiter.wait(StackName=stack_name, WaiterConfig=self._update_stack_waiter_config) # type: ignore def delete_companion_stack(self): """ @@ -118,9 +119,8 @@ def delete_companion_stack(self): """ stack_name = self._companion_stack.stack_name waiter = self._cfn_client.get_waiter("stack_delete_complete") - waiter_config = {"Delay": 10, "MaxAttempts": 60} self._cfn_client.delete_stack(StackName=stack_name) - waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + waiter.wait(StackName=stack_name, WaiterConfig=self._delete_stack_waiter_config) def list_deployed_repos(self) -> List[ECRRepo]: """ From 4be0e119c205e6d203e62b2d860790db26943e12 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 14:07:09 -0700 Subject: [PATCH 036/121] Updated Typing --- samcli/lib/bootstrap/companion_stack/companion_stack_manager.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index edceb82958..f329355ae4 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ 
b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -32,6 +32,8 @@ class CompanionStackManager: _companion_stack: CompanionStack _builder: CompanionStackBuilder _boto_config: Config + _update_stack_waiter_config: Dict[str, int] + _delete_stack_waiter_config: Dict[str, int] _s3_bucket: str _s3_prefix: str _cfn_client: CloudFormationClient From c18ad4139b6b68c70c0a1f80e43983b3fa50be3f Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 14:22:08 -0700 Subject: [PATCH 037/121] Updated Managed S3 Template to be Dict --- samcli/lib/bootstrap/bootstrap.py | 90 +++++++++++++++---------------- 1 file changed, 43 insertions(+), 47 deletions(-) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 81c30c7748..4283133606 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -32,51 +32,47 @@ def manage_stack(profile, region): def _get_stack_template(): gc = GlobalConfig() - info = {"version": __version__, "installationId": gc.installation_id if gc.installation_id else "unknown"} + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Transform": "AWS::Serverless-2016-10-31", + "Description": "Managed Stack for AWS SAM CLI", + "Metadata": { + "SamCliInfo": { + "version": __version__, + "installationId": gc.installation_id if gc.installation_id else "unknown", + } + }, + "Resources": { + "SamCliSourceBucket": { + "Type": "AWS::S3::Bucket", + "Properties": { + "VersioningConfiguration": {"Status": "Enabled"}, + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + }, + }, + "SamCliSourceBucketBucketPolicy": { + "Type": "AWS::S3::BucketPolicy", + "Properties": { + "Bucket": "!Ref SamCliSourceBucket", + "PolicyDocument": { + "Statement": [ + { + "Action": ["s3:GetObject"], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + ["arn:", "!Ref AWS::Partition", ":s3:::", "!Ref SamCliSourceBucket", "/*"], + ] + }, + "Principal": {"Service": 
"serverlessrepo.amazonaws.com"}, + } + ] + }, + }, + }, + }, + "Outputs": {"SourceBucket": {"Value": "!Ref SamCliSourceBucket"}}, + } - template = """ - AWSTemplateFormatVersion : '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Managed Stack for AWS SAM CLI - - Metadata: - SamCliInfo: {info} - - Resources: - SamCliSourceBucket: - Type: AWS::S3::Bucket - Properties: - VersioningConfiguration: - Status: Enabled - Tags: - - Key: ManagedStackSource - Value: AwsSamCli - - SamCliSourceBucketBucketPolicy: - Type: AWS::S3::BucketPolicy - Properties: - Bucket: !Ref SamCliSourceBucket - PolicyDocument: - Statement: - - - Action: - - "s3:GetObject" - Effect: "Allow" - Resource: - Fn::Join: - - "" - - - - "arn:" - - !Ref AWS::Partition - - ":s3:::" - - !Ref SamCliSourceBucket - - "/*" - Principal: - Service: serverlessrepo.amazonaws.com - - Outputs: - SourceBucket: - Value: !Ref SamCliSourceBucket - """ - - return template.format(info=json.dumps(info)) + return json.dumps(template) From 191309a56bb2407a5cdba6d77f9a16a54963a1d5 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 14:59:33 -0700 Subject: [PATCH 038/121] Fixed Typo --- .../bootstrap/companion_stack/companion_stack_manager_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 993df2fb6b..04e26d0fef 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -1,5 +1,5 @@ """ - Help class to bridge CLI functions and CompanionStackManager + Helper class to bridge CLI functions and CompanionStackManager """ from typing import Dict, List From bfdf4a7c89854f68ed9ee0fd9257efcb51e005a3 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 15:09:39 -0700 Subject: [PATCH 039/121] Added Comments for 
_save_image_repositories --- samcli/commands/deploy/guided_config.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/samcli/commands/deploy/guided_config.py b/samcli/commands/deploy/guided_config.py index ef2c156b54..a236c18808 100644 --- a/samcli/commands/deploy/guided_config.py +++ b/samcli/commands/deploy/guided_config.py @@ -102,6 +102,9 @@ def _save_parameter_overrides(self, cmd_names, config_env, parameter_overrides, samconfig.put(cmd_names, self.section, "parameter_overrides", " ".join(_params), env=config_env) def _save_image_repositories(self, cmd_names, config_env, samconfig, image_repositories): + # Check for None only as empty dict should be saved to config + # This can happen in an edge case where all companion stack repos are deleted and + # the config needs to be updated. if image_repositories is not None: _image_repositories = [f"{key}={value}" for key, value in image_repositories.items()] samconfig.put(cmd_names, self.section, "image_repositories", _image_repositories, env=config_env) From e03c48aa6663e7deadbbbb32a03a3c3fa94ff5b3 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 15:12:35 -0700 Subject: [PATCH 040/121] Fixed Pylint Issue --- samcli/lib/bootstrap/companion_stack/data_types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py index 162acf9ce0..2493fd349c 100644 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ b/samcli/lib/bootstrap/companion_stack/data_types.py @@ -114,7 +114,8 @@ def physical_id(self) -> Optional[str]: and self._escaped_function_logical_id ): # The physical ID is constructed with escaped_stack_name + stack_md5[:8] as prefix/path and - # followed by escaped_lambda_logical_id + function_md5[:8] + "repo" to show linkage between the function and the repo + # followed by escaped_lambda_logical_id + function_md5[:8] + "repo" to show + # the linkage between the function 
and the repo # 128 + 8 + 1 + 64 + 8 + 4 = 213 max char self._physical_id = ( self._companion_stack.escaped_parent_stack_name From 85ced27ef097fdbacb8879fe1f48c429b3c8018a Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 26 Apr 2021 23:54:00 -0700 Subject: [PATCH 041/121] Added Missing Check for unreferenced_repo_uris --- samcli/commands/deploy/guided_context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 21d5f838af..8323803e89 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -414,6 +414,7 @@ def prompt_create_all_repos(self, functions: List[str], functions_without_repo: return confirm( f"\t {self.start_bold}Create managed ECR repositories for all functions?{self.end_bold}", default=True ) + functions_with_repo_count = len(functions) - len(functions_without_repo) click.echo( "\t Image repositories: " @@ -450,6 +451,8 @@ def prompt_delete_unreferenced_repos(self, unreferenced_repo_uris: List[str]) -> unreferenced_repo_uris: List[str] List of unreferenced image repos that need to be deleted. 
""" + if not unreferenced_repo_uris: + return click.echo("\t Checking for unreferenced ECR repositories to clean-up: " f"{len(unreferenced_repo_uris)} found") for repo_uri in unreferenced_repo_uris: From 7693d9aea90c6b9badb43d2b69548eb66f19e6c0 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 3 May 2021 23:19:51 -0700 Subject: [PATCH 042/121] Updated Variable Name --- samcli/commands/deploy/guided_context.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 8323803e89..0a85a0b6e2 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -326,31 +326,33 @@ def prompt_image_repository( Dict[str, str] A dictionary contains image function logical ID as key, image repository as value. """ - image_repositories = image_repositories.copy() if image_repositories is not None else {} + updated_repositories = image_repositories.copy() if image_repositories is not None else {} self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) manager_helper = CompanionStackManagerHelper( - stack_name, region, s3_bucket, s3_prefix, self.template_file, image_repositories + stack_name, region, s3_bucket, s3_prefix, self.template_file, updated_repositories ) create_all_repos = self.prompt_create_all_repos( manager_helper.function_logical_ids, manager_helper.missing_repo_functions ) if create_all_repos: - image_repositories.update(manager_helper.manager.get_repository_mapping()) + updated_repositories.update(manager_helper.manager.get_repository_mapping()) else: - image_repositories = self.prompt_specify_repos(manager_helper.missing_repo_functions, image_repositories) - manager_helper.update_sepcified_image_repos(image_repositories) + updated_repositories = self.prompt_specify_repos( + manager_helper.missing_repo_functions, updated_repositories + ) + 
manager_helper.update_sepcified_image_repos(updated_repositories) self.prompt_delete_unreferenced_repos( [manager_helper.manager.get_repo_uri(repo) for repo in manager_helper.unreferenced_repos] ) - image_repositories = manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) + updated_repositories = manager_helper.remove_unreferenced_repos_from_mapping(updated_repositories) GuidedContext.verify_images_exist_locally(self.function_provider.functions) manager_helper.manager.sync_repos() - return image_repositories + return updated_repositories def prompt_specify_repos( self, @@ -373,7 +375,7 @@ def prompt_specify_repos( Dict[str, str] Updated image repo dictionary with values(image repo URIs) filled by user input """ - image_repositories = image_repositories.copy() + updated_repositories = image_repositories.copy() for function_logical_id in functions_without_repos: image_uri = prompt( f"\t {self.start_bold}ECR repository for {function_logical_id}{self.end_bold}", @@ -382,9 +384,9 @@ def prompt_specify_repos( if not is_ecr_url(image_uri): raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") - image_repositories[function_logical_id] = image_uri + updated_repositories[function_logical_id] = image_uri - return image_repositories + return updated_repositories def prompt_create_all_repos(self, functions: List[str], functions_without_repo: List[str]) -> bool: """ From 6393418aedbabbdde218e611e2bc01c875c3ea32 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 3 May 2021 23:20:03 -0700 Subject: [PATCH 043/121] Fixed Typo --- .../companion_stack/companion_stack_manager_helper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py index 04e26d0fef..660989606b 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ 
b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py @@ -32,10 +32,10 @@ def __init__( self.manager = CompanionStackManager(stack_name, region, s3_bucket, s3_prefix) self.deployed_repos = self.manager.list_deployed_repos() self.deployed_repo_uris = [self.manager.get_repo_uri(repo) for repo in self.deployed_repos] - self.update_sepcified_image_repos(specified_image_repos) + self.update_specified_image_repos(specified_image_repos) self.unreferenced_repos = self.manager.get_unreferenced_repos() - def update_sepcified_image_repos(self, specified_image_repos: Dict[str, str]) -> None: + def update_specified_image_repos(self, specified_image_repos: Dict[str, str]) -> None: """ Update list of image repos specified for each function. updates missing_repo_functions and auto_ecr_repo_functions accordingly. From 7411f9f576e463f560883573196b8770c5bc741b Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Mon, 3 May 2021 23:21:18 -0700 Subject: [PATCH 044/121] Updated Windows Check to Use platform.system() --- samcli/lib/package/stream_cursor_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samcli/lib/package/stream_cursor_utils.py b/samcli/lib/package/stream_cursor_utils.py index ef4dc149c0..c8a037f4ab 100644 --- a/samcli/lib/package/stream_cursor_utils.py +++ b/samcli/lib/package/stream_cursor_utils.py @@ -2,13 +2,14 @@ Stream cursor utilities for moving cursor in the terminal. """ import os +import platform # NOTE: ANSI escape codes. # NOTE: Still needs investigation on non terminal environments. 
ESC = "\u001B[" # Enables ANSI escape codes on Windows -if os.name == "nt": +if platform.system().lower() == "windows": try: os.system("color") except Exception: From f3b5bcb18183bc53ac03f6bd988ec1715fa36099 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Tue, 4 May 2021 00:30:59 -0700 Subject: [PATCH 045/121] Updated update_companion_stack Logic --- .../lib/bootstrap/companion_stack/companion_stack_manager.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index f329355ae4..6801b9b18c 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -84,6 +84,9 @@ def update_companion_stack(self) -> None: Blocking call to create or update the companion stack based on current functions Companion stack template will be updated to the s3 bucket first before deployment """ + if not self._builder.repo_mapping: + return + stack_name = self._companion_stack.stack_name template = self._builder.build() @@ -107,7 +110,7 @@ def update_companion_stack(self) -> None: StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] ) waiter = self._cfn_client.get_waiter("stack_update_complete") - elif self._builder.repo_mapping: + else: self._cfn_client.create_stack( StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] ) From 3575581bf57a16dba9410658d9fc2bd77b297f5e Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Tue, 4 May 2021 00:43:43 -0700 Subject: [PATCH 046/121] Fixed Comment Typo --- samcli/lib/bootstrap/companion_stack/companion_stack_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 6801b9b18c..31a8d254f7 100644 --- 
a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -120,7 +120,7 @@ def update_companion_stack(self) -> None: def delete_companion_stack(self): """ - Blocking call to delte the companion stack + Blocking call to delete the companion stack """ stack_name = self._companion_stack.stack_name waiter = self._cfn_client.get_waiter("stack_delete_complete") From 17a7a41b3a4fb7b19ad3680a94e26ca7704ab7c2 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Tue, 4 May 2021 00:44:54 -0700 Subject: [PATCH 047/121] Fixed Typos --- samcli/commands/deploy/guided_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 0a85a0b6e2..be91719161 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -342,7 +342,7 @@ def prompt_image_repository( updated_repositories = self.prompt_specify_repos( manager_helper.missing_repo_functions, updated_repositories ) - manager_helper.update_sepcified_image_repos(updated_repositories) + manager_helper.update_specified_image_repos(updated_repositories) self.prompt_delete_unreferenced_repos( [manager_helper.manager.get_repo_uri(repo) for repo in manager_helper.unreferenced_repos] From 8a1c458298299a511ebd2a9f5dc02ee7c6f62300 Mon Sep 17 00:00:00 2001 From: Wilton Wang Date: Tue, 4 May 2021 00:45:13 -0700 Subject: [PATCH 048/121] Fixed Test Name --- .../companion_stack/test_companion_stack_manager_helper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py index 332f0a2b21..89a3e5b788 100644 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py +++ 
b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py @@ -36,8 +36,8 @@ def test_init(self): self.assertEqual(self.manager_helper.missing_repo_functions, [self.function_a_id, self.function_b_id]) self.assertEqual(len(self.manager_helper.unreferenced_repos), 1) - def test_update_sepcified_image_repos(self): - self.manager_helper.update_sepcified_image_repos({"FunctionA": "abc"}) + def test_update_specified_image_repos(self): + self.manager_helper.update_specified_image_repos({"FunctionA": "abc"}) self.assertEqual(self.manager_helper.missing_repo_functions, [self.function_b_id]) self.assertEqual(len(self.manager_helper.unreferenced_repos), 1) From 126373508854ed86f0f3e7366c5bbe5e3ef5f590 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 21 Jun 2021 16:27:30 -0400 Subject: [PATCH 049/121] Added methods for cf and s3 files and init UI --- samcli/cli/cli_config_file.py | 13 ++- samcli/cli/command.py | 1 + samcli/commands/delete/__init__.py | 6 ++ samcli/commands/delete/command.py | 88 +++++++++++++++++ samcli/commands/delete/delete_context.py | 117 +++++++++++++++++++++++ samcli/commands/delete/exceptions.py | 14 +++ samcli/lib/delete/__init__.py | 0 samcli/lib/delete/cf_utils.py | 105 ++++++++++++++++++++ samcli/lib/delete/utils.py | 16 ++++ samcli/lib/package/s3_uploader.py | 23 +++++ 10 files changed, 379 insertions(+), 4 deletions(-) create mode 100644 samcli/commands/delete/__init__.py create mode 100644 samcli/commands/delete/command.py create mode 100644 samcli/commands/delete/delete_context.py create mode 100644 samcli/commands/delete/exceptions.py create mode 100644 samcli/lib/delete/__init__.py create mode 100644 samcli/lib/delete/cf_utils.py create mode 100644 samcli/lib/delete/utils.py diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 67e214e122..9e2b4aa020 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -27,12 +27,14 @@ class TomlProvider: A parser for toml 
configuration files """ - def __init__(self, section=None): + def __init__(self, section=None, cmd_names=None): """ The constructor for TomlProvider class :param section: section defined in the configuration file nested within `cmd` + :param cmd_names: cmd_name defined in the configuration file """ self.section = section + self.cmd_names = cmd_names def __call__(self, config_path, config_env, cmd_names): """ @@ -67,18 +69,21 @@ def __call__(self, config_path, config_env, cmd_names): LOG.debug("Config file '%s' does not exist", samconfig.path()) return resolved_config + if not self.cmd_names: + self.cmd_names = cmd_names + try: LOG.debug( "Loading configuration values from [%s.%s.%s] (env.command_name.section) in config file at '%s'...", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), ) # NOTE(TheSriram): change from tomlkit table type to normal dictionary, # so that click defaults work out of the box. - resolved_config = dict(samconfig.get_all(cmd_names, self.section, env=config_env).items()) + resolved_config = dict(samconfig.get_all(self.cmd_names, self.section, env=config_env).items()) LOG.debug("Configuration values successfully loaded.") LOG.debug("Configuration values are: %s", resolved_config) @@ -87,7 +92,7 @@ def __call__(self, config_path, config_env, cmd_names): "Error reading configuration from [%s.%s.%s] (env.command_name.section) " "in configuration file at '%s' with : %s", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), str(ex), diff --git a/samcli/cli/command.py b/samcli/cli/command.py index 384529f78b..c329345f14 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -19,6 +19,7 @@ "samcli.commands.local.local", "samcli.commands.package", "samcli.commands.deploy", + "samcli.commands.delete", "samcli.commands.logs", "samcli.commands.publish", # We intentionally do not expose the `bootstrap` command for now. 
We might open it up later diff --git a/samcli/commands/delete/__init__.py b/samcli/commands/delete/__init__.py new file mode 100644 index 0000000000..ea5b0202d2 --- /dev/null +++ b/samcli/commands/delete/__init__.py @@ -0,0 +1,6 @@ +""" +`sam delete` command +""" + +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py new file mode 100644 index 0000000000..bdc201aef4 --- /dev/null +++ b/samcli/commands/delete/command.py @@ -0,0 +1,88 @@ +# """ +# CLI command for "delete" command +# """ + +import logging + +import click +from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args + +from samcli.lib.utils.version_checker import check_newer_version + +SHORT_HELP = "Delete an AWS SAM application." + +HELP_TEXT = """The sam delete command deletes a Cloudformation Stack and deletes all your resources which were created. + +\b +e.g. sam delete --stack-name sam-app --region us-east-1 + +\b +""" + +CONFIG_SECTION = "parameters" +CONFIG_COMMAND = "deploy" +LOG = logging.getLogger(__name__) + + +@click.command( + "delete", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@configuration_option(provider=TomlProvider(section=CONFIG_SECTION, cmd_names=[CONFIG_COMMAND])) +@click.option( + "--stack-name", + required=False, + help="The name of the AWS CloudFormation stack you want to delete. ", +) +@click.option( + "--s3-bucket", + required=False, + help="The name of the S3 bucket where this command delets your " "CloudFormation artifacts.", +) +@click.option( + "--s3-prefix", + required=False, + help="A prefix name that the command uses to delete the " + "artifacts' that were deployed to the S3 bucket. 
" + "The prefix name is a path name (folder name) for the S3 bucket.", +) +@aws_creds_options +@common_options +@pass_context +@check_newer_version +@print_cmdline_args +def cli( + ctx, + stack_name, + s3_bucket, + s3_prefix, + config_file, + config_env, +): + """ + `sam delete` command entry point + """ + + # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing + do_cli(stack_name, ctx.region, ctx.profile, s3_bucket, s3_prefix) # pragma: no cover + + +def do_cli( + stack_name, + region, + profile, + s3_bucket, + s3_prefix +): + """ + Implementation of the ``cli`` method + """ + from samcli.commands.delete.delete_context import DeleteContext + + with DeleteContext( + stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile + ) as delete_context: + delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py new file mode 100644 index 0000000000..d34d096c0c --- /dev/null +++ b/samcli/commands/delete/delete_context.py @@ -0,0 +1,117 @@ +import boto3 + +import click +from click import confirm +from click import prompt + +from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent +from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.delete.utils import get_cf_template_name +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.yamlhelper import yaml_parse +# from samcli.lib.package.artifact_exporter import Template +# from samcli.lib.package.ecr_uploader import ECRUploader +# from samcli.lib.package.uploaders import Uploaders +import docker + +class DeleteContext: + def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): + self.stack_name = stack_name + self.region = region + self.profile = profile + self.s3_bucket = s3_bucket + self.s3_prefix = s3_prefix + self.cf_utils = None + self.start_bold = "\033[1m" + self.end_bold = "\033[0m" + self.s3_uploader = None + # self.uploaders = None + 
self.cf_template_file_name = None + self.delete_artifacts_folder = None + self.delete_cf_template_file = None + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def run(self): + # print("Stack Name:", self.stack_name) + # print(self.s3_bucket) + # print(self.s3_prefix) + if not self.stack_name: + self.stack_name = prompt( + f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING + ) + + delete_stack = confirm( + f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}?{self.end_bold}", + default=False, + ) + # Fetch the template using the stack-name + if delete_stack: + boto_config = get_boto_config_with_user_agent() + + # Define cf_client based on the region as different regions can have same stack-names + cloudformation_client = boto3.client( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) + + s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) + ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + + self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) + + # docker_client = docker.from_env() + # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + + self.cf_utils = CfUtils(cloudformation_client) + + is_deployed = self.cf_utils.has_stack(self.stack_name) + + if is_deployed: + template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") + + template_dict = yaml_parse(template_str) + + if self.s3_bucket and self.s3_prefix: + self.delete_artifacts_folder = confirm( + f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + default=False, + ) + if not self.delete_artifacts_folder: + self.cf_template_file_name = get_cf_template_name(template_str, "template") + 
delete_cf_template_file = confirm( + f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, + ) + + click.echo("\n") + # Delete the primary stack + self.cf_utils.delete_stack(self.stack_name) + + click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) + + # Delete the artifacts + # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + # template = Template(None, None, self.uploaders, None) + # template.delete(template_dict) + + # Delete the template file using template_str + if self.delete_cf_template_file: + self.s3_uploader.delete_artifact(cf_template_file_name) + + # Delete the folder of artifacts if s3_bucket and s3_prefix provided + elif self.delete_artifacts_folder: + prefix_files = s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=self.s3_prefix) + self.s3_uploader.delete_artifact(None, prefix_files) + + # Delete the ECR companion stack + + if self.cf_template_file_name: + click.echo("- deleting template file {0}".format(cf_template_file)) + click.echo("\n") + click.echo("delete complete") + else: + click.echo("Error: The input stack {0} does not exist on Cloudformation".format(self.stack_name)) diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py new file mode 100644 index 0000000000..82c56b6bb6 --- /dev/null +++ b/samcli/commands/delete/exceptions.py @@ -0,0 +1,14 @@ +""" +Exceptions that are raised by sam delete +""" +from samcli.commands.exceptions import UserException + + +class DeleteFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to delete the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) diff --git a/samcli/lib/delete/__init__.py b/samcli/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/delete/cf_utils.py 
b/samcli/lib/delete/cf_utils.py new file mode 100644 index 0000000000..b8bccdc651 --- /dev/null +++ b/samcli/lib/delete/cf_utils.py @@ -0,0 +1,105 @@ +""" +Delete Cloudformation stacks and s3 files +""" + +import botocore +import logging + +from samcli.commands.delete.exceptions import DeleteFailedError + +LOG = logging.getLogger(__name__) + + +class CfUtils: + def __init__(self, cloudformation_client): + self._client = cloudformation_client + + def has_stack(self, stack_name): + """ + Checks if a CloudFormation stack with given name exists + + :param stack_name: Name or ID of the stack + :return: True if stack exists. False otherwise + """ + try: + resp = self._client.describe_stacks(StackName=stack_name) + if not resp["Stacks"]: + return False + + stack = resp["Stacks"][0] + return stack["StackStatus"] != "REVIEW_IN_PROGRESS" + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + + LOG.debug("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def get_stack_template(self, stack_name, stage): + try: + resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) + if not resp["TemplateBody"]: + return "" + + return resp["TemplateBody"] + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, get_stack_template will throw an + # exception. 
Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return "" + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + + LOG.debug("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def delete_stack(self, stack_name): + try: + resp = self._client.delete_stack(StackName=stack_name) + + return resp + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + + LOG.debug("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. 
Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py new file mode 100644 index 0000000000..280d24e462 --- /dev/null +++ b/samcli/lib/delete/utils.py @@ -0,0 +1,16 @@ +""" +Utilities for Delete +""" + +from samcli.lib.utils.hash import file_checksum +from samcli.lib.package.artifact_exporter import mktempfile + +def get_cf_template_name(self, template_str, extension): + with mktempfile() as temp_file: + temp_file.write(template_str) + temp_file.flush() + + filemd5 = file_checksum(temp_file.name) + remote_path = filemd5 + "." + extension + + return remote_path \ No newline at end of file diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 4a64a983d0..08bbf4db23 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -144,6 +144,29 @@ def upload_with_dedup( return self.upload(file_name, remote_path) + def delete_artifact(self, file_name: str, prefix_files=None): + + try: + if not self.bucket_name: + raise BucketNotSpecifiedError() + + remote_path = file_name + if self.prefix: + if remote_path: + remote_path = "{0}/{1}".format(self.prefix, file_name) + print("- deleting", remote_path) + self.s3.delete_object(Bucket=self.bucket_name, Key=remote_path) + elif prefix_files: + for obj in prefix_files["Contents"]: + print("- deleting", obj["Key"]) + self.s3.delete_object(Bucket=self.bucket_name, Key=obj["Key"]) + + except botocore.exceptions.ClientError as ex: + error_code = ex.response["Error"]["Code"] + if error_code == "NoSuchBucket": + raise NoSuchBucketError(bucket_name=self.bucket_name) from ex + raise ex + def file_exists(self, remote_path: str) -> bool: """ Check if the file we are trying to upload already exists in S3 From ba47369e90cf966e95f799e4153571e4321e449f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 23 Jun 2021 13:36:39 -0400 Subject: [PATCH 050/121] Added unit tests for utils 
methods and s3_uploader --- samcli/commands/delete/command.py | 15 ++--- samcli/commands/delete/delete_context.py | 46 ++++++++------ samcli/lib/delete/cf_utils.py | 56 ++++++++--------- samcli/lib/delete/utils.py | 4 +- samcli/lib/package/s3_uploader.py | 38 ++++++++---- tests/unit/lib/delete/__init__.py | 0 tests/unit/lib/delete/test_cf_utils.py | 71 ++++++++++++++++++++++ tests/unit/lib/package/test_s3_uploader.py | 45 ++++++++++++++ 8 files changed, 204 insertions(+), 71 deletions(-) create mode 100644 tests/unit/lib/delete/__init__.py create mode 100644 tests/unit/lib/delete/test_cf_utils.py diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index bdc201aef4..13483b40e1 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -1,6 +1,6 @@ -# """ -# CLI command for "delete" command -# """ +""" +CLI command for "delete" command +""" import logging @@ -70,18 +70,13 @@ def cli( do_cli(stack_name, ctx.region, ctx.profile, s3_bucket, s3_prefix) # pragma: no cover -def do_cli( - stack_name, - region, - profile, - s3_bucket, - s3_prefix -): +def do_cli(stack_name, region, profile, s3_bucket, s3_prefix): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext + # ctx = click.get_current_context() #This is here if s3_bucket and s3_prefix options are not used with DeleteContext( stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile ) as delete_context: diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index d34d096c0c..7e4999442f 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -1,5 +1,9 @@ -import boto3 +""" +Delete a SAM stack +""" +import boto3 +import docker import click from click import confirm from click import prompt @@ -9,10 +13,12 @@ from samcli.lib.delete.utils import get_cf_template_name from 
samcli.lib.package.s3_uploader import S3Uploader from samcli.yamlhelper import yaml_parse + +# Intentionally commented # from samcli.lib.package.artifact_exporter import Template # from samcli.lib.package.ecr_uploader import ECRUploader # from samcli.lib.package.uploaders import Uploaders -import docker + class DeleteContext: def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): @@ -37,20 +43,24 @@ def __exit__(self, *args): pass def run(self): - # print("Stack Name:", self.stack_name) - # print(self.s3_bucket) - # print(self.s3_prefix) + """ + Delete the stack based on the argument provided by customers and samconfig.toml. + """ if not self.stack_name: self.stack_name = prompt( f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING ) + if not self.region: + self.region = prompt( + f"\t{self.start_bold}Enter region you want to delete from{self.end_bold}", type=click.STRING + ) delete_stack = confirm( f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}?{self.end_bold}", default=False, ) # Fetch the template using the stack-name - if delete_stack: + if delete_stack and self.region: boto_config = get_boto_config_with_user_agent() # Define cf_client based on the region as different regions can have same stack-names @@ -63,8 +73,8 @@ def run(self): self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - # docker_client = docker.from_env() - # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + docker_client = docker.from_env() + ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -77,14 +87,16 @@ def run(self): if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", - default=False, + 
f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} \ + in S3 which contains the artifacts?{self.end_bold}", + default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", - default=False, + f"\t{self.start_bold}Do you want to delete the template file \ + {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, ) click.echo("\n") @@ -94,23 +106,23 @@ def run(self): click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) # Delete the artifacts + # Intentionally commented # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) # template = Template(None, None, self.uploaders, None) # template.delete(template_dict) - # Delete the template file using template_str + # Delete the CF template file in S3 if self.delete_cf_template_file: - self.s3_uploader.delete_artifact(cf_template_file_name) + self.s3_uploader.delete_artifact(self.cf_template_file_name) # Delete the folder of artifacts if s3_bucket and s3_prefix provided elif self.delete_artifacts_folder: - prefix_files = s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=self.s3_prefix) - self.s3_uploader.delete_artifact(None, prefix_files) + self.s3_uploader.delete_prefix_artifacts() # Delete the ECR companion stack if self.cf_template_file_name: - click.echo("- deleting template file {0}".format(cf_template_file)) + click.echo("- deleting template file {0}".format(self.cf_template_file)) click.echo("\n") click.echo("delete complete") else: diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index b8bccdc651..945aa5a09a 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -1,10 +1,10 @@ """ -Delete Cloudformation stacks and s3 files +Delete Cloudformation stacks and s3 files """ 
-import botocore import logging +from botocore.exceptions import ClientError, BotoCoreError from samcli.commands.delete.exceptions import DeleteFailedError LOG = logging.getLogger(__name__) @@ -29,7 +29,7 @@ def has_stack(self, stack_name): stack = resp["Stacks"][0] return stack["StackStatus"] != "REVIEW_IN_PROGRESS" - except botocore.exceptions.ClientError as e: + except ClientError as e: # If a stack does not exist, describe_stacks will throw an # exception. Unfortunately we don't have a better way than parsing # the exception msg to understand the nature of this exception. @@ -37,9 +37,10 @@ def has_stack(self, stack_name): if "Stack with id {0} does not exist".format(stack_name) in str(e): LOG.debug("Stack with id %s does not exist", stack_name) return False - except botocore.exceptions.BotoCoreError as e: + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + except BotoCoreError as e: # If there are credentials, environment errors, - # catch that and throw a deploy failed error. + # catch that and throw a delete failed error. LOG.debug("Botocore Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e @@ -50,26 +51,25 @@ def has_stack(self, stack_name): raise e def get_stack_template(self, stack_name, stage): + """ + Return the Cloudformation template of the given stack_name + + :param stack_name: Name or ID of the stack + :param stage: The Stage of the template Original or Processed + :return: Template body of the stack + """ try: resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) if not resp["TemplateBody"]: - return "" + return None return resp["TemplateBody"] - except botocore.exceptions.ClientError as e: - # If a stack does not exist, get_stack_template will throw an - # exception. Unfortunately we don't have a better way than parsing - # the exception msg to understand the nature of this exception. 
- - if "Stack with id {0} does not exist".format(stack_name) in str(e): - LOG.debug("Stack with id %s does not exist", stack_name) - return "" - except botocore.exceptions.BotoCoreError as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, - # catch that and throw a deploy failed error. + # catch that and throw a delete failed error. - LOG.debug("Botocore Exception : %s", str(e)) + LOG.debug("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: @@ -78,28 +78,24 @@ def get_stack_template(self, stack_name, stage): raise e def delete_stack(self, stack_name): + """ + Delete the Cloudformation stack with the given stack_name + + :param stack_name: Name or ID of the stack + :return: Status of deletion + """ try: resp = self._client.delete_stack(StackName=stack_name) - return resp - except botocore.exceptions.ClientError as e: - # If a stack does not exist, describe_stacks will throw an - # exception. Unfortunately we don't have a better way than parsing - # the exception msg to understand the nature of this exception. - - if "Stack with id {0} does not exist".format(stack_name) in str(e): - LOG.debug("Stack with id %s does not exist", stack_name) - return False - except botocore.exceptions.BotoCoreError as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, - # catch that and throw a deploy failed error. + # catch that and throw a delete failed error. - LOG.debug("Botocore Exception : %s", str(e)) + LOG.debug("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. 
Don't handle LOG.debug("Unable to get stack details.", exc_info=e) raise e - diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py index 280d24e462..f6e6edeb4d 100644 --- a/samcli/lib/delete/utils.py +++ b/samcli/lib/delete/utils.py @@ -5,7 +5,7 @@ from samcli.lib.utils.hash import file_checksum from samcli.lib.package.artifact_exporter import mktempfile -def get_cf_template_name(self, template_str, extension): +def get_cf_template_name(template_str, extension): with mktempfile() as temp_file: temp_file.write(template_str) temp_file.flush() @@ -13,4 +13,4 @@ def get_cf_template_name(self, template_str, extension): filemd5 = file_checksum(temp_file.name) remote_path = filemd5 + "." + extension - return remote_path \ No newline at end of file + return remote_path diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 08bbf4db23..3e445aec25 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -144,22 +144,24 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, file_name: str, prefix_files=None): - + def delete_artifact(self, remote_path: str, is_key=False): + """ + Deletes a given file from S3 + :param remote_path: Path to the file that will be deleted + :param is_key: If the given remote_path is the key or a file_name + """ try: if not self.bucket_name: raise BucketNotSpecifiedError() - remote_path = file_name - if self.prefix: - if remote_path: - remote_path = "{0}/{1}".format(self.prefix, file_name) - print("- deleting", remote_path) - self.s3.delete_object(Bucket=self.bucket_name, Key=remote_path) - elif prefix_files: - for obj in prefix_files["Contents"]: - print("- deleting", obj["Key"]) - self.s3.delete_object(Bucket=self.bucket_name, Key=obj["Key"]) + key = remote_path + if self.prefix and not is_key: + key = "{0}/{1}".format(self.prefix, remote_path) + + # Deleting Specific file with key + print("- deleting", key) + resp = 
self.s3.delete_object(Bucket=self.bucket_name, Key=key) + return resp["ResponseMetadata"] except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] @@ -167,6 +169,18 @@ def delete_artifact(self, file_name: str, prefix_files=None): raise NoSuchBucketError(bucket_name=self.bucket_name) from ex raise ex + def delete_prefix_artifacts(self): + """ + Deletes all the files from the prefix in S3 + """ + if not self.bucket_name: + raise BucketNotSpecifiedError() + if self.prefix: + prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) + + for obj in prefix_files["Contents"]: + self.delete_artifact(obj["Key"], True) + def file_exists(self, remote_path: str) -> bool: """ Check if the file we are trying to upload already exists in S3 diff --git a/tests/unit/lib/delete/__init__.py b/tests/unit/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py new file mode 100644 index 0000000000..37403a156f --- /dev/null +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -0,0 +1,71 @@ +from unittest.mock import patch, MagicMock, ANY, call +from unittest import TestCase + +from samcli.commands.delete.exceptions import DeleteFailedError +from botocore.exceptions import ClientError, BotoCoreError +from samcli.lib.delete.cf_utils import CfUtils + + +class TestCfUtils(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.cf_utils = CfUtils(self.cloudformation_client) + + def test_cf_utils_init(self): + self.assertEqual(self.cf_utils._client, self.cloudformation_client) + + def test_cf_utils_has_no_stack(self): + self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def 
test_cf_utils_has_stack_exception_non_exsistent(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.has_stack("test") + + def test_cf_utils_has_stack_in_review(self): + self.cf_utils._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_botocore(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") + + def test_cf_utils_get_stack_template_exception_botocore(self): + self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception_botocore(self): + self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception(self): + self.cf_utils._client.get_template = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_delete_stack_exception_botocore(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.delete_stack("test") + + def test_cf_utils_delete_stack_exception(self): + self.cf_utils._client.delete_stack = 
MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.delete_stack("test") diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index c40c4c6cf4..07fed24211 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -172,6 +172,51 @@ def test_s3_upload_no_bucket(self): s3_uploader.upload(f.name, remote_path) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + def test_s3_delete_artifact(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + s3_uploader.artifact_metadata = {"a": "b"} + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"}) + + def test_s3_delete_artifact_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_uploader.delete_artifact(f.name) + self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + + def test_s3_upload_bucket_not_found(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=True, + no_progressbar=self.no_progressbar, + ) + + s3_uploader.s3.delete_object = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "NoSuchBucket"}}, operation_name="create_object") + ) + with tempfile.NamedTemporaryFile() as f: + with self.assertRaises(NoSuchBucketError): + s3_uploader.delete_artifact(f.name) + def 
test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3, From d77f7c4831eefca0e4a975a159157205a903e4f7 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 24 Jun 2021 13:16:23 -0400 Subject: [PATCH 051/121] Removed s3_bucket and s3_prefix click options --- samcli/commands/delete/command.py | 23 +++++------------------ samcli/commands/delete/delete_context.py | 12 +++++------- 2 files changed, 10 insertions(+), 25 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 13483b40e1..7227be86e3 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -37,18 +37,6 @@ required=False, help="The name of the AWS CloudFormation stack you want to delete. ", ) -@click.option( - "--s3-bucket", - required=False, - help="The name of the S3 bucket where this command delets your " "CloudFormation artifacts.", -) -@click.option( - "--s3-prefix", - required=False, - help="A prefix name that the command uses to delete the " - "artifacts' that were deployed to the S3 bucket. " - "The prefix name is a path name (folder name) for the S3 bucket.", -) @aws_creds_options @common_options @pass_context @@ -57,8 +45,6 @@ def cli( ctx, stack_name, - s3_bucket, - s3_prefix, config_file, config_env, ): @@ -67,17 +53,18 @@ def cli( """ # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(stack_name, ctx.region, ctx.profile, s3_bucket, s3_prefix) # pragma: no cover + do_cli(stack_name, ctx.region, ctx.profile) # pragma: no cover -def do_cli(stack_name, region, profile, s3_bucket, s3_prefix): +def do_cli(stack_name, region, profile): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext - # ctx = click.get_current_context() #This is here if s3_bucket and s3_prefix options are not used + ctx = click.get_current_context() + with DeleteContext( - stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile + stack_name=stack_name, region=region, s3_bucket=ctx.default_map.get("s3_bucket", None), s3_prefix=ctx.default_map.get("s3_prefix", None), profile=profile ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 7e4999442f..919f1e4f99 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -74,7 +74,7 @@ def run(self): self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) docker_client = docker.from_env() - ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -87,15 +87,13 @@ def run(self): if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} \ - in S3 which contains the artifacts?{self.end_bold}", + f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") - delete_cf_template_file = confirm( - 
f"\t{self.start_bold}Do you want to delete the template file \ - {self.cf_template_file_name} in S3?{self.end_bold}", + self.delete_cf_template_file = confirm( + f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", default=False, ) @@ -122,7 +120,7 @@ def run(self): # Delete the ECR companion stack if self.cf_template_file_name: - click.echo("- deleting template file {0}".format(self.cf_template_file)) + click.echo(f"- deleting template file {self.cf_template_file_name}") click.echo("\n") click.echo("delete complete") else: From d50664880eb387343e40e25acf0f4eda3487576d Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 24 Jun 2021 13:34:28 -0700 Subject: [PATCH 052/121] chore: Increase awareness of same file warning during package (#2946) * chore: increase awareness of same file warning during package * fix formatting & grammar Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> --- samcli/lib/package/s3_uploader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 4a64a983d0..34ac666b86 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -85,7 +85,7 @@ def upload(self, file_name: str, remote_path: str) -> str: # Check if a file with same data exists if not self.force_upload and self.file_exists(remote_path): - LOG.debug("File with same data is already exists at %s. 
" "Skipping upload", remote_path) + LOG.info("File with same data already exists at %s, skipping upload", remote_path) return self.make_url(remote_path) try: From 698de67035967eff345a72fb3859bf7a06378c6b Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Date: Thu, 24 Jun 2021 16:07:13 -0700 Subject: [PATCH 053/121] fix: Allow the base64Encoded field in REST Api, skip validation of unknown fields and validate missing statusCode for Http Api (#2941) * fix API Gateway emulator: - skip validating the non allowed fields for Http Api Gateway, as it always skip the unknown fields - add base64Encoded as an allowed field for Rest Api gateway - base64 decoding will be always done for Http API gateway if the lambda response isBase64Encoded is true regardless the content-type - validate if statusCode is missing in case of Http API, and payload version 1.0 * - accept "true", "True", "false", "False" as valid isBase64Encoded values. - Validate on other isBase64Encoded Values - add more integration && unit test cases * fix lint && black issues * use smaller image to test Base64 response --- samcli/local/apigw/local_apigw_service.py | 71 ++- .../local/start_api/test_start_api.py | 49 +- .../testdata/start_api/binarydata.gif | Bin 1951 -> 49 bytes .../start_api/image_package_type/main.py | 2 +- tests/integration/testdata/start_api/main.py | 36 +- .../testdata/start_api/swagger-template.yaml | 48 ++ .../local/apigw/test_local_apigw_service.py | 441 ++++++++++++++++-- 7 files changed, 577 insertions(+), 70 deletions(-) diff --git a/samcli/local/apigw/local_apigw_service.py b/samcli/local/apigw/local_apigw_service.py index cc2684c200..5a6d397d54 100644 --- a/samcli/local/apigw/local_apigw_service.py +++ b/samcli/local/apigw/local_apigw_service.py @@ -333,7 +333,7 @@ def _request_handler(self, **kwargs): ) else: (status_code, headers, body) = self._parse_v1_payload_format_lambda_output( - lambda_response, self.api.binary_media_types, request + 
lambda_response, self.api.binary_media_types, request, route.event_type ) except LambdaResponseParseException as ex: LOG.error("Invalid lambda response received: %s", ex) @@ -379,13 +379,14 @@ def get_request_methods_endpoints(flask_request): # Consider moving this out to its own class. Logic is started to get dense and looks messy @jfuss @staticmethod - def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, flask_request): + def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, flask_request, event_type): """ Parses the output from the Lambda Container :param str lambda_output: Output from Lambda Invoke :param binary_types: list of binary types :param flask_request: flash request object + :param event_type: determines the route event type :return: Tuple(int, dict, str, bool) """ # pylint: disable-msg=too-many-statements @@ -397,6 +398,9 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla if not isinstance(json_output, dict): raise LambdaResponseParseException(f"Lambda returned {type(json_output)} instead of dict") + if event_type == Route.HTTP and json_output.get("statusCode") is None: + raise LambdaResponseParseException(f"Invalid API Gateway Response Key: statusCode is not in {json_output}") + status_code = json_output.get("statusCode") or 200 headers = LocalApigwService._merge_response_headers( json_output.get("headers") or {}, json_output.get("multiValueHeaders") or {} @@ -405,7 +409,8 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla body = json_output.get("body") if body is None: LOG.warning("Lambda returned empty body!") - is_base_64_encoded = json_output.get("isBase64Encoded") or False + + is_base_64_encoded = LocalApigwService.get_base_64_encoded(event_type, json_output) try: status_code = int(status_code) @@ -422,8 +427,10 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla f"Non null response bodies should be 
able to convert to string: {body}" ) from ex - invalid_keys = LocalApigwService._invalid_apig_response_keys(json_output) - if invalid_keys: + invalid_keys = LocalApigwService._invalid_apig_response_keys(json_output, event_type) + # HTTP API Gateway just skip the non allowed lambda response fields, but Rest API gateway fail on + # the non allowed fields + if event_type == Route.API and invalid_keys: raise LambdaResponseParseException(f"Invalid API Gateway Response Keys: {invalid_keys} in {json_output}") # If the customer doesn't define Content-Type default to application/json @@ -432,17 +439,51 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla headers["Content-Type"] = "application/json" try: - if LocalApigwService._should_base64_decode_body(binary_types, flask_request, headers, is_base_64_encoded): + # HTTP API Gateway always decode the lambda response only if isBase64Encoded field in response is True + # regardless the response content-type + # Rest API Gateway depends on the response content-type and the API configured BinaryMediaTypes to decide + # if it will decode the response or not + if (event_type == Route.HTTP and is_base_64_encoded) or ( + event_type == Route.API + and LocalApigwService._should_base64_decode_body( + binary_types, flask_request, headers, is_base_64_encoded + ) + ): body = base64.b64decode(body) except ValueError as ex: LambdaResponseParseException(str(ex)) return status_code, headers, body + @staticmethod + def get_base_64_encoded(event_type, json_output): + # The following behaviour is undocumented behaviour, and based on some trials + # Http API gateway checks lambda response for isBase64Encoded field, and ignore base64Encoded + # Rest API gateway checks first the field base64Encoded field, if not exist, it checks isBase64Encoded field + + if event_type == Route.API and json_output.get("base64Encoded") is not None: + is_base_64_encoded = json_output.get("base64Encoded") + field_name = "base64Encoded" + 
elif json_output.get("isBase64Encoded") is not None: + is_base_64_encoded = json_output.get("isBase64Encoded") + field_name = "isBase64Encoded" + else: + is_base_64_encoded = False + field_name = "isBase64Encoded" + + if isinstance(is_base_64_encoded, str) and is_base_64_encoded in ["true", "True", "false", "False"]: + is_base_64_encoded = is_base_64_encoded in ["true", "True"] + elif not isinstance(is_base_64_encoded, bool): + raise LambdaResponseParseException( + f"Invalid API Gateway Response Key: {is_base_64_encoded} is not a valid" f"{field_name}" + ) + + return is_base_64_encoded + @staticmethod def _parse_v2_payload_format_lambda_output(lambda_output: str, binary_types, flask_request): """ - Parses the output from the Lambda Container + Parses the output from the Lambda Container. V2 Payload Format means that the event_type is only HTTP :param str lambda_output: Output from Lambda Invoke :param binary_types: list of binary types @@ -487,21 +528,15 @@ def _parse_v2_payload_format_lambda_output(lambda_output: str, binary_types, fla f"Non null response bodies should be able to convert to string: {body}" ) from ex - # API Gateway only accepts statusCode, body, headers, and isBase64Encoded in - # a response shape. - # Don't check the response keys when inferring a response, see - # https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.v2. - invalid_keys = LocalApigwService._invalid_apig_response_keys(json_output) - if "statusCode" in json_output and invalid_keys: - raise LambdaResponseParseException(f"Invalid API Gateway Response Keys: {invalid_keys} in {json_output}") - # If the customer doesn't define Content-Type default to application/json if "Content-Type" not in headers: LOG.info("No Content-Type given. 
Defaulting to 'application/json'.") headers["Content-Type"] = "application/json" try: - if LocalApigwService._should_base64_decode_body(binary_types, flask_request, headers, is_base_64_encoded): + # HTTP API Gateway always decode the lambda response only if isBase64Encoded field in response is True + # regardless the response content-type + if is_base_64_encoded: # Note(xinhol): here in this method we change the type of the variable body multiple times # and confused mypy, we might want to avoid this and use multiple variables here. body = base64.b64decode(body) # type: ignore @@ -511,8 +546,10 @@ def _parse_v2_payload_format_lambda_output(lambda_output: str, binary_types, fla return status_code, headers, body @staticmethod - def _invalid_apig_response_keys(output): + def _invalid_apig_response_keys(output, event_type): allowable = {"statusCode", "body", "headers", "multiValueHeaders", "isBase64Encoded", "cookies"} + if event_type == Route.API: + allowable.add("base64Encoded") invalid_keys = output.keys() - allowable return invalid_keys diff --git a/tests/integration/local/start_api/test_start_api.py b/tests/integration/local/start_api/test_start_api.py index e7e5ad59a1..0ddb8d5a31 100644 --- a/tests/integration/local/start_api/test_start_api.py +++ b/tests/integration/local/start_api/test_start_api.py @@ -1,3 +1,4 @@ +import base64 import uuid import random @@ -382,14 +383,14 @@ def test_valid_v2_lambda_integer_response(self): @pytest.mark.flaky(reruns=3) @pytest.mark.timeout(timeout=600, method="thread") - def test_invalid_v2_lambda_response(self): + def test_v2_lambda_response_skip_unexpected_fields(self): """ Patch Request to a path that was defined as ANY in SAM through AWS::Serverless::Function Events """ response = requests.get(self.url + "/invalidv2response", timeout=300) - self.assertEqual(response.status_code, 502) - self.assertEqual(response.json(), {"message": "Internal server error"}) + self.assertEqual(response.status_code, 200) + 
self.assertEqual(response.json(), {"hello": "world"}) @pytest.mark.flaky(reruns=3) @pytest.mark.timeout(timeout=600, method="thread") @@ -538,6 +539,48 @@ def test_binary_response(self): self.assertEqual(response.headers.get("Content-Type"), "image/gif") self.assertEqual(response.content, expected) + @pytest.mark.flaky(reruns=3) + @pytest.mark.timeout(timeout=600, method="thread") + def test_non_decoded_binary_response(self): + """ + Binary data is returned correctly + """ + expected = base64.b64encode(self.get_binary_data(self.binary_data_file)) + + response = requests.get(self.url + "/nondecodedbase64response", timeout=300) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) + + @pytest.mark.flaky(reruns=3) + @pytest.mark.timeout(timeout=600, method="thread") + def test_decoded_binary_response_base64encoded_field(self): + """ + Binary data is returned correctly + """ + expected = self.get_binary_data(self.binary_data_file) + + response = requests.get(self.url + "/decodedbase64responsebas64encoded", timeout=300) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) + + @pytest.mark.flaky(reruns=3) + @pytest.mark.timeout(timeout=600, method="thread") + def test_decoded_binary_response_base64encoded_field_is_priority(self): + """ + Binary data is returned correctly + """ + expected = base64.b64encode(self.get_binary_data(self.binary_data_file)) + + response = requests.get(self.url + "/decodedbase64responsebas64encodedpriority", timeout=300) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) + class TestStartApiWithSwaggerHttpApis(StartApiIntegBaseClass): template_path = "/testdata/start_api/swagger-template-http-api.yaml" diff --git 
a/tests/integration/testdata/start_api/binarydata.gif b/tests/integration/testdata/start_api/binarydata.gif index 855b4041793a49335cf6d1b66d8c1e5059daf60f..3f40c2073daf9743db59e7bec58cf90e8f6d3fbc 100644 GIT binary patch literal 49 ucmZ?wbh9u|WMp7un8*ME|Ns97(+r9~SvVOOm>6_GT#!5i6O#)ggEau*tOpVR literal 1951 zcmd^8{a2EC9{qyKi?V`(uBZf}LzuRr)~uT8Lm4uyCYqdPgu-KB=kujfc^aqiu{cg<9!rLi^agVZ!h$> zz$jFYCx9m=a9GUthWTNxaL66B_yG8`|NMEqD=_-|!(y?4Ox6(zT1Eb5Tpax2!)j>g z9D`wwi17OyOxvr~evTZfkA%)-l?nh52n16s);tl!9S)F6rSD>5a!+u_16c$D;k!RV z8z^9aphxB6?Ck7zH^wN%$>8HdrBaOm%sPT8MZT5_$V9@snVFY?feeaMT*1Yfni{yP zYl=wJvRKDC&;WxLLUH=Ev;;wrvQ`0)jFj+ry}i8?C%A>V;#vl6P%OS(F3#ieBw}%I zRMgDOjM;3C2xN_Psj^wjHyRCWGQl%56)fhTSj>l#)E0pt^Gx0w4#!|Hm`tW51`Vds zioBT+i`i9Md0mo9H5#AxTv^r+J`nPvqM{lLQpI91ES19b^>aeuV`YiA#-Ihi-RIc= zfRmGdO0^)oZ= zZWZ=Ns_vx4T>8W5pr(&^R&b56w||iby+!f97hc}8&w3#9sT>ueJmu1_ZYwy=d`7lwzjCN37$e6$bVS~u7^ zlh8$yv&lx#oXPtkVMuiR^wG<3A+4zJoA1*ia<6L`0aCBq!@iw(uT+} zEE$l+3r))KFXy@J(i0HEqvzS{?!MWe z3~{=xc(t$lS_~WEMw-)ET;RAo2T!X1ymw&I9vzQ$SPxnn%&N|~$)t!1dr0NdOJ2oW zqr>eZKiztPxEHg-4&R;7(e11?mu0k$ShT;HqjbbRt{Btj@R0Yrx4oK&qPS~wE^)CD z)zsqF@7B`k^E%(G|6E*TqgLAvU_GNX?4u3|>Jh(GHDb;?$gqohqKwA?dG`Vb> zhPF}+J$2m=?_jFmm*S`XA?!)VE5!ReSCVjIYU-HX>a&Vd(4)B;#RdZ7PRNsb=#`^; zAB2)%Xksz5X4%p+iPeYoERx$FWmsAv^-ePhTUu zK00t9)3*ublF_u!DTukt+JTW9Gsq6qMZcvv@j<*XSgs06_24GDrW{UCJtth7ycJ-V zB8o;M&~5~4yOR>yIrDXe<}899OFM)JR{#5+fG-gr*>&TgDU3Wl^>@qn7&|G1eCst` zgmqivjoONao^O|{DyyT}D8(ppXCqwZx^KLAYl^y=`0drOt2p!{7roUzg_b8glt(HR zqIXaFXeS|T_F;r)BN(=w{f7s=PGw$yc_qM3$`8=Qr>F;U8^7rQ944|pm9TE6vyM$Omm*tOa#_@eN#A Date: Thu, 24 Jun 2021 21:42:20 -0400 Subject: [PATCH 054/121] Fixed lint errors and added few unit tests --- samcli/commands/delete/command.py | 9 +++- samcli/commands/delete/delete_context.py | 16 +++--- samcli/lib/package/s3_uploader.py | 3 +- tests/unit/commands/delete/__init__.py | 0 tests/unit/commands/delete/test_command.py | 49 +++++++++++++++++++ .../commands/delete/test_delete_context.py | 0 
tests/unit/lib/delete/test_cf_utils.py | 17 ++++++- tests/unit/lib/package/test_s3_uploader.py | 2 +- 8 files changed, 83 insertions(+), 13 deletions(-) create mode 100644 tests/unit/commands/delete/__init__.py create mode 100644 tests/unit/commands/delete/test_command.py create mode 100644 tests/unit/commands/delete/test_delete_context.py diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 7227be86e3..8fc04716b9 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -63,8 +63,13 @@ def do_cli(stack_name, region, profile): from samcli.commands.delete.delete_context import DeleteContext ctx = click.get_current_context() - + s3_bucket = ctx.default_map.get("s3_bucket", None) + s3_prefix = ctx.default_map.get("s3_prefix", None) with DeleteContext( - stack_name=stack_name, region=region, s3_bucket=ctx.default_map.get("s3_bucket", None), s3_prefix=ctx.default_map.get("s3_prefix", None), profile=profile + stack_name=stack_name, + region=region, + s3_bucket=s3_bucket, + s3_prefix=s3_prefix, + profile=profile ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 919f1e4f99..b2a861fa16 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -3,7 +3,7 @@ """ import boto3 -import docker +# import docker import click from click import confirm from click import prompt @@ -12,7 +12,7 @@ from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader -from samcli.yamlhelper import yaml_parse +# from samcli.yamlhelper import yaml_parse # Intentionally commented # from samcli.lib.package.artifact_exporter import Template @@ -69,11 +69,11 @@ def run(self): ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - ecr_client = 
boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + # ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - docker_client = docker.from_env() + # docker_client = docker.from_env() # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -83,17 +83,19 @@ def run(self): if is_deployed: template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") - template_dict = yaml_parse(template_str) + # template_dict = yaml_parse(template_str) if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + f"\t{self.start_bold}Are you sure you want to delete the folder" + \ + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", + f"\t{self.start_bold}Do you want to delete the template file" + \ + f" {self.cf_template_file_name} in S3?{self.end_bold}", default=False, ) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 3e445aec25..c0a4d88cf6 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -22,6 +22,7 @@ from collections import abc from typing import Optional, Dict, Any, cast from urllib.parse import urlparse, parse_qs +import click import botocore import botocore.exceptions @@ -159,7 +160,7 @@ def delete_artifact(self, remote_path: str, is_key=False): key = "{0}/{1}".format(self.prefix, 
remote_path) # Deleting Specific file with key - print("- deleting", key) + click.echo("- deleting S3 file " + key) resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) return resp["ResponseMetadata"] diff --git a/tests/unit/commands/delete/__init__.py b/tests/unit/commands/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py new file mode 100644 index 0000000000..0a0e58afec --- /dev/null +++ b/tests/unit/commands/delete/test_command.py @@ -0,0 +1,49 @@ +from unittest import TestCase +from unittest.mock import ANY, MagicMock, Mock, call, patch + +from samcli.commands.delete.command import do_cli +from tests.unit.cli.test_cli_config_file import MockContext + +def get_mock_sam_config(): + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + return mock_sam_config + +MOCK_SAM_CONFIG = get_mock_sam_config() + +class TestDeleteCliCommand(TestCase): + def setUp(self): + + self.stack_name = "stack-name" + self.s3_bucket = "s3-bucket" + self.s3_prefix = "s3-prefix" + self.region = None + self.profile = None + self.config_env = "mock-default-env" + self.config_file = "mock-default-filename" + MOCK_SAM_CONFIG.reset_mock() + + + @patch("samcli.commands.delete.command.click") + @patch("samcli.commands.delete.delete_context.DeleteContext") + def test_all_args(self, mock_delete_context, mock_delete_click): + + context_mock = Mock() + mock_delete_context.return_value.__enter__.return_value = context_mock + + do_cli( + stack_name=self.stack_name, + region=self.region, + profile=self.profile, + ) + + mock_delete_context.assert_called_with( + stack_name=self.stack_name, + s3_bucket=mock_delete_click.get_current_context().default_map.get("s3_bucket", None), + s3_prefix=mock_delete_click.get_current_context().default_map.get("s3_prefix", None), + region=self.region, + profile=self.profile, + ) + + 
context_mock.run.assert_called_with() + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 37403a156f..20cb5288dc 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -28,6 +28,16 @@ def test_cf_utils_has_stack_exception_non_exsistent(self): ) ) self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_client_error(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Error: The security token included in the request is expired"}}, + operation_name="stack_status", + ) + ) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") def test_cf_utils_has_stack_exception(self): self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) @@ -45,8 +55,11 @@ def test_cf_utils_has_stack_exception_botocore(self): with self.assertRaises(DeleteFailedError): self.cf_utils.has_stack("test") - def test_cf_utils_get_stack_template_exception_botocore(self): - self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + def test_cf_utils_get_stack_template_exception_client_error(self): + self.cf_utils._client.get_template = MagicMock(side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + )) with self.assertRaises(DeleteFailedError): self.cf_utils.get_stack_template("test", "Original") diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 07fed24211..f1765c3f8c 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -200,7 +200,7 @@ 
def test_s3_delete_artifact_no_bucket(self): s3_uploader.delete_artifact(f.name) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) - def test_s3_upload_bucket_not_found(self): + def test_s3_delete_artifact_bucket_not_found(self): s3_uploader = S3Uploader( s3_client=self.s3, bucket_name=self.bucket_name, From af2f9296f30568f9e851f520f1d3853edce2350f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 24 Jun 2021 22:14:25 -0400 Subject: [PATCH 055/121] Make black happy --- samcli/commands/delete/command.py | 6 +----- samcli/commands/delete/delete_context.py | 14 ++++++++------ samcli/lib/delete/cf_utils.py | 4 ++-- samcli/lib/delete/utils.py | 1 + tests/unit/commands/delete/test_command.py | 4 +++- tests/unit/lib/delete/test_cf_utils.py | 12 +++++++----- 6 files changed, 22 insertions(+), 19 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 8fc04716b9..412e4fc76f 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -66,10 +66,6 @@ def do_cli(stack_name, region, profile): s3_bucket = ctx.default_map.get("s3_bucket", None) s3_prefix = ctx.default_map.get("s3_prefix", None) with DeleteContext( - stack_name=stack_name, - region=region, - s3_bucket=s3_bucket, - s3_prefix=s3_prefix, - profile=profile + stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index b2a861fa16..fb4a09e4e1 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -3,6 +3,7 @@ """ import boto3 + # import docker import click from click import confirm @@ -12,6 +13,7 @@ from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader + # from samcli.yamlhelper import yaml_parse # 
Intentionally commented @@ -87,16 +89,16 @@ def run(self): if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder" + \ - f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", - default=False, + f"\t{self.start_bold}Are you sure you want to delete the folder" + + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file" + \ - f" {self.cf_template_file_name} in S3?{self.end_bold}", - default=False, + f"\t{self.start_bold}Do you want to delete the template file" + + f" {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, ) click.echo("\n") diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 945aa5a09a..f0c7aa4731 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -65,7 +65,7 @@ def get_stack_template(self, stack_name, stage): return resp["TemplateBody"] - except (ClientError, BotoCoreError) as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. @@ -88,7 +88,7 @@ def delete_stack(self, stack_name): resp = self._client.delete_stack(StackName=stack_name) return resp - except (ClientError, BotoCoreError) as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. 
diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py index f6e6edeb4d..497610f776 100644 --- a/samcli/lib/delete/utils.py +++ b/samcli/lib/delete/utils.py @@ -5,6 +5,7 @@ from samcli.lib.utils.hash import file_checksum from samcli.lib.package.artifact_exporter import mktempfile + def get_cf_template_name(template_str, extension): with mktempfile() as temp_file: temp_file.write(template_str) diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index 0a0e58afec..a199c4e960 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -4,13 +4,16 @@ from samcli.commands.delete.command import do_cli from tests.unit.cli.test_cli_config_file import MockContext + def get_mock_sam_config(): mock_sam_config = MagicMock() mock_sam_config.exists = MagicMock(return_value=True) return mock_sam_config + MOCK_SAM_CONFIG = get_mock_sam_config() + class TestDeleteCliCommand(TestCase): def setUp(self): @@ -23,7 +26,6 @@ def setUp(self): self.config_file = "mock-default-filename" MOCK_SAM_CONFIG.reset_mock() - @patch("samcli.commands.delete.command.click") @patch("samcli.commands.delete.delete_context.DeleteContext") def test_all_args(self, mock_delete_context, mock_delete_click): diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 20cb5288dc..36f32ae735 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -28,7 +28,7 @@ def test_cf_utils_has_stack_exception_non_exsistent(self): ) ) self.assertEqual(self.cf_utils.has_stack("test"), False) - + def test_cf_utils_has_stack_exception_client_error(self): self.cf_utils._client.describe_stacks = MagicMock( side_effect=ClientError( @@ -56,10 +56,12 @@ def test_cf_utils_has_stack_exception_botocore(self): self.cf_utils.has_stack("test") def test_cf_utils_get_stack_template_exception_client_error(self): - self.cf_utils._client.get_template = 
MagicMock(side_effect=ClientError( - error_response={"Error": {"Message": "Stack with id test does not exist"}}, - operation_name="stack_status", - )) + self.cf_utils._client.get_template = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) with self.assertRaises(DeleteFailedError): self.cf_utils.get_stack_template("test", "Original") From 1d70155554159e3add65cb17cc1afad77cb314c0 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 28 Jun 2021 12:41:21 -0400 Subject: [PATCH 056/121] Added methods for deleting template artifacts --- samcli/commands/delete/delete_context.py | 34 ++++++++--------- samcli/commands/package/exceptions.py | 22 +++++++++++ samcli/lib/package/artifact_exporter.py | 42 ++++++++++++++++----- samcli/lib/package/ecr_uploader.py | 32 +++++++++++++++- samcli/lib/package/packageable_resources.py | 27 +++++++++++++ tests/unit/lib/delete/test_utils.py | 8 ++++ 6 files changed, 136 insertions(+), 29 deletions(-) create mode 100644 tests/unit/lib/delete/test_utils.py diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index fb4a09e4e1..1ae7122dbd 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -4,7 +4,7 @@ import boto3 -# import docker +import docker import click from click import confirm from click import prompt @@ -14,12 +14,11 @@ from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader -# from samcli.yamlhelper import yaml_parse +from samcli.yamlhelper import yaml_parse -# Intentionally commented -# from samcli.lib.package.artifact_exporter import Template -# from samcli.lib.package.ecr_uploader import ECRUploader -# from samcli.lib.package.uploaders import Uploaders +from samcli.lib.package.artifact_exporter import Template +from samcli.lib.package.ecr_uploader import ECRUploader +from 
samcli.lib.package.uploaders import Uploaders class DeleteContext: @@ -33,7 +32,7 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): self.start_bold = "\033[1m" self.end_bold = "\033[0m" self.s3_uploader = None - # self.uploaders = None + self.uploaders = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None @@ -71,12 +70,12 @@ def run(self): ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - # ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - # docker_client = docker.from_env() - # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + docker_client = docker.from_env() + ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -85,12 +84,12 @@ def run(self): if is_deployed: template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") - # template_dict = yaml_parse(template_str) + template_dict = yaml_parse(template_str) if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( f"\t{self.start_bold}Are you sure you want to delete the folder" - + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + + f" {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", default=False, ) if not self.delete_artifacts_folder: @@ -103,15 +102,14 @@ def run(self): click.echo("\n") # Delete the primary stack + click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) self.cf_utils.delete_stack(self.stack_name) - click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) # Delete the artifacts - # Intentionally commented - # 
self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) - # template = Template(None, None, self.uploaders, None) - # template.delete(template_dict) + self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + template = Template(None, None, self.uploaders, None) + template.delete(template_dict) # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -123,8 +121,6 @@ def run(self): # Delete the ECR companion stack - if self.cf_template_file_name: - click.echo(f"- deleting template file {self.cf_template_file_name}") click.echo("\n") click.echo("delete complete") else: diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index a650f62843..2e23cf7458 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -62,6 +62,28 @@ def __init__(self, resource_id, property_name, property_value, ex): ) +class DeleteArtifactFailedError(UserException): + def __init__(self, resource_id, property_name, ex): + self.resource_id = resource_id + self.property_name = property_name + self.ex = ex + + message_fmt = ( + "Unable to delete artifact referenced " + "by {property_name} parameter of {resource_id} resource." 
+ "\n" + "{ex}" + ) + + super().__init__( + message=message_fmt.format( + property_name=self.property_name, + resource_id=self.resource_id, + ex=self.ex, + ) + ) + + class ImageNotFoundError(UserException): def __init__(self, resource_id, property_name): self.resource_id = resource_id diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index c0f2b94576..d0372730b6 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -130,21 +130,22 @@ def __init__( """ Reads the template and makes it ready for export """ - if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): - raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) + if template_path and parent_dir: + if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): + raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) - abs_template_path = make_abs_path(parent_dir, template_path) - template_dir = os.path.dirname(abs_template_path) + abs_template_path = make_abs_path(parent_dir, template_path) + template_dir = os.path.dirname(abs_template_path) - with open(abs_template_path, "r") as handle: - template_str = handle.read() + with open(abs_template_path, "r") as handle: + template_str = handle.read() - self.template_dict = yaml_parse(template_str) - self.template_dir = template_dir + self.template_dict = yaml_parse(template_str) + self.template_dir = template_dir + self.code_signer = code_signer self.resources_to_export = resources_to_export self.metadata_to_export = metadata_to_export self.uploaders = uploaders - self.code_signer = code_signer def _export_global_artifacts(self, template_dict: Dict) -> Dict: """ @@ -235,3 +236,26 @@ def export(self) -> Dict: exporter.export(resource_id, resource_dict, self.template_dir) return self.template_dict + + def delete(self, template_dict): + self.template_dict = 
template_dict + + if "Resources" not in self.template_dict: + return self.template_dict + + self._apply_global_values() + + for resource_id, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", {}) + + for exporter_class in self.resources_to_export: + if exporter_class.RESOURCE_TYPE != resource_type: + continue + if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE: + continue + # Delete code resources + exporter = exporter_class(self.uploaders, None) + exporter.delete(resource_id, resource_dict) + return self.template_dict diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index fcf4e836e9..4f8b0246d0 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -5,12 +5,19 @@ import base64 import os +import click import botocore import docker from docker.errors import BuildError, APIError -from samcli.commands.package.exceptions import DockerPushFailedError, DockerLoginFailedError, ECRAuthorizationError +from samcli.commands.package.exceptions import ( + DockerPushFailedError, + DockerLoginFailedError, + ECRAuthorizationError, + ImageNotFoundError, + DeleteArtifactFailedError +) from samcli.lib.package.image_utils import tag_translation from samcli.lib.package.stream_cursor_utils import cursor_up, cursor_left, cursor_down, clear_line from samcli.lib.utils.osutils import stderr @@ -83,6 +90,29 @@ def upload(self, image, resource_name): return f"{repository}:{_tag}" + def delete_artifact(self, image_uri, resource_id, property_name): + try: + repo_image_tag = image_uri.split("/")[1].split(":") + repository = repo_image_tag[0] + image_tag = repo_image_tag[1] + resp = self.ecr_client.batch_delete_image(repositoryName=repository, + imageIds=[ + { + 'imageTag': image_tag + }, + ] + ) + if resp["failures"]: + image_details = resp["failures"][0] + if image_details["failureCode"] == "ImageNotFound": 
+ LOG.debug("ImageNotFound Exception : ") + raise ImageNotFoundError(resource_id, property_name) + + click.echo("- deleting ECR image {0} in repository {1}".format(image_tag, repository)) + + except botocore.exceptions.ClientError as ex: + raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + # TODO: move this to a generic class to allow for streaming logs back from docker. def _stream_progress(self, logs): """ diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 937b451a28..1169e23ebb 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -79,6 +79,9 @@ def export(self, resource_id, resource_dict, parent_dir): def do_export(self, resource_id, resource_dict, parent_dir): pass + def delete(self, resource_id, resource_dict): + pass + class ResourceZip(Resource): """ @@ -154,6 +157,16 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) + def delete(self, resource_id, resource_dict): + + if resource_dict is None: + return + resource_path = resource_dict[self.PROPERTY_NAME] + parsed_s3_url = self.uploader.parse_s3_url(resource_path) + print(parsed_s3_url["Key"]) + if not self.uploader.bucket_name: + self.uploader.bucket_name = parsed_s3_url["Bucket"] + self.uploader.delete_artifact(parsed_s3_url["Key"], True) class ResourceImageDict(Resource): """ @@ -238,6 +251,8 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) + def delete(self, resource_id, resource_dict): + self.uploader.delete_artifact(resource_dict["ImageUri"], resource_id, self.PROPERTY_NAME) class ResourceWithS3UrlDict(ResourceZip): """ @@ -269,6 +284,18 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, 
parsed_url) + def delete(self, resource_id, resource_dict): + + if resource_dict is None: + return + resource_path = resource_dict[self.PROPERTY_NAME] + s3_bucket = resource_path[self.BUCKET_NAME_PROPERTY] + key = resource_path["Key"] + + if not self.uploader.bucket_name: + self.uploader.bucket_name = s3_bucket + self.uploader.delete_artifact(remote_path=key, is_key=True) + class ServerlessFunctionResource(ResourceZip): RESOURCE_TYPE = AWS_SERVERLESS_FUNCTION diff --git a/tests/unit/lib/delete/test_utils.py b/tests/unit/lib/delete/test_utils.py new file mode 100644 index 0000000000..c39f176d5c --- /dev/null +++ b/tests/unit/lib/delete/test_utils.py @@ -0,0 +1,8 @@ +from unittest import TestCase + +from samcli.lib.delete.utils import get_cf_template_name + +class TestCfUtils(TestCase): + + def test_utils(self): + self.assertEqual(get_cf_template_name("hello world!", "template"), "fc3ff98e8c6a0d3087d515c0473f8677.template") \ No newline at end of file From e3f787232dd21fcc121da77bcd9368bb54b59b31 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 28 Jun 2021 13:06:23 -0400 Subject: [PATCH 057/121] Wait method added for delete cf api --- samcli/commands/delete/delete_context.py | 1 + samcli/lib/delete/cf_utils.py | 23 ++++++++++++++++++++++- tests/unit/lib/delete/test_cf_utils.py | 24 ++++++++++++++++++++++-- 3 files changed, 45 insertions(+), 3 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 1ae7122dbd..6d470438e1 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -104,6 +104,7 @@ def run(self): # Delete the primary stack click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) self.cf_utils.delete_stack(self.stack_name) + self.cf_utils.wait_for_delete(self.stack_name) # Delete the artifacts diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index f0c7aa4731..7d8be75601 100644 --- 
a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -4,7 +4,7 @@ import logging -from botocore.exceptions import ClientError, BotoCoreError +from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.commands.delete.exceptions import DeleteFailedError LOG = logging.getLogger(__name__) @@ -99,3 +99,24 @@ def delete_stack(self, stack_name): # We don't know anything about this exception. Don't handle LOG.debug("Unable to get stack details.", exc_info=e) raise e + + def wait_for_delete(self, stack_name): + """ + Waits until the delete stack completes + + :param stack_name: Stack name + """ + + # Wait for Delete to Finish + waiter = self._client.get_waiter("stack_delete_complete") + # Poll every 5 seconds. + waiter_config = {"Delay": 5} + try: + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + except WaiterError as ex: + + resp = ex.last_response + status = resp["Status"] + reason = resp["StatusReason"] + + raise DeleteFailedError(stack_name=stack_name, msg="ex: {0} Status: {1}. 
Reason: {2}".format(ex, status, reason)) from ex diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 36f32ae735..8e57407231 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -2,9 +2,17 @@ from unittest import TestCase from samcli.commands.delete.exceptions import DeleteFailedError -from botocore.exceptions import ClientError, BotoCoreError +from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.lib.delete.cf_utils import CfUtils +class MockDeleteWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, StackName, WaiterConfig): + if self.ex: + raise self.ex + return class TestCfUtils(TestCase): def setUp(self): @@ -20,7 +28,7 @@ def test_cf_utils_has_no_stack(self): self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) self.assertEqual(self.cf_utils.has_stack("test"), False) - def test_cf_utils_has_stack_exception_non_exsistent(self): + def test_cf_utils_has_stack_exception_non_existent(self): self.cf_utils._client.describe_stacks = MagicMock( side_effect=ClientError( error_response={"Error": {"Message": "Stack with id test does not exist"}}, @@ -84,3 +92,15 @@ def test_cf_utils_delete_stack_exception(self): self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) with self.assertRaises(Exception): self.cf_utils.delete_stack("test") + + def test_cf_utils_wait_for_delete_exception(self): + self.cf_utils._client.get_waiter = MagicMock( + return_value=MockDeleteWaiter( + ex=WaiterError( + name="wait_for_delete", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + )) + with self.assertRaises(DeleteFailedError): + self.cf_utils.wait_for_delete("test") From 3b2cdf1cdfcf09c5574baf29f2cf015833af4bfb Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Mon, 28 Jun 2021 18:50:33 -0700 Subject: [PATCH 
058/121] fix: pass copy of environment variables for keeping cache valid (#2943) * fix: pass copy of environment variables for keeping cache valid * add integ tests * update docs * make black happy Co-authored-by: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> --- samcli/lib/build/build_strategy.py | 7 ++++- tests/integration/buildcmd/test_build_cmd.py | 30 +++++++++++++++++++ .../lib/build_module/test_build_strategy.py | 9 ++++-- 3 files changed, 43 insertions(+), 3 deletions(-) diff --git a/samcli/lib/build/build_strategy.py b/samcli/lib/build/build_strategy.py index ecded3a743..258101ba2d 100644 --- a/samcli/lib/build/build_strategy.py +++ b/samcli/lib/build/build_strategy.py @@ -5,6 +5,7 @@ import pathlib import shutil from abc import abstractmethod, ABC +from copy import deepcopy from typing import Callable, Dict, List, Any, Optional, cast from samcli.commands.build.exceptions import MissingBuildMethodException @@ -114,6 +115,10 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini LOG.debug("Building to following folder %s", single_build_dir) + # we should create a copy and pass it down, otherwise additional env vars like LAMBDA_BUILDERS_LOG_LEVEL + # will make cache invalid all the time + container_env_vars = deepcopy(build_definition.env_vars) + # when a function is passed here, it is ZIP function, codeuri and runtime are not None result = self._build_function( build_definition.get_function_name(), @@ -123,7 +128,7 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini build_definition.get_handler_name(), single_build_dir, build_definition.metadata, - build_definition.env_vars, + container_env_vars, ) function_build_results[single_full_path] = result diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index ba25849672..4681a4f2eb 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py 
@@ -1346,6 +1346,36 @@ def test_cache_build(self, use_container, code_uri, function1_handler, function2 expected_messages, command_result, self._make_parameter_override_arg(overrides) ) + @skipIf(SKIP_DOCKER_TESTS, SKIP_DOCKER_MESSAGE) + def test_cached_build_with_env_vars(self): + """ + Build 2 times to verify that second time hits the cached build + """ + overrides = { + "FunctionCodeUri": "Python", + "Function1Handler": "main.first_function_handler", + "Function2Handler": "main.second_function_handler", + "FunctionRuntime": "python3.8", + } + cmdlist = self.get_command_list( + use_container=True, parameter_overrides=overrides, cached=True, container_env_var="FOO=BAR" + ) + + LOG.info("Running Command (cache should be invalid): %s", cmdlist) + command_result = run_command(cmdlist, cwd=self.working_dir) + self.assertTrue( + "Cache is invalid, running build and copying resources to function build definition" + in command_result.stderr.decode("utf-8") + ) + + LOG.info("Re-Running Command (valid cache should exist): %s", cmdlist) + command_result_with_cache = run_command(cmdlist, cwd=self.working_dir) + + self.assertTrue( + "Valid cache found, copying previously built resources from function build definition" + in command_result_with_cache.stderr.decode("utf-8") + ) + @skipIf( ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), diff --git a/tests/unit/lib/build_module/test_build_strategy.py b/tests/unit/lib/build_module/test_build_strategy.py index 7e9902a172..1fae5b7962 100644 --- a/tests/unit/lib/build_module/test_build_strategy.py +++ b/tests/unit/lib/build_module/test_build_strategy.py @@ -1,3 +1,4 @@ +from copy import deepcopy from unittest import TestCase from unittest.mock import Mock, patch, MagicMock, call, ANY @@ -218,11 +219,15 @@ def test_build_single_function_definition_image_functions_with_same_metadata(sel function2.name = "Function2" function2.full_path = "Function2" function2.packagetype = IMAGE - build_definition = FunctionBuildDefinition("3.7", 
"codeuri", IMAGE, {}) + build_definition = FunctionBuildDefinition("3.7", "codeuri", IMAGE, {}, env_vars={"FOO": "BAR"}) # since they have the same metadata, they are put into the same build_definition. build_definition.functions = [function1, function2] - result = default_build_strategy.build_single_function_definition(build_definition) + with patch("samcli.lib.build.build_strategy.deepcopy", wraps=deepcopy) as patched_deepcopy: + result = default_build_strategy.build_single_function_definition(build_definition) + + patched_deepcopy.assert_called_with(build_definition.env_vars) + # both of the function name should show up in results self.assertEqual(result, {"Function": built_image, "Function2": built_image}) From 99f7db4768e44bffed317bcbcd2d1ebba48174cb Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 29 Jun 2021 10:16:00 -0400 Subject: [PATCH 059/121] Added LOG statements --- samcli/lib/delete/cf_utils.py | 13 +++++++------ samcli/lib/package/s3_uploader.py | 4 ++++ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index f0c7aa4731..db36f11ce3 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -37,17 +37,18 @@ def has_stack(self, stack_name): if "Stack with id {0} does not exist".format(stack_name) in str(e): LOG.debug("Stack with id %s does not exist", stack_name) return False + LOG.error("ClientError Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except BotoCoreError as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.debug("Botocore Exception : %s", str(e)) + LOG.error("Botocore Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. 
Don't handle - LOG.debug("Unable to get stack details.", exc_info=e) + LOG.error("Unable to get stack details.", exc_info=e) raise e def get_stack_template(self, stack_name, stage): @@ -69,12 +70,12 @@ def get_stack_template(self, stack_name, stage): # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.debug("Failed to delete stack : %s", str(e)) + LOG.error("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle - LOG.debug("Unable to get stack details.", exc_info=e) + LOG.error("Unable to get stack details.", exc_info=e) raise e def delete_stack(self, stack_name): @@ -92,10 +93,10 @@ def delete_stack(self, stack_name): # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.debug("Failed to delete stack : %s", str(e)) + LOG.error("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle - LOG.debug("Unable to get stack details.", exc_info=e) + LOG.error("Failed to delete stack. 
", exc_info=e) raise e diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index c0a4d88cf6..3fb7070d2b 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -153,6 +153,7 @@ def delete_artifact(self, remote_path: str, is_key=False): """ try: if not self.bucket_name: + LOG.error("Bucket not specified") raise BucketNotSpecifiedError() key = remote_path @@ -162,11 +163,13 @@ def delete_artifact(self, remote_path: str, is_key=False): # Deleting Specific file with key click.echo("- deleting S3 file " + key) resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) + LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) return resp["ResponseMetadata"] except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] if error_code == "NoSuchBucket": + LOG.error("Provided bucket %s does not exist ", self.bucket_name) raise NoSuchBucketError(bucket_name=self.bucket_name) from ex raise ex @@ -175,6 +178,7 @@ def delete_prefix_artifacts(self): Deletes all the files from the prefix in S3 """ if not self.bucket_name: + LOG.error("Bucket not specified") raise BucketNotSpecifiedError() if self.prefix: prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) From e7304ec2807bd4943363ca7ae791407fdfdf673f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 29 Jun 2021 12:48:29 -0400 Subject: [PATCH 060/121] Added and updated changes based on CR --- samcli/commands/delete/command.py | 43 ++++-- samcli/commands/delete/delete_context.py | 144 +++++++++++------- samcli/lib/delete/utils.py | 17 --- samcli/lib/deploy/deployer.py | 7 +- samcli/lib/package/artifact_exporter.py | 6 +- samcli/lib/package/utils.py | 12 +- tests/unit/commands/delete/test_command.py | 6 +- .../lib/package/test_artifact_exporter.py | 28 ++-- 8 files changed, 153 insertions(+), 110 deletions(-) delete mode 100644 samcli/lib/delete/utils.py diff --git 
a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 412e4fc76f..d95382bb51 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -5,23 +5,21 @@ import logging import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.lib.utils.version_checker import check_newer_version -SHORT_HELP = "Delete an AWS SAM application." +SHORT_HELP = "Delete an AWS SAM application and the artifacts created by sam deploy." -HELP_TEXT = """The sam delete command deletes a Cloudformation Stack and deletes all your resources which were created. +HELP_TEXT = """The sam delete command deletes the Cloudformation +Stack and all the artifacts which were created using sam deploy. \b -e.g. sam delete --stack-name sam-app --region us-east-1 +e.g. sam delete \b """ -CONFIG_SECTION = "parameters" -CONFIG_COMMAND = "deploy" LOG = logging.getLogger(__name__) @@ -31,12 +29,34 @@ context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, help=HELP_TEXT, ) -@configuration_option(provider=TomlProvider(section=CONFIG_SECTION, cmd_names=[CONFIG_COMMAND])) @click.option( "--stack-name", required=False, help="The name of the AWS CloudFormation stack you want to delete. ", ) +@click.option( + "--config-file", + required=False, + help=( + "The path and file name of the configuration file containing default parameter values to use. " + "Its default value is 'samconfig.toml' in project directory. For more information about configuration files, " + "see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." 
+ ), + type=click.STRING, + default="samconfig.toml", +) +@click.option( + "--config-env", + required=False, + help=( + "The environment name specifying the default parameter values in the configuration file to use. " + "Its default value is 'default'. For more information about configuration files, see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="default", +) @aws_creds_options @common_options @pass_context @@ -53,19 +73,16 @@ def cli( """ # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(stack_name, ctx.region, ctx.profile) # pragma: no cover + do_cli(stack_name, ctx.region, config_file, config_env, ctx.profile) # pragma: no cover -def do_cli(stack_name, region, profile): +def do_cli(stack_name, region, config_file, config_env, profile): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext - ctx = click.get_current_context() - s3_bucket = ctx.default_map.get("s3_bucket", None) - s3_prefix = ctx.default_map.get("s3_prefix", None) with DeleteContext( - stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile + stack_name=stack_name, region=region, profile=profile, config_file=config_file, config_env=config_env ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index fb4a09e4e1..95c62d4dd1 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -8,11 +8,11 @@ import click from click import confirm from click import prompt - +from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent from samcli.lib.delete.cf_utils import CfUtils -from samcli.lib.delete.utils import get_cf_template_name from 
samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name # from samcli.yamlhelper import yaml_parse @@ -21,14 +21,20 @@ # from samcli.lib.package.ecr_uploader import ECRUploader # from samcli.lib.package.uploaders import Uploaders +CONFIG_COMMAND = "deploy" +CONFIG_SECTION = "parameters" +TEMPLATE_STAGE = "Original" + class DeleteContext: - def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): + def __init__(self, stack_name, region, profile, config_file, config_env): self.stack_name = stack_name self.region = region self.profile = profile - self.s3_bucket = s3_bucket - self.s3_prefix = s3_prefix + self.config_file = config_file + self.config_env = config_env + self.s3_bucket = None # s3_bucket + self.s3_prefix = None # s3_prefix self.cf_utils = None self.start_bold = "\033[1m" self.end_bold = "\033[0m" @@ -39,15 +45,7 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): self.delete_cf_template_file = None def __enter__(self): - return self - - def __exit__(self, *args): - pass - - def run(self): - """ - Delete the stack based on the argument provided by customers and samconfig.toml. - """ + self.parse_config_file() if not self.stack_name: self.stack_name = prompt( f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING @@ -57,8 +55,82 @@ def run(self): self.region = prompt( f"\t{self.start_bold}Enter region you want to delete from{self.end_bold}", type=click.STRING ) + return self + + def __exit__(self, *args): + pass + + def parse_config_file(self): + """ + Read the provided config file if it exists and assign the options values. 
+ """ + toml_provider = TomlProvider(CONFIG_SECTION, [CONFIG_COMMAND]) + config_options = toml_provider( + config_path=self.config_file, config_env=self.config_env, cmd_names=[CONFIG_COMMAND] + ) + if config_options: + if not self.stack_name: + self.stack_name = config_options.get("stack_name", None) + if self.stack_name == config_options["stack_name"]: + if not self.region: + self.region = config_options.get("region", None) + if not self.profile: + self.profile = config_options.get("profile", None) + self.s3_bucket = config_options.get("s3_bucket", None) + self.s3_prefix = config_options.get("s3_prefix", None) + + def delete(self): + """ + Delete method calls for Cloudformation stacks and S3 and ECR artifacts + """ + template_str = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + + # template_dict = yaml_parse(template_str) + + if self.s3_bucket and self.s3_prefix: + self.delete_artifacts_folder = confirm( + f"\t{self.start_bold}Are you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + default=False, + ) + if not self.delete_artifacts_folder: + with mktempfile() as temp_file: + self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + self.delete_cf_template_file = confirm( + f"\t{self.start_bold}Do you want to delete the template file" + + f" {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, + ) + + click.echo("\n") + # Delete the primary stack + self.cf_utils.delete_stack(self.stack_name) + + click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) + + # Delete the artifacts + # Intentionally commented + # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + # template = Template(None, None, self.uploaders, None) + # template.delete(template_dict) + + # Delete the CF template file in S3 + if self.delete_cf_template_file: + self.s3_uploader.delete_artifact(self.cf_template_file_name) + + # Delete the 
folder of artifacts if s3_bucket and s3_prefix provided + elif self.delete_artifacts_folder: + self.s3_uploader.delete_prefix_artifacts() + + # Delete the ECR companion stack + + def run(self): + """ + Delete the stack based on the argument provided by customers and samconfig.toml. + """ delete_stack = confirm( - f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}?{self.end_bold}", + f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}" + + f" in the region {self.region} ?{self.end_bold}", default=False, ) # Fetch the template using the stack-name @@ -83,48 +155,8 @@ def run(self): is_deployed = self.cf_utils.has_stack(self.stack_name) if is_deployed: - template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") - - # template_dict = yaml_parse(template_str) - - if self.s3_bucket and self.s3_prefix: - self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder" - + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", - default=False, - ) - if not self.delete_artifacts_folder: - self.cf_template_file_name = get_cf_template_name(template_str, "template") - self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file" - + f" {self.cf_template_file_name} in S3?{self.end_bold}", - default=False, - ) - - click.echo("\n") - # Delete the primary stack - self.cf_utils.delete_stack(self.stack_name) - - click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) - - # Delete the artifacts - # Intentionally commented - # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) - # template = Template(None, None, self.uploaders, None) - # template.delete(template_dict) - - # Delete the CF template file in S3 - if self.delete_cf_template_file: - self.s3_uploader.delete_artifact(self.cf_template_file_name) - - # Delete the folder of artifacts if s3_bucket and s3_prefix provided - elif 
self.delete_artifacts_folder: - self.s3_uploader.delete_prefix_artifacts() - - # Delete the ECR companion stack + self.delete() - if self.cf_template_file_name: - click.echo(f"- deleting template file {self.cf_template_file_name}") click.echo("\n") click.echo("delete complete") else: diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py deleted file mode 100644 index 497610f776..0000000000 --- a/samcli/lib/delete/utils.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Utilities for Delete -""" - -from samcli.lib.utils.hash import file_checksum -from samcli.lib.package.artifact_exporter import mktempfile - - -def get_cf_template_name(template_str, extension): - with mktempfile() as temp_file: - temp_file.write(template_str) - temp_file.flush() - - filemd5 = file_checksum(temp_file.name) - remote_path = filemd5 + "." + extension - - return remote_path diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 8aae03425e..eeed0fd321 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -34,7 +34,7 @@ ) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns, newline_per_item, MIN_OFFSET from samcli.commands.deploy import exceptions as deploy_exceptions -from samcli.lib.package.artifact_exporter import mktempfile +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp @@ -174,12 +174,11 @@ def create_changeset( # TemplateBody. This is required for large templates. 
if s3_uploader: with mktempfile() as temporary_file: - temporary_file.write(kwargs.pop("TemplateBody")) - temporary_file.flush() + remote_path = get_cf_template_name(temporary_file, kwargs.pop("TemplateBody"), "template") # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( - s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" ) kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index c0f2b94576..85b5792ef9 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -42,6 +42,7 @@ is_local_file, mktempfile, is_s3_url, + get_cf_template_name, ) from samcli.lib.utils.packagetype import ZIP from samcli.yamlhelper import yaml_parse, yaml_dump @@ -83,10 +84,9 @@ def do_export(self, resource_id, resource_dict, parent_dir): exported_template_str = yaml_dump(exported_template_dict) with mktempfile() as temporary_file: - temporary_file.write(exported_template_str) - temporary_file.flush() - url = self.uploader.upload_with_dedup(temporary_file.name, "template") + remote_path = get_cf_template_name(temporary_file, exported_template_str, "template") + url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url(url, version_property="Version") diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index 6317c35a48..a831518805 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -19,7 +19,7 @@ from samcli.commands.package.exceptions import ImageNotFoundError from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.utils.hash 
import dir_checksum +from samcli.lib.utils.hash import dir_checksum, file_checksum LOG = logging.getLogger(__name__) @@ -284,3 +284,13 @@ def copy_to_temp_dir(filepath): dst = os.path.join(tmp_dir, os.path.basename(filepath)) shutil.copyfile(filepath, dst) return tmp_dir + + +def get_cf_template_name(temp_file, template_str, extension): + temp_file.write(template_str) + temp_file.flush() + + filemd5 = file_checksum(temp_file.name) + remote_path = filemd5 + "." + extension + + return remote_path diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index a199c4e960..4e268688ee 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -36,15 +36,17 @@ def test_all_args(self, mock_delete_context, mock_delete_click): do_cli( stack_name=self.stack_name, region=self.region, + config_file=self.config_file, + config_env=self.config_env, profile=self.profile, ) mock_delete_context.assert_called_with( stack_name=self.stack_name, - s3_bucket=mock_delete_click.get_current_context().default_map.get("s3_bucket", None), - s3_prefix=mock_delete_click.get_current_context().default_map.get("s3_prefix", None), region=self.region, profile=self.profile, + config_file=self.config_file, + config_env=self.config_env, ) context_mock.run.assert_called_with() diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 7cc20f6be7..6c85108c8e 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -778,7 +778,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = 
result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -792,7 +792,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_cloudformation_stack_no_upload_path_is_s3url(self): @@ -805,7 +805,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -817,7 +817,7 @@ def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -829,7 +829,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + 
self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_empty(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -842,7 +842,7 @@ def test_export_cloudformation_stack_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_not_file(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -855,7 +855,7 @@ def test_export_cloudformation_stack_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() @patch("samcli.lib.package.artifact_exporter.Template") def test_export_serverless_application(self, TemplateMock): @@ -871,7 +871,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -885,7 +885,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + 
self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_serverless_application_no_upload_path_is_s3url(self): @@ -898,7 +898,7 @@ def test_export_serverless_application_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_httpsurl(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -910,7 +910,7 @@ def test_export_serverless_application_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_empty(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -921,7 +921,7 @@ def test_export_serverless_application_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_not_file(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -933,7 +933,7 @@ def test_export_serverless_application_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, 
"dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_dictionary(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -945,7 +945,7 @@ def test_export_serverless_application_no_upload_path_is_dictionary(self): resource_dict = {property_name: location} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], location) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() @patch("samcli.lib.package.artifact_exporter.yaml_parse") def test_template_export_metadata(self, yaml_parse_mock): From c2e43db315aa292db027ba54f7a3f2fd006eb50e Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 29 Jun 2021 13:37:40 -0400 Subject: [PATCH 061/121] Fixed the unit tests in artifact_exporter.py --- tests/unit/lib/package/test_artifact_exporter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 6c85108c8e..f7aceafef1 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -792,7 +792,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_cloudformation_stack_no_upload_path_is_s3url(self): @@ -885,7 +885,7 @@ def test_export_serverless_application(self, TemplateMock): 
TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_serverless_application_no_upload_path_is_s3url(self): From dd35d532d6a2f48fa007bd2907a337924262de3d Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Tue, 29 Jun 2021 14:09:54 -0400 Subject: [PATCH 062/121] Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn --- samcli/commands/delete/command.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index d95382bb51..c0310bfa6e 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -11,8 +11,8 @@ SHORT_HELP = "Delete an AWS SAM application and the artifacts created by sam deploy." -HELP_TEXT = """The sam delete command deletes the Cloudformation -Stack and all the artifacts which were created using sam deploy. +HELP_TEXT = """The sam delete command deletes the CloudFormation +stack and all the artifacts which were created using sam deploy. \b e.g. 
sam delete From 6fc1b991777ad362e88e0a4064a4736a9429e37c Mon Sep 17 00:00:00 2001 From: Alexis Facques Date: Tue, 29 Jun 2021 22:56:20 +0200 Subject: [PATCH 063/121] fix: Skip build of Docker image if ImageUri is a valid ECR URL (#2934) (#2935) --- samcli/commands/deploy/guided_context.py | 2 +- samcli/lib/package/ecr_utils.py | 2 +- samcli/lib/providers/sam_base_provider.py | 28 +++ samcli/lib/providers/sam_function_provider.py | 17 +- .../local/lib/test_sam_function_provider.py | 163 ++++++++++++++---- 5 files changed, 178 insertions(+), 34 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index b4bb65a0d6..dafdf0a331 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -316,7 +316,7 @@ def prompt_image_repository(self, stacks: List[Stack]): if isinstance(self.image_repositories, dict) else "" or self.image_repository, ) - if not is_ecr_url(image_repositories.get(resource_id)): + if resource_id not in image_repositories or not is_ecr_url(str(image_repositories[resource_id])): raise GuidedDeployFailedError( f"Invalid Image Repository ECR URI: {image_repositories.get(resource_id)}" ) diff --git a/samcli/lib/package/ecr_utils.py b/samcli/lib/package/ecr_utils.py index 6186d24099..f4bedc4a27 100644 --- a/samcli/lib/package/ecr_utils.py +++ b/samcli/lib/package/ecr_utils.py @@ -6,5 +6,5 @@ from samcli.lib.package.regexpr import ECR_URL -def is_ecr_url(url): +def is_ecr_url(url: str) -> bool: return bool(re.match(ECR_URL, url)) if url else False diff --git a/samcli/lib/providers/sam_base_provider.py b/samcli/lib/providers/sam_base_provider.py index c059284eb8..7a75c70cc8 100644 --- a/samcli/lib/providers/sam_base_provider.py +++ b/samcli/lib/providers/sam_base_provider.py @@ -10,6 +10,8 @@ from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable from samcli.lib.samlib.resource_metadata_normalizer import ResourceMetadataNormalizer 
from samcli.lib.samlib.wrapper import SamTranslatorWrapper +from samcli.lib.package.ecr_utils import is_ecr_url + LOG = logging.getLogger(__name__) @@ -34,6 +36,11 @@ class SamBaseProvider: SERVERLESS_LAYER: "ContentUri", } + IMAGE_PROPERTY_KEYS = { + LAMBDA_FUNCTION: "Code", + SERVERLESS_FUNCTION: "ImageUri", + } + def get(self, name: str) -> Optional[Any]: """ Given name of the function, this method must return the Function object @@ -88,6 +95,17 @@ def _is_s3_location(location: Optional[Union[str, Dict]]) -> bool: isinstance(location, str) and location.startswith("s3://") ) + @staticmethod + def _is_ecr_uri(location: Optional[Union[str, Dict]]) -> bool: + """ + the input could be: + - ImageUri of Serverless::Function + - Code of Lambda::Function + """ + return location is not None and is_ecr_url( + str(location.get("ImageUri", "")) if isinstance(location, dict) else location + ) + @staticmethod def _warn_code_extraction(resource_type: str, resource_name: str, code_property: str) -> None: LOG.warning( @@ -98,6 +116,16 @@ def _warn_code_extraction(resource_type: str, resource_name: str, code_property: code_property, ) + @staticmethod + def _warn_imageuri_extraction(resource_type: str, resource_name: str, image_property: str) -> None: + LOG.warning( + "The resource %s '%s' has specified ECR registry image for %s. 
" + "It will not be built and SAM CLI does not support invoking it locally.", + resource_type, + resource_name, + image_property, + ) + @staticmethod def _extract_lambda_function_imageuri(resource_properties: Dict, code_property_key: str) -> Optional[str]: """ diff --git a/samcli/lib/providers/sam_function_provider.py b/samcli/lib/providers/sam_function_provider.py index 7bc231f929..6bffc4bf75 100644 --- a/samcli/lib/providers/sam_function_provider.py +++ b/samcli/lib/providers/sam_function_provider.py @@ -130,13 +130,28 @@ def _extract_functions( resource_properties["Metadata"] = resource_metadata if resource_type in [SamFunctionProvider.SERVERLESS_FUNCTION, SamFunctionProvider.LAMBDA_FUNCTION]: + resource_package_type = resource_properties.get("PackageType", ZIP) + code_property_key = SamBaseProvider.CODE_PROPERTY_KEYS[resource_type] - if SamBaseProvider._is_s3_location(resource_properties.get(code_property_key)): + image_property_key = SamBaseProvider.IMAGE_PROPERTY_KEYS[resource_type] + + if resource_package_type == ZIP and SamBaseProvider._is_s3_location( + resource_properties.get(code_property_key) + ): + # CodeUri can be a dictionary of S3 Bucket/Key or a S3 URI, neither of which are supported if not ignore_code_extraction_warnings: SamFunctionProvider._warn_code_extraction(resource_type, name, code_property_key) continue + if resource_package_type == IMAGE and SamBaseProvider._is_ecr_uri( + resource_properties.get(image_property_key) + ): + # ImageUri can be an ECR uri, which is not supported + if not ignore_code_extraction_warnings: + SamFunctionProvider._warn_imageuri_extraction(resource_type, name, image_property_key) + continue + if resource_type == SamFunctionProvider.SERVERLESS_FUNCTION: layers = SamFunctionProvider._parse_layer_info( stack, diff --git a/tests/unit/commands/local/lib/test_sam_function_provider.py b/tests/unit/commands/local/lib/test_sam_function_provider.py index 3d33f1a312..9daf92abc0 100644 --- 
a/tests/unit/commands/local/lib/test_sam_function_provider.py +++ b/tests/unit/commands/local/lib/test_sam_function_provider.py @@ -63,10 +63,6 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, - "SamFunc4": { - "Type": "AWS::Serverless::Function", - "Properties": {"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", "PackageType": IMAGE}, - }, "SamFuncWithFunctionNameOverride": { "Type": "AWS::Serverless::Function", "Properties": { @@ -76,6 +72,29 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, + "SamFuncWithImage1": { + "Type": "AWS::Serverless::Function", + "Properties": { + "PackageType": IMAGE, + }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, + }, + "SamFuncWithImage2": { + "Type": "AWS::Serverless::Function", + "Properties": { + "ImageUri": "image:tag", + "PackageType": IMAGE, + }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, + }, + "SamFuncWithImage3": { + # ImageUri is unsupported ECR location + "Type": "AWS::Serverless::Function", + "Properties": { + "ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo:myimage", + "PackageType": IMAGE, + }, + }, "LambdaFunc1": { "Type": "AWS::Lambda::Function", "Properties": { @@ -84,21 +103,37 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, - "LambdaFuncWithInlineCode": { + "LambdaFuncWithImage1": { "Type": "AWS::Lambda::Function", "Properties": { - "Code": {"ZipFile": "testcode"}, - "Runtime": "nodejs4.3", - "Handler": "index.handler", + "PackageType": IMAGE, + }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, + }, + "LambdaFuncWithImage2": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Code": {"ImageUri": "image:tag"}, + "PackageType": IMAGE, }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, 
}, - "LambdaFunc2": { + "LambdaFuncWithImage3": { + # ImageUri is unsupported ECR location "Type": "AWS::Lambda::Function", "Properties": { "Code": {"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo"}, "PackageType": IMAGE, }, }, + "LambdaFuncWithInlineCode": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Code": {"ZipFile": "testcode"}, + "Runtime": "nodejs4.3", + "Handler": "index.handler", + }, + }, "LambdaFuncWithLocalPath": { "Type": "AWS::Lambda::Function", "Properties": {"Code": "./some/path/to/code", "Runtime": "nodejs4.3", "Handler": "index.handler"}, @@ -248,10 +283,10 @@ def setUp(self): ("SamFunc2", None), # codeuri is a s3 location, ignored ("SamFunc3", None), # codeuri is a s3 location, ignored ( - "SamFunc4", + "SamFuncWithImage1", Function( - name="SamFunc4", - functionname="SamFunc4", + name="SamFuncWithImage1", + functionname="SamFuncWithImage1", runtime=None, handler=None, codeuri=".", @@ -262,14 +297,46 @@ def setUp(self): layers=[], events=None, inlinecode=None, - imageuri="123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", + imageuri=None, imageconfig=None, packagetype=IMAGE, - metadata=None, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, codesign_config_arn=None, stack_path="", ), ), + ( + "SamFuncWithImage2", + Function( + name="SamFuncWithImage2", + functionname="SamFuncWithImage2", + runtime=None, + handler=None, + codeuri=".", + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + events=None, + inlinecode=None, + imageuri="image:tag", + imageconfig=None, + packagetype=IMAGE, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, + codesign_config_arn=None, + stack_path="", + ), + ), + ("SamFuncWithImage3", None), # imageuri is ecr location, ignored ( "SamFuncWithFunctionNameOverride-x", Function( @@ -295,33 +362,37 @@ def setUp(self): ), ("LambdaFunc1", None), # 
codeuri is a s3 location, ignored ( - "LambdaFuncWithInlineCode", + "LambdaFuncWithImage1", Function( - name="LambdaFuncWithInlineCode", - functionname="LambdaFuncWithInlineCode", - runtime="nodejs4.3", - handler="index.handler", - codeuri=None, + name="LambdaFuncWithImage1", + functionname="LambdaFuncWithImage1", + runtime=None, + handler=None, + codeuri=".", memory=None, timeout=None, environment=None, rolearn=None, layers=[], events=None, - metadata=None, - inlinecode="testcode", - codesign_config_arn=None, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, + inlinecode=None, imageuri=None, imageconfig=None, - packagetype=ZIP, + packagetype=IMAGE, + codesign_config_arn=None, stack_path="", ), ), ( - "LambdaFunc2", + "LambdaFuncWithImage2", Function( - name="LambdaFunc2", - functionname="LambdaFunc2", + name="LambdaFuncWithImage2", + functionname="LambdaFuncWithImage2", runtime=None, handler=None, codeuri=".", @@ -331,15 +402,43 @@ def setUp(self): rolearn=None, layers=[], events=None, - metadata=None, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, inlinecode=None, - imageuri="123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", + imageuri="image:tag", imageconfig=None, packagetype=IMAGE, codesign_config_arn=None, stack_path="", ), ), + ("LambdaFuncWithImage3", None), # imageuri is a ecr location, ignored + ( + "LambdaFuncWithInlineCode", + Function( + name="LambdaFuncWithInlineCode", + functionname="LambdaFuncWithInlineCode", + runtime="nodejs4.3", + handler="index.handler", + codeuri=None, + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + events=None, + metadata=None, + inlinecode="testcode", + codesign_config_arn=None, + imageuri=None, + imageconfig=None, + packagetype=ZIP, + stack_path="", + ), + ), ( "LambdaFuncWithLocalPath", Function( @@ -494,11 +593,13 @@ def 
test_get_all_must_return_all_functions(self): result = {posixpath.join(f.stack_path, f.name) for f in self.provider.get_all()} expected = { "SamFunctions", + "SamFuncWithImage1", + "SamFuncWithImage2", "SamFuncWithInlineCode", - "SamFunc4", "SamFuncWithFunctionNameOverride", + "LambdaFuncWithImage1", + "LambdaFuncWithImage2", "LambdaFuncWithInlineCode", - "LambdaFunc2", "LambdaFuncWithLocalPath", "LambdaFuncWithFunctionNameOverride", "LambdaFuncWithCodeSignConfig", From f43e763e1a382ee946bf8fba45075c00326edc8c Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 12:17:26 -0400 Subject: [PATCH 064/121] Updated code based on Chris' comments --- samcli/commands/delete/command.py | 12 +++-- samcli/commands/delete/delete_context.py | 66 ++++++++---------------- samcli/commands/delete/exceptions.py | 10 ++++ samcli/lib/delete/cf_utils.py | 19 ++++--- samcli/lib/package/s3_uploader.py | 4 +- tests/unit/lib/delete/test_cf_utils.py | 6 +-- 6 files changed, 55 insertions(+), 62 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index c0310bfa6e..823e262bc6 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -64,19 +64,21 @@ @print_cmdline_args def cli( ctx, - stack_name, - config_file, - config_env, + stack_name: str, + config_file: str, + config_env: str, ): """ `sam delete` command entry point """ # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(stack_name, ctx.region, config_file, config_env, ctx.profile) # pragma: no cover + do_cli( + stack_name=stack_name, region=ctx.region, config_file=config_file, config_env=config_env, profile=ctx.profile + ) # pragma: no cover -def do_cli(stack_name, region, config_file, config_env, profile): +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str): """ Implementation of the ``cli`` method """ diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 95c62d4dd1..a6d146d584 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -4,7 +4,6 @@ import boto3 -# import docker import click from click import confirm from click import prompt @@ -14,32 +13,22 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name -# from samcli.yamlhelper import yaml_parse - -# Intentionally commented -# from samcli.lib.package.artifact_exporter import Template -# from samcli.lib.package.ecr_uploader import ECRUploader -# from samcli.lib.package.uploaders import Uploaders - CONFIG_COMMAND = "deploy" CONFIG_SECTION = "parameters" TEMPLATE_STAGE = "Original" class DeleteContext: - def __init__(self, stack_name, region, profile, config_file, config_env): + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str): self.stack_name = stack_name self.region = region self.profile = profile self.config_file = config_file self.config_env = config_env - self.s3_bucket = None # s3_bucket - self.s3_prefix = None # s3_prefix + self.s3_bucket = None + self.s3_prefix = None self.cf_utils = None - self.start_bold = "\033[1m" - self.end_bold = "\033[0m" self.s3_uploader = None - # self.uploaders = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None @@ -48,13 
+37,11 @@ def __enter__(self): self.parse_config_file() if not self.stack_name: self.stack_name = prompt( - f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING + click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING ) if not self.region: - self.region = prompt( - f"\t{self.start_bold}Enter region you want to delete from{self.end_bold}", type=click.STRING - ) + self.region = prompt(click.style("\tEnter region you want to delete from:", bold=True), type=click.STRING) return self def __exit__(self, *args): @@ -85,52 +72,48 @@ def delete(self): """ template_str = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) - # template_dict = yaml_parse(template_str) - if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder" - + f" {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + click.style( + "\tAre you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?", + bold=True, + ), default=False, ) if not self.delete_artifacts_folder: with mktempfile() as temp_file: self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file" - + f" {self.cf_template_file_name} in S3?{self.end_bold}", + click.style( + "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True + ), default=False, ) click.echo("\n") # Delete the primary stack - self.cf_utils.delete_stack(self.stack_name) + self.cf_utils.delete_stack(stack_name=self.stack_name) - click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) - - # Delete the artifacts - # Intentionally commented - # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) - # template = Template(None, None, self.uploaders, None) - # 
template.delete(template_dict) + click.echo(f"- deleting Cloudformation stack {self.stack_name}") # Delete the CF template file in S3 if self.delete_cf_template_file: - self.s3_uploader.delete_artifact(self.cf_template_file_name) + self.s3_uploader.delete_artifact(remote_path=self.cf_template_file_name) # Delete the folder of artifacts if s3_bucket and s3_prefix provided elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - # Delete the ECR companion stack - def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. """ delete_stack = confirm( - f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}" - + f" in the region {self.region} ?{self.end_bold}", + click.style( + f"\tAre you sure you want to delete the stack {self.stack_name}" + f" in the region {self.region} ?", + bold=True, + ), default=False, ) # Fetch the template using the stack-name @@ -143,16 +126,11 @@ def run(self): ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - # ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - - # docker_client = docker.from_env() - # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) - self.cf_utils = CfUtils(cloudformation_client) - is_deployed = self.cf_utils.has_stack(self.stack_name) + is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) if is_deployed: self.delete() @@ -160,4 +138,4 @@ def run(self): click.echo("\n") click.echo("delete complete") else: - click.echo("Error: The input stack {0} does not exist on Cloudformation".format(self.stack_name)) + click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py index 
82c56b6bb6..7e2ba5105c 100644 --- a/samcli/commands/delete/exceptions.py +++ b/samcli/commands/delete/exceptions.py @@ -12,3 +12,13 @@ def __init__(self, stack_name, msg): message_fmt = "Failed to delete the stack: {stack_name}, {msg}" super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class FetchTemplateFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to fetch the template for the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index db36f11ce3..8644a51445 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -5,7 +5,7 @@ import logging from botocore.exceptions import ClientError, BotoCoreError -from samcli.commands.delete.exceptions import DeleteFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError LOG = logging.getLogger(__name__) @@ -14,7 +14,7 @@ class CfUtils: def __init__(self, cloudformation_client): self._client = cloudformation_client - def has_stack(self, stack_name): + def has_stack(self, stack_name: str): """ Checks if a CloudFormation stack with given name exists @@ -27,6 +27,10 @@ def has_stack(self, stack_name): return False stack = resp["Stacks"][0] + # Note: Stacks with REVIEW_IN_PROGRESS can be deleted + # using delete_stack but get_template does not return + # the template_str for this stack restricting deletion of + # artifacts. 
return stack["StackStatus"] != "REVIEW_IN_PROGRESS" except ClientError as e: @@ -51,7 +55,7 @@ def has_stack(self, stack_name): LOG.error("Unable to get stack details.", exc_info=e) raise e - def get_stack_template(self, stack_name, stage): + def get_stack_template(self, stack_name: str, stage: str): """ Return the Cloudformation template of the given stack_name @@ -62,23 +66,22 @@ def get_stack_template(self, stack_name, stage): try: resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) if not resp["TemplateBody"]: - return None - + return "" return resp["TemplateBody"] except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.error("Failed to delete stack : %s", str(e)) - raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + LOG.error("Failed to fetch template for the stack : %s", str(e)) + raise FetchTemplateFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. 
Don't handle LOG.error("Unable to get stack details.", exc_info=e) raise e - def delete_stack(self, stack_name): + def delete_stack(self, stack_name: str): """ Delete the Cloudformation stack with the given stack_name diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 3fb7070d2b..c9a5e3f6f0 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -145,7 +145,7 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, remote_path: str, is_key=False): + def delete_artifact(self, remote_path: str, is_key: Optional[bool] = False): """ Deletes a given file from S3 :param remote_path: Path to the file that will be deleted @@ -161,7 +161,7 @@ def delete_artifact(self, remote_path: str, is_key=False): key = "{0}/{1}".format(self.prefix, remote_path) # Deleting Specific file with key - click.echo("- deleting S3 file " + key) + click.echo(f"- deleting S3 file {key}") resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) return resp["ResponseMetadata"] diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 36f32ae735..9e80a00d4a 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -1,7 +1,7 @@ from unittest.mock import patch, MagicMock, ANY, call from unittest import TestCase -from samcli.commands.delete.exceptions import DeleteFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError from botocore.exceptions import ClientError, BotoCoreError from samcli.lib.delete.cf_utils import CfUtils @@ -62,12 +62,12 @@ def test_cf_utils_get_stack_template_exception_client_error(self): operation_name="stack_status", ) ) - with self.assertRaises(DeleteFailedError): + with self.assertRaises(FetchTemplateFailedError): 
self.cf_utils.get_stack_template("test", "Original") def test_cf_utils_get_stack_template_exception_botocore(self): self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) - with self.assertRaises(DeleteFailedError): + with self.assertRaises(FetchTemplateFailedError): self.cf_utils.get_stack_template("test", "Original") def test_cf_utils_get_stack_template_exception(self): From 5779cd34c91b9f1aa1540362fab38da144e5f4e7 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 14:57:29 -0400 Subject: [PATCH 065/121] Added condition for resources that have deletionpolicy specified --- samcli/lib/package/artifact_exporter.py | 19 ++++++++++--------- samcli/lib/package/packageable_resources.py | 18 ++++++++++++++++-- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index d0372730b6..a1b2a98fe4 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -249,13 +249,14 @@ def delete(self, template_dict): resource_type = resource.get("Type", None) resource_dict = resource.get("Properties", {}) - - for exporter_class in self.resources_to_export: - if exporter_class.RESOURCE_TYPE != resource_type: - continue - if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE: - continue - # Delete code resources - exporter = exporter_class(self.uploaders, None) - exporter.delete(resource_id, resource_dict) + resource_deletion_policy = resource.get("DeletionPolicy", None) + if resource_deletion_policy != "Retain": + for exporter_class in self.resources_to_export: + if exporter_class.RESOURCE_TYPE != resource_type: + continue + if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE: + continue + # Delete code resources + exporter = exporter_class(self.uploaders, None) + exporter.delete(resource_id, resource_dict) return self.template_dict diff --git 
a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 1169e23ebb..aabe675e26 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -22,6 +22,7 @@ upload_local_image_artifacts, is_s3_protocol_url, is_path_value_valid, + is_ecr_url ) from samcli.commands._utils.resources import ( @@ -210,6 +211,14 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, {self.EXPORT_PROPERTY_CODE_KEY: uploaded_url}) + def delete(self, resource_id, resource_dict): + if resource_dict is None: + return + + remote_path = resource_dict[self.PROPERTY_NAME][self.EXPORT_PROPERTY_CODE_KEY] + if is_ecr_url(remote_path): + self.uploader.delete_artifact(remote_path, resource_id, self.PROPERTY_NAME) + class ResourceImage(Resource): """ @@ -252,7 +261,12 @@ def do_export(self, resource_id, resource_dict, parent_dir): set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) def delete(self, resource_id, resource_dict): - self.uploader.delete_artifact(resource_dict["ImageUri"], resource_id, self.PROPERTY_NAME) + if resource_dict is None: + return + + remote_path = resource_dict[self.PROPERTY_NAME] + if is_ecr_url(remote_path): + self.uploader.delete_artifact(remote_path, resource_id, self.PROPERTY_NAME) class ResourceWithS3UrlDict(ResourceZip): """ @@ -290,7 +304,7 @@ def delete(self, resource_id, resource_dict): return resource_path = resource_dict[self.PROPERTY_NAME] s3_bucket = resource_path[self.BUCKET_NAME_PROPERTY] - key = resource_path["Key"] + key = resource_path[self.OBJECT_KEY_PROPERTY] if not self.uploader.bucket_name: self.uploader.bucket_name = s3_bucket From bc9db140b2a84b7c4457a8eafc63eb9c23526fca Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 16:18:58 -0400 Subject: [PATCH 066/121] Small changes and fixes based on the comments --- samcli/commands/delete/command.py | 4 ++-- 
samcli/commands/delete/delete_context.py | 12 +++++------- samcli/lib/delete/cf_utils.py | 15 +++++++-------- samcli/lib/package/s3_uploader.py | 8 +++++--- 4 files changed, 19 insertions(+), 20 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 823e262bc6..266d093a36 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -36,7 +36,6 @@ ) @click.option( "--config-file", - required=False, help=( "The path and file name of the configuration file containing default parameter values to use. " "Its default value is 'samconfig.toml' in project directory. For more information about configuration files, " @@ -45,10 +44,10 @@ ), type=click.STRING, default="samconfig.toml", + show_default=True, ) @click.option( "--config-env", - required=False, help=( "The environment name specifying the default parameter values in the configuration file to use. " "Its default value is 'default'. For more information about configuration files, see: " @@ -56,6 +55,7 @@ ), type=click.STRING, default="default", + show_default=True, ) @aws_creds_options @common_options diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index a6d146d584..8f12402cde 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -70,9 +70,10 @@ def delete(self): """ Delete method calls for Cloudformation stacks and S3 and ECR artifacts """ - template_str = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template_str = template.get("TemplateBody", None) - if self.s3_bucket and self.s3_prefix: + if self.s3_bucket and self.s3_prefix and template_str: self.delete_artifacts_folder = confirm( click.style( "\tAre you sure you want to delete the folder" @@ -91,11 +92,10 @@ def delete(self): default=False, ) - click.echo("\n") # Delete the primary stack 
self.cf_utils.delete_stack(stack_name=self.stack_name) - click.echo(f"- deleting Cloudformation stack {self.stack_name}") + click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -134,8 +134,6 @@ def run(self): if is_deployed: self.delete() - - click.echo("\n") - click.echo("delete complete") + click.echo("\nDeleted successfully") else: click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 8644a51445..a78ed6d38b 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -4,6 +4,7 @@ import logging +from typing import Dict from botocore.exceptions import ClientError, BotoCoreError from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError @@ -14,7 +15,7 @@ class CfUtils: def __init__(self, cloudformation_client): self._client = cloudformation_client - def has_stack(self, stack_name: str): + def has_stack(self, stack_name: str) -> bool: """ Checks if a CloudFormation stack with given name exists @@ -31,7 +32,7 @@ def has_stack(self, stack_name: str): # using delete_stack but get_template does not return # the template_str for this stack restricting deletion of # artifacts. 
- return stack["StackStatus"] != "REVIEW_IN_PROGRESS" + return bool(stack["StackStatus"] != "REVIEW_IN_PROGRESS") except ClientError as e: # If a stack does not exist, describe_stacks will throw an @@ -55,7 +56,7 @@ def has_stack(self, stack_name: str): LOG.error("Unable to get stack details.", exc_info=e) raise e - def get_stack_template(self, stack_name: str, stage: str): + def get_stack_template(self, stack_name: str, stage: str) -> Dict: """ Return the Cloudformation template of the given stack_name @@ -66,8 +67,8 @@ def get_stack_template(self, stack_name: str, stage: str): try: resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) if not resp["TemplateBody"]: - return "" - return resp["TemplateBody"] + return {} + return dict(resp) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, @@ -86,11 +87,9 @@ def delete_stack(self, stack_name: str): Delete the Cloudformation stack with the given stack_name :param stack_name: Name or ID of the stack - :return: Status of deletion """ try: - resp = self._client.delete_stack(StackName=stack_name) - return resp + self._client.delete_stack(StackName=stack_name) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index c9a5e3f6f0..61a6988416 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -145,11 +145,13 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, remote_path: str, is_key: Optional[bool] = False): + def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: """ Deletes a given file from S3 :param remote_path: Path to the file that will be deleted :param is_key: If the given remote_path is the key or a file_name + + :return: metadata dict of the deleted object """ try: if not self.bucket_name: @@ -161,10 +163,10 @@ def 
delete_artifact(self, remote_path: str, is_key: Optional[bool] = False): key = "{0}/{1}".format(self.prefix, remote_path) # Deleting Specific file with key - click.echo(f"- deleting S3 file {key}") + click.echo(f"\t- Deleting S3 file {key}") resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) - return resp["ResponseMetadata"] + return dict(resp["ResponseMetadata"]) except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] From 0d58f89f563def47a74301cf0c92c87beb2cf706 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 30 Jun 2021 15:53:08 -0700 Subject: [PATCH 067/121] Add condition to managed bucket policy (#2999) --- samcli/lib/bootstrap/bootstrap.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 81c30c7748..eaed58d630 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -73,6 +73,9 @@ def _get_stack_template(): - "/*" Principal: Service: serverlessrepo.amazonaws.com + Condition: + StringEquals: + aws:SourceAccount: !Ref AWS::AccountId Outputs: SourceBucket: From 401a950f1b384c07892ba2d2a9ad76266e94ef46 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 19:31:40 -0400 Subject: [PATCH 068/121] Removed region prompt --- samcli/commands/delete/delete_context.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 8f12402cde..5a70fa9f07 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -40,8 +40,6 @@ def __enter__(self): click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING ) - if not self.region: - self.region = prompt(click.style("\tEnter region you want to delete from:", bold=True), 
type=click.STRING) return self def __exit__(self, *args): From 50ac3cbd72c983a430e0d7a2c5271f5d7af85701 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 30 Jun 2021 20:16:23 -0700 Subject: [PATCH 069/121] Update appveyor.yml to do docker login on both dockerhub and Public ECR (#3005) (#3006) Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> --- appveyor.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 606fe62d5f..1833c78b1f 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -170,7 +170,11 @@ for: # Runs only in Linux, logging docker hub when running canary and docker cred is available - sh: " if [[ -n $BY_CANARY ]] && [[ -n $DOCKER_USER ]] && [[ -n $DOCKER_PASS ]]; - then echo Logging in Docker Hub; echo $DOCKER_PASS | docker login --username $DOCKER_USER --password-stdin; + then echo Logging in Docker Hub; echo $DOCKER_PASS | docker login --username $DOCKER_USER --password-stdin registry-1.docker.io; + fi" + - sh: " + if [[ -n $BY_CANARY ]]; + then echo Logging in Public ECR; aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws; fi" - sh: "pytest -vv tests/integration" - sh: "pytest -vv tests/regression" From f1e05695802f0c1c9f9b1b5c42cda413dac91596 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 30 Jun 2021 20:17:45 -0700 Subject: [PATCH 070/121] chore: bump version to 1.25.0 (#3007) Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index 3fe00ac134..d49ef0cf7e 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.24.1" +__version__ = "1.25.0" From 224156330fa6e862b7d7f2b3567aaf53842e7528 Mon Sep 17 
00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Date: Wed, 30 Jun 2021 20:40:16 -0700 Subject: [PATCH 071/121] temp: reduce python testing matrix (#3008) --- appveyor.yml | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 1833c78b1f..b96017d0d3 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,26 +9,26 @@ environment: matrix: - - PYTHON_HOME: "C:\\Python36-x64" - PYTHON_VERSION: '3.6' - PYTHON_ARCH: '64' - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_37_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_36' - AWS_ECR: 'AWS_ECR_36' - APPVEYOR_CONSOLE_DISABLE_PTY: true - - - PYTHON_HOME: "C:\\Python37-x64" - PYTHON_VERSION: '3.7' - PYTHON_ARCH: '64' - RUN_SMOKE: 1 - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_36_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_37' - AWS_ECR: 'AWS_ECR_37' - APPVEYOR_CONSOLE_DISABLE_PTY: true + # - PYTHON_HOME: "C:\\Python36-x64" + # PYTHON_VERSION: '3.6' + # PYTHON_ARCH: '64' + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_37_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_36' + # AWS_ECR: 'AWS_ECR_36' + # APPVEYOR_CONSOLE_DISABLE_PTY: true + + # - PYTHON_HOME: "C:\\Python37-x64" + # PYTHON_VERSION: '3.7' + # PYTHON_ARCH: '64' + # RUN_SMOKE: 1 + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_36_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_37' + # AWS_ECR: 'AWS_ECR_37' + # APPVEYOR_CONSOLE_DISABLE_PTY: true - PYTHON_HOME: "C:\\Python38-x64" PYTHON_VERSION: '3.8' @@ -159,7 +159,7 @@ for: # Pre-dev Tests - "pip install -e \".[pre-dev]\"" - "pylint --rcfile .pylintrc samcli" - + # Dev Tests - "pip install -e \".[dev]\"" - "pytest --cov samcli --cov-report term-missing --cov-fail-under 94 tests/unit" From fe832185be09acb199b2a09ad73bf59e1553d131 Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Date: Wed, 30 Jun 2021 22:28:02 -0700 Subject: [PATCH 072/121] 
temp: disable testing against python 3.8, and enabled 3.7 (#3009) * temp: disable testing against python 3.8, and enabled 3.7 * temp: disable testing against python 3.8, and enabled 3.7 & 3.6 --- appveyor.yml | 52 ++++++++++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index b96017d0d3..e47ea65e81 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,38 +9,38 @@ environment: matrix: - # - PYTHON_HOME: "C:\\Python36-x64" - # PYTHON_VERSION: '3.6' - # PYTHON_ARCH: '64' - # NOSE_PARAMETERIZED_NO_WARN: 1 - # INSTALL_PY_37_PIP: 1 - # INSTALL_PY_38_PIP: 1 - # AWS_S3: 'AWS_S3_36' - # AWS_ECR: 'AWS_ECR_36' - # APPVEYOR_CONSOLE_DISABLE_PTY: true - - # - PYTHON_HOME: "C:\\Python37-x64" - # PYTHON_VERSION: '3.7' - # PYTHON_ARCH: '64' - # RUN_SMOKE: 1 - # NOSE_PARAMETERIZED_NO_WARN: 1 - # INSTALL_PY_36_PIP: 1 - # INSTALL_PY_38_PIP: 1 - # AWS_S3: 'AWS_S3_37' - # AWS_ECR: 'AWS_ECR_37' - # APPVEYOR_CONSOLE_DISABLE_PTY: true - - - PYTHON_HOME: "C:\\Python38-x64" - PYTHON_VERSION: '3.8' + - PYTHON_HOME: "C:\\Python36-x64" + PYTHON_VERSION: '3.6' + PYTHON_ARCH: '64' + NOSE_PARAMETERIZED_NO_WARN: 1 + INSTALL_PY_37_PIP: 1 + INSTALL_PY_38_PIP: 1 + AWS_S3: 'AWS_S3_36' + AWS_ECR: 'AWS_ECR_36' + APPVEYOR_CONSOLE_DISABLE_PTY: true + + - PYTHON_HOME: "C:\\Python37-x64" + PYTHON_VERSION: '3.7' PYTHON_ARCH: '64' RUN_SMOKE: 1 NOSE_PARAMETERIZED_NO_WARN: 1 INSTALL_PY_36_PIP: 1 - INSTALL_PY_37_PIP: 1 - AWS_S3: 'AWS_S3_38' - AWS_ECR: 'AWS_ECR_38' + INSTALL_PY_38_PIP: 1 + AWS_S3: 'AWS_S3_37' + AWS_ECR: 'AWS_ECR_37' APPVEYOR_CONSOLE_DISABLE_PTY: true + # - PYTHON_HOME: "C:\\Python38-x64" + # PYTHON_VERSION: '3.8' + # PYTHON_ARCH: '64' + # RUN_SMOKE: 1 + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_36_PIP: 1 + # INSTALL_PY_37_PIP: 1 + # AWS_S3: 'AWS_S3_38' + # AWS_ECR: 'AWS_ECR_38' + # APPVEYOR_CONSOLE_DISABLE_PTY: true + for: - matrix: From cc806a28968bae5b0e63845a767307383082458b Mon Sep 17 00:00:00 2001 From: 
Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Thu, 1 Jul 2021 00:23:18 -0700 Subject: [PATCH 073/121] fix: enable all runtimes in python testing matrix (#3011) * revert: enable all runtimes in python testing matrix * fix indentation for yml --- appveyor.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index e47ea65e81..ed730e0a24 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -30,16 +30,16 @@ environment: AWS_ECR: 'AWS_ECR_37' APPVEYOR_CONSOLE_DISABLE_PTY: true - # - PYTHON_HOME: "C:\\Python38-x64" - # PYTHON_VERSION: '3.8' - # PYTHON_ARCH: '64' - # RUN_SMOKE: 1 - # NOSE_PARAMETERIZED_NO_WARN: 1 - # INSTALL_PY_36_PIP: 1 - # INSTALL_PY_37_PIP: 1 - # AWS_S3: 'AWS_S3_38' - # AWS_ECR: 'AWS_ECR_38' - # APPVEYOR_CONSOLE_DISABLE_PTY: true + - PYTHON_HOME: "C:\\Python38-x64" + PYTHON_VERSION: '3.8' + PYTHON_ARCH: '64' + RUN_SMOKE: 1 + NOSE_PARAMETERIZED_NO_WARN: 1 + INSTALL_PY_36_PIP: 1 + INSTALL_PY_37_PIP: 1 + AWS_S3: 'AWS_S3_38' + AWS_ECR: 'AWS_ECR_38' + APPVEYOR_CONSOLE_DISABLE_PTY: true for: - From 0a38340649c3cdf21bba5ba8d7f048ff558d5edf Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Sat, 3 Jul 2021 16:59:41 -0400 Subject: [PATCH 074/121] Added unit tests for ecr delete method and typing for methods --- samcli/lib/delete/cf_utils.py | 4 +- samcli/lib/package/ecr_uploader.py | 17 ++++---- samcli/lib/package/packageable_resources.py | 28 +++++++++--- tests/unit/lib/delete/test_cf_utils.py | 5 ++- tests/unit/lib/delete/test_utils.py | 8 ---- tests/unit/lib/package/test_ecr_uploader.py | 47 ++++++++++++++++++++- 6 files changed, 84 insertions(+), 25 deletions(-) delete mode 100644 tests/unit/lib/delete/test_utils.py diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 7d8be75601..8e257a0cd9 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -119,4 +119,6 @@ def wait_for_delete(self, stack_name): status = resp["Status"] 
reason = resp["StatusReason"] - raise DeleteFailedError(stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason)) from ex + raise DeleteFailedError( + stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason) + ) from ex diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 4f8b0246d0..402ebbf4cf 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -16,7 +16,7 @@ DockerLoginFailedError, ECRAuthorizationError, ImageNotFoundError, - DeleteArtifactFailedError + DeleteArtifactFailedError, ) from samcli.lib.package.image_utils import tag_translation from samcli.lib.package.stream_cursor_utils import cursor_up, cursor_left, cursor_down, clear_line @@ -90,27 +90,28 @@ def upload(self, image, resource_name): return f"{repository}:{_tag}" - def delete_artifact(self, image_uri, resource_id, property_name): + def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): try: repo_image_tag = image_uri.split("/")[1].split(":") repository = repo_image_tag[0] image_tag = repo_image_tag[1] - resp = self.ecr_client.batch_delete_image(repositoryName=repository, + resp = self.ecr_client.batch_delete_image( + repositoryName=repository, imageIds=[ - { - 'imageTag': image_tag - }, - ] + {"imageTag": image_tag}, + ], ) if resp["failures"]: + # Image not found image_details = resp["failures"][0] if image_details["failureCode"] == "ImageNotFound": LOG.debug("ImageNotFound Exception : ") raise ImageNotFoundError(resource_id, property_name) - click.echo("- deleting ECR image {0} in repository {1}".format(image_tag, repository)) + click.echo(f"- Deleting ECR image {image_tag} in repository {repository}") except botocore.exceptions.ClientError as ex: + # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex 
# TODO: move this to a generic class to allow for streaming logs back from docker. diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index aabe675e26..cc2fe999d9 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -22,7 +22,7 @@ upload_local_image_artifacts, is_s3_protocol_url, is_path_value_valid, - is_ecr_url + is_ecr_url, ) from samcli.commands._utils.resources import ( @@ -159,16 +159,18 @@ def do_export(self, resource_id, resource_dict, parent_dir): set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) def delete(self, resource_id, resource_dict): - + """ + Delete the S3 artifact using S3 url referenced by PROPERTY_NAME + """ if resource_dict is None: return resource_path = resource_dict[self.PROPERTY_NAME] parsed_s3_url = self.uploader.parse_s3_url(resource_path) - print(parsed_s3_url["Key"]) if not self.uploader.bucket_name: self.uploader.bucket_name = parsed_s3_url["Bucket"] self.uploader.delete_artifact(parsed_s3_url["Key"], True) + class ResourceImageDict(Resource): """ Base class representing a CFN Image based resource that can be exported. 
@@ -212,12 +214,17 @@ def do_export(self, resource_id, resource_dict, parent_dir): set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, {self.EXPORT_PROPERTY_CODE_KEY: uploaded_url}) def delete(self, resource_id, resource_dict): + """ + Delete the ECR artifact using ECR url in PROPERTY_NAME referenced by EXPORT_PROPERTY_CODE_KEY + """ if resource_dict is None: return remote_path = resource_dict[self.PROPERTY_NAME][self.EXPORT_PROPERTY_CODE_KEY] if is_ecr_url(remote_path): - self.uploader.delete_artifact(remote_path, resource_id, self.PROPERTY_NAME) + self.uploader.delete_artifact( + image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME + ) class ResourceImage(Resource): @@ -261,12 +268,18 @@ def do_export(self, resource_id, resource_dict, parent_dir): set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) def delete(self, resource_id, resource_dict): + """ + Delete the ECR artifact using ECR url referenced by property_name + """ if resource_dict is None: return remote_path = resource_dict[self.PROPERTY_NAME] if is_ecr_url(remote_path): - self.uploader.delete_artifact(remote_path, resource_id, self.PROPERTY_NAME) + self.uploader.delete_artifact( + image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME + ) + class ResourceWithS3UrlDict(ResourceZip): """ @@ -299,7 +312,10 @@ def do_export(self, resource_id, resource_dict, parent_dir): set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url) def delete(self, resource_id, resource_dict): - + """ + Delete the S3 artifact using S3 url in the dict PROPERTY_NAME + using the bucket at BUCKET_NAME_PROPERTY and key at OBJECT_KEY_PROPERTY + """ if resource_dict is None: return resource_path = resource_dict[self.PROPERTY_NAME] diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 8e57407231..b9bc00faba 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ 
b/tests/unit/lib/delete/test_cf_utils.py @@ -5,6 +5,7 @@ from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.lib.delete.cf_utils import CfUtils + class MockDeleteWaiter: def __init__(self, ex=None): self.ex = ex @@ -14,6 +15,7 @@ def wait(self, StackName, WaiterConfig): raise self.ex return + class TestCfUtils(TestCase): def setUp(self): self.session = MagicMock() @@ -101,6 +103,7 @@ def test_cf_utils_wait_for_delete_exception(self): reason="unit-test", last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, ) - )) + ) + ) with self.assertRaises(DeleteFailedError): self.cf_utils.wait_for_delete("test") diff --git a/tests/unit/lib/delete/test_utils.py b/tests/unit/lib/delete/test_utils.py deleted file mode 100644 index c39f176d5c..0000000000 --- a/tests/unit/lib/delete/test_utils.py +++ /dev/null @@ -1,8 +0,0 @@ -from unittest import TestCase - -from samcli.lib.delete.utils import get_cf_template_name - -class TestCfUtils(TestCase): - - def test_utils(self): - self.assertEqual(get_cf_template_name("hello world!", "template"), "fc3ff98e8c6a0d3087d515c0473f8677.template") \ No newline at end of file diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 91798d43f9..a66207efca 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -5,7 +5,13 @@ from docker.errors import APIError, BuildError from parameterized import parameterized -from samcli.commands.package.exceptions import DockerLoginFailedError, DockerPushFailedError, ECRAuthorizationError +from samcli.commands.package.exceptions import ( + DockerLoginFailedError, + DockerPushFailedError, + ECRAuthorizationError, + ImageNotFoundError, + DeleteArtifactFailedError, +) from samcli.lib.package.ecr_uploader import ECRUploader from samcli.lib.utils.stream_writer import StreamWriter @@ -23,6 +29,9 @@ def setUp(self): BuildError.__name__: {"reason": "mock_reason", 
"build_log": "mock_build_log"}, APIError.__name__: {"message": "mock message"}, } + self.image_uri = "900643008914.dkr.ecr.us-east-1.amazonaws.com/" + self.ecr_repo + ":" + self.tag + self.property_name = "AWS::Serverless::Function" + self.resource_id = "HelloWorldFunction" def test_ecr_uploader_init(self): ecr_uploader = ECRUploader( @@ -166,3 +175,39 @@ def test_upload_failure_while_streaming(self): ecr_uploader.login = MagicMock() with self.assertRaises(DockerPushFailedError): ecr_uploader.upload(image, resource_name="HelloWorldFunction") + + def test_delete_artifact_no_image_error(self): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image.return_value = { + "failures": [{"imageId": {"imageTag": self.tag}, "failureCode": "ImageNotFound"}] + } + + with self.assertRaises(ImageNotFoundError): + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + def test_delete_artifact_client_error(self): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "mock client error"}}, operation_name="batch_delete_image" + ) + ) + + with self.assertRaises(DeleteArtifactFailedError): + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) From aaa1b05003eebb12a28c78200bad45b8aa4469c7 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 09:34:59 -0400 Subject: [PATCH 075/121] Reformatted delete_context and added option to skip user prompts --- samcli/commands/delete/command.py | 30 +++-- 
samcli/commands/delete/delete_context.py | 135 +++++++++++++-------- samcli/lib/package/ecr_uploader.py | 2 +- tests/unit/commands/delete/test_command.py | 3 + 4 files changed, 111 insertions(+), 59 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 266d093a36..4e0b9ec6bc 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -57,34 +57,46 @@ default="default", show_default=True, ) +@click.option( + "--force", + help=("Specify this flag to allow SAM CLI to skip through the guided prompts" ""), + is_flag=True, + type=click.BOOL, + required=False, +) @aws_creds_options @common_options @pass_context @check_newer_version @print_cmdline_args -def cli( - ctx, - stack_name: str, - config_file: str, - config_env: str, -): +def cli(ctx, stack_name: str, config_file: str, config_env: str, force: bool): """ `sam delete` command entry point """ # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing do_cli( - stack_name=stack_name, region=ctx.region, config_file=config_file, config_env=config_env, profile=ctx.profile + stack_name=stack_name, + region=ctx.region, + config_file=config_file, + config_env=config_env, + profile=ctx.profile, + force=force, ) # pragma: no cover -def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str): +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str, force: bool): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext with DeleteContext( - stack_name=stack_name, region=region, profile=profile, config_file=config_file, config_env=config_env + stack_name=stack_name, + region=region, + profile=profile, + config_file=config_file, + config_env=config_env, + force=force, ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py 
index 9b1d5210b5..597f8c64e0 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -25,18 +25,21 @@ CONFIG_SECTION = "parameters" TEMPLATE_STAGE = "Original" + class DeleteContext: - def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str): + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, force: bool): self.stack_name = stack_name self.region = region self.profile = profile self.config_file = config_file self.config_env = config_env + self.force = force self.s3_bucket = None self.s3_prefix = None self.cf_utils = None self.s3_uploader = None self.uploaders = None + self.template = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None @@ -48,6 +51,7 @@ def __enter__(self): click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING ) + self.init_clients() return self def __exit__(self, *args): @@ -72,43 +76,85 @@ def parse_config_file(self): self.s3_bucket = config_options.get("s3_bucket", None) self.s3_prefix = config_options.get("s3_prefix", None) - def delete(self): + def init_clients(self): """ - Delete method calls for Cloudformation stacks and S3 and ECR artifacts + Initialize all the clients being used by sam delete. 
""" - template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) - template_str = template.get("TemplateBody", None) - template_dict = yaml_parse(template_str) + boto_config = get_boto_config_with_user_agent() - if self.s3_bucket and self.s3_prefix and template_str: - self.delete_artifacts_folder = confirm( - click.style( - "\tAre you sure you want to delete the folder" - + f" {self.s3_prefix} in S3 which contains the artifacts?", - bold=True, - ), - default=False, - ) + # Define cf_client based on the region as different regions can have same stack-names + cloudformation_client = boto3.client( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) + + s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) + ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + + self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) + + docker_client = docker.from_env() + ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + + self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + self.cf_utils = CfUtils(cloudformation_client) + self.template = Template(None, None, self.uploaders, None) + + def guided_prompts(self): + """ + Guided prompts asking customer to delete artifacts + """ + # Note: s3_bucket and s3_prefix information is only + # available if a local toml file is present or if + # this information is obtained from the template resources and so if this + # information is not found, warn the customer that S3 artifacts + # will need to be manually deleted. 
+ + if not self.force and self.s3_bucket: + if self.s3_prefix: + self.delete_artifacts_folder = confirm( + click.style( + "\tAre you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ) if not self.delete_artifacts_folder: - with mktempfile() as temp_file: - self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") self.delete_cf_template_file = confirm( click.style( "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True ), default=False, ) + elif self.s3_bucket: + if self.s3_prefix: + self.delete_artifacts_folder = True + else: + self.delete_cf_template_file = True + + def delete(self): + """ + Delete method calls for Cloudformation stacks and S3 and ECR artifacts + """ + # Fetch the template using the stack-name + template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template_str = template.get("TemplateBody", None) + template_dict = yaml_parse(template_str) + + # Get the cloudformation template name using template_str + with mktempfile() as temp_file: + self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + + self.guided_prompts() # Delete the primary stack + click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") self.cf_utils.delete_stack(stack_name=self.stack_name) self.cf_utils.wait_for_delete(self.stack_name) - - click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") - + # Delete the artifacts - template = Template(None, None, self.uploaders, None) - template.delete(template_dict) - + self.template.delete(template_dict) + # Delete the CF template file in S3 if self.delete_cf_template_file: self.s3_uploader.delete_artifact(remote_path=self.cf_template_file_name) @@ -117,39 +163,30 @@ def delete(self): elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() + else: + click.secho( + "\nWarning: 
s3_bucket and s3_prefix information cannot be obtained," + " delete the files manually if required", + fg="yellow", + ) + def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. """ - delete_stack = confirm( - click.style( - f"\tAre you sure you want to delete the stack {self.stack_name}" + f" in the region {self.region} ?", - bold=True, - ), - default=False, - ) - # Fetch the template using the stack-name - if delete_stack and self.region: - boto_config = get_boto_config_with_user_agent() - - # Define cf_client based on the region as different regions can have same stack-names - cloudformation_client = boto3.client( - "cloudformation", region_name=self.region if self.region else None, config=boto_config + if not self.force: + delete_stack = confirm( + click.style( + f"\tAre you sure you want to delete the stack {self.stack_name}" + + f" in the region {self.region} ?", + bold=True, + ), + default=False, ) - s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) - - self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - - docker_client = docker.from_env() - ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) - - self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) - self.cf_utils = CfUtils(cloudformation_client) - + if self.force or delete_stack: is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) - + # Check if the provided stack-name exists if is_deployed: self.delete() click.echo("\nDeleted successfully") diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 402ebbf4cf..d046c83e68 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -108,7 +108,7 @@ def delete_artifact(self, image_uri: str, resource_id: str, 
property_name: str): LOG.debug("ImageNotFound Exception : ") raise ImageNotFoundError(resource_id, property_name) - click.echo(f"- Deleting ECR image {image_tag} in repository {repository}") + click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") except botocore.exceptions.ClientError as ex: # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index 4e268688ee..9a17ec6114 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -22,6 +22,7 @@ def setUp(self): self.s3_prefix = "s3-prefix" self.region = None self.profile = None + self.force = None self.config_env = "mock-default-env" self.config_file = "mock-default-filename" MOCK_SAM_CONFIG.reset_mock() @@ -39,6 +40,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): config_file=self.config_file, config_env=self.config_env, profile=self.profile, + force=self.force ) mock_delete_context.assert_called_with( @@ -47,6 +49,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): profile=self.profile, config_file=self.config_file, config_env=self.config_env, + force=self.force ) context_mock.run.assert_called_with() From e2e85a906302562ce2537cdacd18f3bcfdb2559a Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 09:36:09 -0400 Subject: [PATCH 076/121] Removed return type from artifact_exporter for delete method --- samcli/lib/package/artifact_exporter.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 2a8f484d87..a42181f22a 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -238,10 +238,13 @@ def export(self) -> Dict: return self.template_dict def delete(self, template_dict): + """ + Deletes all the artifacts 
referenced by the given Cloudformation template + """ self.template_dict = template_dict if "Resources" not in self.template_dict: - return self.template_dict + return self._apply_global_values() @@ -259,4 +262,4 @@ def delete(self, template_dict): # Delete code resources exporter = exporter_class(self.uploaders, None) exporter.delete(resource_id, resource_dict) - return self.template_dict + From c98e6ee6e920a50c5b5fb1dfea90e3b300fa15a0 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 13:41:03 -0400 Subject: [PATCH 077/121] Added unit tests for artifact_exporter and delete_context --- samcli/commands/delete/delete_context.py | 1 + samcli/lib/package/artifact_exporter.py | 1 - tests/unit/commands/delete/test_command.py | 14 +---- .../commands/delete/test_delete_context.py | 53 +++++++++++++++++++ .../lib/package/test_artifact_exporter.py | 35 ++++++++++++ 5 files changed, 91 insertions(+), 13 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 597f8c64e0..ae4e7444dd 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -90,6 +90,7 @@ def init_clients(self): s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + self.region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) docker_client = docker.from_env() diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index a42181f22a..dea0e6d960 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -262,4 +262,3 @@ def delete(self, template_dict): # Delete code resources exporter = exporter_class(self.uploaders, None) 
exporter.delete(resource_id, resource_dict) - diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index 9a17ec6114..73ea9e3f19 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -5,15 +5,6 @@ from tests.unit.cli.test_cli_config_file import MockContext -def get_mock_sam_config(): - mock_sam_config = MagicMock() - mock_sam_config.exists = MagicMock(return_value=True) - return mock_sam_config - - -MOCK_SAM_CONFIG = get_mock_sam_config() - - class TestDeleteCliCommand(TestCase): def setUp(self): @@ -25,7 +16,6 @@ def setUp(self): self.force = None self.config_env = "mock-default-env" self.config_file = "mock-default-filename" - MOCK_SAM_CONFIG.reset_mock() @patch("samcli.commands.delete.command.click") @patch("samcli.commands.delete.delete_context.DeleteContext") @@ -40,7 +30,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): config_file=self.config_file, config_env=self.config_env, profile=self.profile, - force=self.force + force=self.force, ) mock_delete_context.assert_called_with( @@ -49,7 +39,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): profile=self.profile, config_file=self.config_file, config_env=self.config_env, - force=self.force + force=self.force, ) context_mock.run.assert_called_with() diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index e69de29bb2..4d84ed0561 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -0,0 +1,53 @@ +from unittest import TestCase +from unittest.mock import patch, call, MagicMock + +import click + +from samcli.commands.delete.delete_context import DeleteContext +from samcli.cli.cli_config_file import TomlProvider + + +class TestDeleteContext(TestCase): + @patch.object(DeleteContext, "parse_config_file", MagicMock()) + @patch.object(DeleteContext, 
"init_clients", MagicMock()) + def test_delete_context_enter(self): + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + force=True, + ) as delete_context: + self.assertEqual(delete_context.parse_config_file.call_count, 1) + self.assertEqual(delete_context.init_clients.call_count, 1) + + @patch.object( + TomlProvider, + "__call__", + MagicMock( + return_value=( + { + "stack_name": "test", + "region": "us-east-1", + "profile": "developer", + "s3_bucket": "s3-bucket", + "s3_prefix": "s3-prefix", + } + ) + ), + ) + def test_delete_context_parse_config_file(self): + with DeleteContext( + stack_name=None, + region=None, + config_file="samconfig.toml", + config_env="default", + profile=None, + force=True, + ) as delete_context: + self.assertEqual(delete_context.stack_name, "test") + self.assertEqual(delete_context.region, "us-east-1") + self.assertEqual(delete_context.profile, "developer") + self.assertEqual(delete_context.s3_bucket, "s3-bucket") + self.assertEqual(delete_context.s3_prefix, "s3-prefix") diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index f7aceafef1..52a450f586 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -1377,3 +1377,38 @@ def example_yaml_template(self): Timeout: 20 Runtime: nodejs4.3 """ + + def test_template_delete(self): + template_str = self.example_yaml_template() + + resource_type1_class = Mock() + resource_type1_class.RESOURCE_TYPE = "resource_type1" + resource_type1_class.ARTIFACT_TYPE = ZIP + resource_type1_class.EXPORT_DESTINATION = Destination.S3 + resource_type1_instance = Mock() + resource_type1_class.return_value = resource_type1_instance + resource_type2_class = Mock() + resource_type2_class.RESOURCE_TYPE = "resource_type2" + resource_type2_class.ARTIFACT_TYPE = ZIP + resource_type2_class.EXPORT_DESTINATION = 
Destination.S3 + resource_type2_instance = Mock() + resource_type2_class.return_value = resource_type2_instance + + resources_to_export = [resource_type1_class, resource_type2_class] + + properties = {"foo": "bar"} + template_dict = { + "Resources": { + "Resource1": {"Type": "resource_type1", "Properties": properties}, + "Resource2": {"Type": "resource_type2", "Properties": properties}, + "Resource3": {"Type": "some-other-type", "Properties": properties}, + } + } + + template_exporter = Template(None, None, self.uploaders_mock, None, resources_to_export) + template_exporter.delete(template_dict) + + resource_type1_class.assert_called_once_with(self.uploaders_mock, None) + resource_type1_instance.delete.assert_called_once_with("Resource1", mock.ANY) + resource_type2_class.assert_called_once_with(self.uploaders_mock, None) + resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY) From 17a427a5e15a28d979ee27d2a128cebe52f8ecfc Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 19:06:21 -0400 Subject: [PATCH 078/121] Added more unit tests for delete_context and artifact_exporter --- .../commands/delete/test_delete_context.py | 40 ++++++++++++++++++- .../lib/package/test_artifact_exporter.py | 18 +++++++-- 2 files changed, 54 insertions(+), 4 deletions(-) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 4d84ed0561..b7f230e7ae 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -5,7 +5,8 @@ from samcli.commands.delete.delete_context import DeleteContext from samcli.cli.cli_config_file import TomlProvider - +from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.package.s3_uploader import S3Uploader class TestDeleteContext(TestCase): @patch.object(DeleteContext, "parse_config_file", MagicMock()) @@ -51,3 +52,40 @@ def test_delete_context_parse_config_file(self): 
self.assertEqual(delete_context.profile, "developer") self.assertEqual(delete_context.s3_bucket, "s3-bucket") self.assertEqual(delete_context.s3_prefix, "s3-prefix") + + @patch.object( + TomlProvider, + "__call__", + MagicMock( + return_value=( + { + "stack_name": "test", + "region": "us-east-1", + "profile": "developer", + "s3_bucket": "s3-bucket", + "s3_prefix": "s3-prefix", + } + ) + ), + ) + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) + def test_delete_context_valid_execute_run(self): + with DeleteContext( + stack_name=None, + region=None, + config_file="samconfig.toml", + config_env="default", + profile=None, + force=True, + ) as delete_context: + delete_context.run() + + self.assertEqual(CfUtils.has_stack.call_count, 1) + self.assertEqual(CfUtils.get_stack_template.call_count, 1) + self.assertEqual(CfUtils.delete_stack.call_count, 1) + self.assertEqual(CfUtils.wait_for_delete.call_count, 1) + self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 52a450f586..36430ef44a 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -7,7 +7,7 @@ from contextlib import contextmanager, closing from unittest import mock -from unittest.mock import patch, Mock +from unittest.mock import patch, Mock, MagicMock from samcli.commands.package.exceptions import ExportFailedError from samcli.lib.package.s3_uploader import S3Uploader @@ -56,7 +56,7 @@ class TestArtifactExporter(unittest.TestCase): def setUp(self): - self.s3_uploader_mock = Mock() + self.s3_uploader_mock = MagicMock() 
self.s3_uploader_mock.s3.meta.endpoint_url = "https://s3.some-valid-region.amazonaws.com" self.ecr_uploader_mock = Mock() @@ -411,6 +411,10 @@ class MockResource(ResourceZip): self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + self.s3_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.assertEqual(self.s3_uploader_mock.delete_artifact.call_count, 1) + @patch("samcli.lib.package.packageable_resources.upload_local_image_artifacts") def test_resource_lambda_image(self, upload_local_image_artifacts_mock): # Property value is a path to an image @@ -1393,6 +1397,12 @@ def test_template_delete(self): resource_type2_class.EXPORT_DESTINATION = Destination.S3 resource_type2_instance = Mock() resource_type2_class.return_value = resource_type2_instance + resource_type3_class = Mock() + resource_type3_class.RESOURCE_TYPE = "resource_type3" + resource_type3_class.ARTIFACT_TYPE = ZIP + resource_type3_class.EXPORT_DESTINATION = Destination.S3 + resource_type3_instance = Mock() + resource_type3_class.return_value = resource_type3_instance resources_to_export = [resource_type1_class, resource_type2_class] @@ -1401,7 +1411,7 @@ def test_template_delete(self): "Resources": { "Resource1": {"Type": "resource_type1", "Properties": properties}, "Resource2": {"Type": "resource_type2", "Properties": properties}, - "Resource3": {"Type": "some-other-type", "Properties": properties}, + "Resource3": {"Type": "some-other-type", "Properties": properties, "DeletionPolicy": "Retain"}, } } @@ -1412,3 +1422,5 @@ def test_template_delete(self): resource_type1_instance.delete.assert_called_once_with("Resource1", mock.ANY) resource_type2_class.assert_called_once_with(self.uploaders_mock, None) resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY) + resource_type3_class.assert_not_called() + resource_type3_instance.delete.assert_not_called() From ac4e485b0f3eaa915262caed8cd570529a19adf1 Mon Sep 17 00:00:00 2001 From: 
Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Date: Tue, 6 Jul 2021 09:37:06 -0700 Subject: [PATCH 079/121] chore: update to aws-sam-translator 1.37.0 (#3019) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 18 +++++++++--------- .../models/function_with_mq_virtual_host.yaml | 19 +++++++++++++++++++ 3 files changed, 29 insertions(+), 10 deletions(-) create mode 100644 tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml diff --git a/requirements/base.txt b/requirements/base.txt index ab432ff159..25efa93b05 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ boto3~=1.14 jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=1.7.2 -aws-sam-translator==1.36.0 +aws-sam-translator==1.37.0 #docker minor version updates can include breaking changes. Auto update micro version only. docker~=4.2.0 dateparser~=0.7 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index a7a92e25a4..a2f725e5fd 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile +# This file is autogenerated by pip-compile with python 3.7 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/reproducible-linux.txt @@ -17,10 +17,10 @@ aws-lambda-builders==1.4.0 \ --hash=sha256:5d4e4ecb3d3290f0eec1f62b7b0d9d6b91160ae71447d95899eede392d05f75f \ --hash=sha256:d32f79cf67b189a7598793f69797f284b2eb9a9fada562175b1e854187f95aed # via aws-sam-cli (setup.py) -aws-sam-translator==1.36.0 \ - --hash=sha256:4195ae8196f04803e7f0384a2b5ccd8c2b06ce0d8dc408aa1f1ce96c23bcf39d \ - --hash=sha256:f7d51b661fe1f5613a882f4733d1c92eff4dac36a076eafd18031d209b178695 \ - --hash=sha256:fa1b990d9329d19052e7b91cf0b19371ed9d31a529054b616005884cd662b584 +aws-sam-translator==1.37.0 \ + --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ + 
--hash=sha256:26e4866627e4284afc367bee2bd04d3cf23cecc8ff879b419457715a738395a9 \ + --hash=sha256:6884d942a815450637bac48e297996df2dacc27077d25ced09d8e9ce1f6a585c # via aws-sam-cli (setup.py) binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ @@ -88,10 +88,6 @@ itsdangerous==1.1.0 \ --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 # via flask -jinja2-time==0.2.0 \ - --hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \ - --hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa - # via cookiecutter jinja2==2.11.3 \ --hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \ --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 @@ -99,6 +95,10 @@ jinja2==2.11.3 \ # cookiecutter # flask # jinja2-time +jinja2-time==0.2.0 \ + --hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \ + --hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa + # via cookiecutter jmespath==0.10.0 \ --hash=sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9 \ --hash=sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f diff --git a/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml b/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml new file mode 100644 index 0000000000..b5d2c62085 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml @@ -0,0 +1,19 @@ +Resources: + MQFunction: + Type: 'AWS::Serverless::Function' + Properties: + CodeUri: s3://sam-demo-bucket/queues.zip + Handler: queue.mq_handler + Runtime: python2.7 + Events: + MyMQQueue: + Type: MQ + Properties: + Broker: 
arn:aws:mq:us-east-2:123456789012:broker:MyBroker:b-1234a5b6-78cd-901e-2fgh-3i45j6k178l9 + Queues: + - "Queue1" + SourceAccessConfigurations: + - Type: BASIC_AUTH + URI: arn:aws:secretsmanager:us-west-2:123456789012:secret:my-path/my-secret-name-1a2b3c + - Type: VIRTUAL_HOST + URI: vhost_name \ No newline at end of file From a1e5c92c1d47ab7b11158b3ef2d7efdb67300cbb Mon Sep 17 00:00:00 2001 From: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Date: Tue, 6 Jul 2021 09:46:47 -0700 Subject: [PATCH 080/121] chore: bump version to 1.26.0 (#3020) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index d49ef0cf7e..1c484dccfd 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.25.0" +__version__ = "1.26.0" From e577d7f65ac53de7ba263d03f2e73f786bd7f0f7 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 6 Jul 2021 15:42:51 -0400 Subject: [PATCH 081/121] Added more unit tests for delete_context and artifact_exporter --- samcli/commands/delete/delete_context.py | 3 +- samcli/lib/package/ecr_uploader.py | 4 +- .../commands/delete/test_delete_context.py | 138 ++++++++++++++++++ .../lib/package/test_artifact_exporter.py | 10 +- 4 files changed, 152 insertions(+), 3 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index ae4e7444dd..13577c9774 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -164,7 +164,8 @@ def delete(self): elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - else: + # If s3_bucket information is not available + elif not self.s3_bucket: click.secho( "\nWarning: s3_bucket and s3_prefix information cannot be obtained," " delete the files manually if required", diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index d046c83e68..8306e000d0 
100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -105,13 +105,15 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): # Image not found image_details = resp["failures"][0] if image_details["failureCode"] == "ImageNotFound": - LOG.debug("ImageNotFound Exception : ") + LOG.error("ImageNotFound Exception : ") raise ImageNotFoundError(resource_id, property_name) + LOG.debug("Deleting ECR image with tag %s", image_tag) click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") except botocore.exceptions.ClientError as ex: # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException + LOG.error("DeleteArtifactFailedError Exception : %s", str(ex)) raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex # TODO: move this to a generic class to allow for streaming logs back from docker. diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index b7f230e7ae..e353dc17d4 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -8,7 +8,26 @@ from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.package.s3_uploader import S3Uploader + class TestDeleteContext(TestCase): + @patch("samcli.commands.deploy.guided_context.click.echo") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) + def test_delete_context_stack_does_not_exist(self, patched_click_echo): + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + force=True, + ) as delete_context: + + delete_context.run() + expected_click_echo_calls = [ + call(f"Error: The input stack test does not exist on Cloudformation"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + @patch.object(DeleteContext, 
"parse_config_file", MagicMock()) @patch.object(DeleteContext, "init_clients", MagicMock()) def test_delete_context_enter(self): @@ -89,3 +108,122 @@ def test_delete_context_valid_execute_run(self): self.assertEqual(CfUtils.delete_stack.call_count, 1) self.assertEqual(CfUtils.wait_for_delete.call_count, 1) self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) + + @patch("samcli.commands.deploy.guided_context.click.secho") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + def test_delete_context_no_s3_bucket(self, patched_click_secho): + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + force=True, + ) as delete_context: + + delete_context.run() + expected_click_secho_calls = [ + call( + "\nWarning: s3_bucket and s3_prefix information cannot be obtained," + " delete the files manually if required", + fg="yellow", + ), + ] + self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) + + @patch("samcli.commands.delete.delete_context.confirm") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_artifact", MagicMock()) + def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confirm): + + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + force=None, + ) as delete_context: + patched_confirm.side_effect = [True, False, True] + 
delete_context.cf_template_file_name = "hello.template" + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + # Now to check for all the defaults on confirmations. + expected_confirmation_calls = [ + call( + click.style( + f"\tAre you sure you want to delete the stack test" + f" in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tAre you sure you want to delete the folder" + + f" s3_prefix in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you want to delete the template file b10a8db164e0754105b7a99be72e3fe5.template in S3?", + bold=True, + ), + default=False, + ), + ] + + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + self.assertFalse(delete_context.delete_artifacts_folder) + self.assertTrue(delete_context.delete_cf_template_file) + + @patch("samcli.commands.delete.delete_context.confirm") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_artifact", MagicMock()) + def test_guided_prompts_s3_bucket_present_no_prefix_execute_run(self, patched_confirm): + + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + force=None, + ) as delete_context: + patched_confirm.side_effect = [True, True] + delete_context.cf_template_file_name = "hello.template" + delete_context.s3_bucket = "s3_bucket" + + delete_context.run() + # Now to check for all the defaults on confirmations. 
+ expected_confirmation_calls = [ + call( + click.style( + f"\tAre you sure you want to delete the stack test" + f" in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you want to delete the template file b10a8db164e0754105b7a99be72e3fe5.template in S3?", + bold=True, + ), + default=False, + ), + ] + + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + self.assertTrue(delete_context.delete_cf_template_file) diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 36430ef44a..1167876ece 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -414,7 +414,7 @@ class MockResource(ResourceZip): self.s3_uploader_mock.delete_artifact = MagicMock() resource.delete(resource_id, resource_dict) self.assertEqual(self.s3_uploader_mock.delete_artifact.call_count, 1) - + @patch("samcli.lib.package.packageable_resources.upload_local_image_artifacts") def test_resource_lambda_image(self, upload_local_image_artifacts_mock): # Property value is a path to an image @@ -440,6 +440,10 @@ class MockResource(ResourceImage): self.assertEqual(resource_dict[resource.PROPERTY_NAME], ecr_url) + self.ecr_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.assertEqual(self.ecr_uploader_mock.delete_artifact.call_count, 1) + def test_lambda_image_resource_package_success(self): # Property value is set to an image @@ -750,6 +754,10 @@ class MockResource(ResourceWithS3UrlDict): resource_dict[resource.PROPERTY_NAME], {"b": "bucket", "o": "key1/key2", "v": "SomeVersionNumber"} ) + self.s3_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.s3_uploader_mock.delete_artifact.assert_called_once_with(remote_path="key1/key2", is_key=True) + @patch("samcli.lib.package.packageable_resources.upload_local_artifacts") def 
test_resource_with_signing_configuration(self, upload_local_artifacts_mock): class MockResource(ResourceZip): From 58ead7198982b4211eaef6d4b85269013e580ad9 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 6 Jul 2021 17:30:33 -0400 Subject: [PATCH 082/121] Added docs and comments for artifact_exporter and ecr_uploader --- samcli/lib/package/artifact_exporter.py | 2 ++ samcli/lib/package/ecr_uploader.py | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index f005a642cc..2ec18eec68 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -255,6 +255,8 @@ def delete(self, template_dict): resource_type = resource.get("Type", None) resource_dict = resource.get("Properties", {}) resource_deletion_policy = resource.get("DeletionPolicy", None) + # If the deletion policy is set to Retain, + # do not delete the artifact for the resource. if resource_deletion_policy != "Retain": for exporter_class in self.resources_to_export: if exporter_class.RESOURCE_TYPE != resource_type: diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 8306e000d0..7e70b88593 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -91,6 +91,14 @@ def upload(self, image, resource_name): return f"{repository}:{_tag}" def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): + """ + Delete the given ECR image by extracting the repository and image_tag from + image_uri + + :param image_uri: image_uri of the image to be deleted + :param resource_id: id of the resource for which the image is deleted + :param property_name: provided property_name for the resource + """ try: repo_image_tag = image_uri.split("/")[1].split(":") repository = repo_image_tag[0] From 45ee66fbb4816ef5876a6f5627da024086193416 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 7 Jul 2021 10:58:45 
-0400 Subject: [PATCH 083/121] Added log statements in delete_context and some updates in unit tests --- samcli/commands/delete/delete_context.py | 11 +++++++- .../commands/delete/test_delete_context.py | 25 ++++++++++++++----- 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 27e36d6799..e9c4575813 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -1,7 +1,7 @@ """ Delete a SAM stack """ - +import logging import boto3 @@ -25,6 +25,8 @@ CONFIG_SECTION = "parameters" TEMPLATE_STAGE = "Original" +LOG = logging.getLogger(__name__) + class DeleteContext: def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, force: bool): @@ -47,6 +49,7 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, def __enter__(self): self.parse_config_file() if not self.stack_name: + LOG.debug("No stack-name input found") self.stack_name = prompt( click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING ) @@ -71,6 +74,7 @@ def parse_config_file(self): # If the stack_name is same as the one present in samconfig file, # get the information about parameters if not specified by customer. 
if self.stack_name and self.stack_name == config_options.get("stack_name", None): + LOG.debug("Local config present and using the defined options") if not self.region: self.region = config_options.get("region", None) click.get_current_context().region = self.region @@ -125,6 +129,7 @@ def guided_prompts(self): default=False, ) if not self.delete_artifacts_folder: + LOG.debug("S3 prefix not present or user does not want to delete the prefix folder") self.delete_cf_template_file = confirm( click.style( "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True @@ -156,6 +161,7 @@ def delete(self): click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") self.cf_utils.delete_stack(stack_name=self.stack_name) self.cf_utils.wait_for_delete(self.stack_name) + LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) # Delete the artifacts self.template.delete(template_dict) @@ -170,6 +176,7 @@ def delete(self): # If s3_bucket information is not available elif not self.s3_bucket: + LOG.debug("Cannot delete s3 files as no s3_bucket found") click.secho( "\nWarning: s3_bucket and s3_prefix information cannot be obtained," " delete the files manually if required", @@ -194,7 +201,9 @@ def run(self): is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) # Check if the provided stack-name exists if is_deployed: + LOG.debug("Input stack is deployed, continue deleting") self.delete() click.echo("\nDeleted successfully") else: + LOG.debug("Input stack does not exists on Cloudformation") click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 39aa38b010..0d4dfcbd52 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -10,7 +10,7 @@ class TestDeleteContext(TestCase): - 
@patch("samcli.commands.deploy.guided_context.click.echo") + @patch("samcli.commands.delete.delete_context.click.echo") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) def test_delete_context_stack_does_not_exist(self, patched_click_echo): with DeleteContext( @@ -113,12 +113,13 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex self.assertEqual(CfUtils.wait_for_delete.call_count, 1) self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) + @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.deploy.guided_context.click.secho") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) - def test_delete_context_no_s3_bucket(self, patched_click_secho): + def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_echo): with DeleteContext( stack_name="test", region="us-east-1", @@ -138,14 +139,22 @@ def test_delete_context_no_s3_bucket(self, patched_click_secho): ] self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) + expected_click_echo_calls = [ + call("\n\t- Deleting Cloudformation stack test"), + call("\nDeleted successfully"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) - def 
test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confirm): + def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confirm, patched_get_cf_template_name): + patched_get_cf_template_name.return_value = "hello.template" with DeleteContext( stack_name="test", region="us-east-1", @@ -179,7 +188,7 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi ), call( click.style( - "\tDo you want to delete the template file b10a8db164e0754105b7a99be72e3fe5.template in S3?", + "\tDo you want to delete the template file hello.template in S3?", bold=True, ), default=False, @@ -190,14 +199,18 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi self.assertFalse(delete_context.delete_artifacts_folder) self.assertTrue(delete_context.delete_cf_template_file) + @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) - def test_guided_prompts_s3_bucket_present_no_prefix_execute_run(self, patched_confirm): + def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( + self, patched_confirm, patched_get_cf_template_name + ): + patched_get_cf_template_name.return_value = "hello.template" with DeleteContext( stack_name="test", region="us-east-1", @@ -222,7 +235,7 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run(self, patched_co ), call( click.style( - "\tDo you want to delete the template file b10a8db164e0754105b7a99be72e3fe5.template in S3?", + "\tDo you want to delete the template file hello.template in S3?", bold=True, ), default=False, From 
d151b019d1c001f69b2dc4cec37d70bfcff5ef43 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 8 Jul 2021 17:27:06 -0400 Subject: [PATCH 084/121] Changed force to no-prompts and updated ecr delete method error handling --- samcli/commands/delete/command.py | 13 +++++------ samcli/commands/delete/delete_context.py | 14 +++++------ samcli/commands/package/exceptions.py | 4 +--- samcli/lib/package/ecr_uploader.py | 18 +++++++++++++-- samcli/lib/package/utils.py | 3 ++- tests/unit/commands/delete/test_command.py | 6 ++--- .../commands/delete/test_delete_context.py | 14 +++++------ tests/unit/lib/package/test_ecr_uploader.py | 23 +++++++++++++++++++ 8 files changed, 64 insertions(+), 31 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 4e0b9ec6bc..06130eb68b 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -58,10 +58,9 @@ show_default=True, ) @click.option( - "--force", - help=("Specify this flag to allow SAM CLI to skip through the guided prompts" ""), + "--no-prompts", + help=("Specify this flag to allow SAM CLI to skip through the guided prompts."), is_flag=True, - type=click.BOOL, required=False, ) @aws_creds_options @@ -69,7 +68,7 @@ @pass_context @check_newer_version @print_cmdline_args -def cli(ctx, stack_name: str, config_file: str, config_env: str, force: bool): +def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: bool): """ `sam delete` command entry point """ @@ -81,11 +80,11 @@ def cli(ctx, stack_name: str, config_file: str, config_env: str, force: bool): config_file=config_file, config_env=config_env, profile=ctx.profile, - force=force, + no_prompts=no_prompts, ) # pragma: no cover -def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str, force: bool): +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str, no_prompts: bool): """ Implementation of the ``cli`` method """ @@ 
-97,6 +96,6 @@ def do_cli(stack_name: str, region: str, config_file: str, config_env: str, prof profile=profile, config_file=config_file, config_env=config_env, - force=force, + no_prompts=no_prompts, ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index e9c4575813..2914762318 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -5,7 +5,6 @@ import boto3 -import docker import click from click import confirm from click import prompt @@ -29,13 +28,13 @@ class DeleteContext: - def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, force: bool): + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool): self.stack_name = stack_name self.region = region self.profile = profile self.config_file = config_file self.config_env = config_env - self.force = force + self.no_prompts = no_prompts self.s3_bucket = None self.s3_prefix = None self.cf_utils = None @@ -101,8 +100,7 @@ def init_clients(self): self.region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - docker_client = docker.from_env() - ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) self.cf_utils = CfUtils(cloudformation_client) @@ -118,7 +116,7 @@ def guided_prompts(self): # information is not found, warn the customer that S3 artifacts # will need to be manually deleted. 
- if not self.force and self.s3_bucket: + if not self.no_prompts and self.s3_bucket: if self.s3_prefix: self.delete_artifacts_folder = confirm( click.style( @@ -187,7 +185,7 @@ def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. """ - if not self.force: + if not self.no_prompts: delete_stack = confirm( click.style( f"\tAre you sure you want to delete the stack {self.stack_name}" @@ -197,7 +195,7 @@ def run(self): default=False, ) - if self.force or delete_stack: + if self.no_prompts or delete_stack: is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) # Check if the provided stack-name exists if is_deployed: diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index 2e23cf7458..70ed0ba958 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -85,12 +85,10 @@ def __init__(self, resource_id, property_name, ex): class ImageNotFoundError(UserException): - def __init__(self, resource_id, property_name): + def __init__(self, resource_id, property_name, message_fmt): self.resource_id = resource_id self.property_name = property_name - message_fmt = "Image not found for {property_name} parameter of {resource_id} resource. 
\n" - super().__init__( message=message_fmt.format( property_name=self.property_name, diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 7e70b88593..7edc41ad41 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -113,8 +113,22 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): # Image not found image_details = resp["failures"][0] if image_details["failureCode"] == "ImageNotFound": - LOG.error("ImageNotFound Exception : ") - raise ImageNotFoundError(resource_id, property_name) + LOG.error("ImageNotFound Exception") + message_fmt = ( + "Could not delete image for {property_name}" + " parameter of {resource_id} resource as it does not exist. \n" + ) + raise ImageNotFoundError(resource_id, property_name, message_fmt=message_fmt) + + LOG.error( + "Could not delete the image for the resource %s. FailureCode: %s, FailureReason: %s", + property_name, + image_details["failureCode"], + image_details["failureReason"], + ) + raise DeleteArtifactFailedError( + resource_id=resource_id, property_name=property_name, ex=image_details["failureReason"] + ) LOG.debug("Deleting ECR image with tag %s", image_tag) click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index c33b2b3de7..c152b37aa0 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -110,7 +110,8 @@ def upload_local_image_artifacts(resource_id, resource_dict, property_name, pare image_path = jmespath.search(property_name, resource_dict) if not image_path: - raise ImageNotFoundError(property_name=property_name, resource_id=resource_id) + message_fmt = "Image not found for {property_name} parameter of {resource_id} resource. 
\n" + raise ImageNotFoundError(property_name=property_name, resource_id=resource_id, message_fmt=message_fmt) if is_ecr_url(image_path): LOG.debug("Property %s of %s is already an ECR URL", property_name, resource_id) diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index 73ea9e3f19..7160553793 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -13,7 +13,7 @@ def setUp(self): self.s3_prefix = "s3-prefix" self.region = None self.profile = None - self.force = None + self.no_prompts = None self.config_env = "mock-default-env" self.config_file = "mock-default-filename" @@ -30,7 +30,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): config_file=self.config_file, config_env=self.config_env, profile=self.profile, - force=self.force, + no_prompts=self.no_prompts, ) mock_delete_context.assert_called_with( @@ -39,7 +39,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): profile=self.profile, config_file=self.config_file, config_env=self.config_env, - force=self.force, + no_prompts=self.no_prompts, ) context_mock.run.assert_called_with() diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 0d4dfcbd52..f0975f144e 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -19,7 +19,7 @@ def test_delete_context_stack_does_not_exist(self, patched_click_echo): config_file="samconfig.toml", config_env="default", profile="test", - force=True, + no_prompts=True, ) as delete_context: delete_context.run() @@ -37,7 +37,7 @@ def test_delete_context_enter(self): config_file="samconfig.toml", config_env="default", profile="test", - force=True, + no_prompts=True, ) as delete_context: self.assertEqual(delete_context.parse_config_file.call_count, 1) self.assertEqual(delete_context.init_clients.call_count, 1) @@ -66,7 
+66,7 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex config_file="samconfig.toml", config_env="default", profile=None, - force=True, + no_prompts=True, ) as delete_context: self.assertEqual(delete_context.stack_name, "test") self.assertEqual(delete_context.region, "us-east-1") @@ -103,7 +103,7 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex config_file="samconfig.toml", config_env="default", profile=None, - force=True, + no_prompts=True, ) as delete_context: delete_context.run() @@ -126,7 +126,7 @@ def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_ec config_file="samconfig.toml", config_env="default", profile="test", - force=True, + no_prompts=True, ) as delete_context: delete_context.run() @@ -161,7 +161,7 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi config_file="samconfig.toml", config_env="default", profile="test", - force=None, + no_prompts=None, ) as delete_context: patched_confirm.side_effect = [True, False, True] delete_context.cf_template_file_name = "hello.template" @@ -217,7 +217,7 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( config_file="samconfig.toml", config_env="default", profile="test", - force=None, + no_prompts=None, ) as delete_context: patched_confirm.side_effect = [True, True] delete_context.cf_template_file_name = "hello.template" diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index a66207efca..3d4f962b06 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -193,6 +193,29 @@ def test_delete_artifact_no_image_error(self): image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name ) + def test_delete_artifact_resp_failure(self): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + 
ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image.return_value = { + "failures": [ + { + "imageId": {"imageTag": self.tag}, + "failureCode": "Mock response Failure", + "failureReason": "Mock ECR testing", + } + ] + } + + with self.assertRaises(DeleteArtifactFailedError): + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + def test_delete_artifact_client_error(self): ecr_uploader = ECRUploader( docker_client=self.docker_client, From 59c85768356089edb265c2ea7f53bce2412f9e19 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Thu, 8 Jul 2021 15:48:08 -0700 Subject: [PATCH 085/121] chore: Improved --resolve-s3 option documentation and deployment without s3 error messages (#2983) * Improve documentation on --resolve-s3 option and improve s3 failure messages * Changed indentation for integration test on s3 error message * Fixed a typo in description * Improve spacing on help text for resolve-s3 option --- samcli/commands/deploy/command.py | 4 +++- samcli/commands/package/command.py | 4 +++- samcli/commands/package/exceptions.py | 3 ++- tests/integration/deploy/test_deploy_command.py | 3 ++- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 371dc61c4d..5b7744b89d 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -153,7 +153,9 @@ "--resolve-s3", required=False, is_flag=True, - help="Automatically resolve s3 bucket for non-guided deployments." + help="Automatically resolve s3 bucket for non-guided deployments. " + "Enabling this option will also create a managed default s3 bucket for you. " + "If you do not provide a --s3-bucket value, the managed bucket will be used. 
" "Do not use --s3-guided parameter with this option.", ) @metadata_override_option diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index cab68b6d88..cc0dc35c5d 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -121,7 +121,9 @@ def resources_and_properties_help_string(): exc_set=PackageResolveS3AndS3SetError, exc_not_set=PackageResolveS3AndS3NotSetError, ), - help="Automatically resolve s3 bucket for non-guided deployments." + help="Automatically resolve s3 bucket for non-guided deployments. " + "Enabling this option will also create a managed default s3 bucket for you. " + "If you do not provide a --s3-bucket value, the managed bucket will be used. " "Do not use --s3-guided parameter with this option.", ) @metadata_override_option diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index a650f62843..af549058e9 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -124,7 +124,8 @@ class BucketNotSpecifiedError(UserException): def __init__(self, **kwargs): self.kwargs = kwargs - message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided" + message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name, or use --resolve-s3 \ +to create a managed default bucket, or run sam deploy --guided" super().__init__(message=message_fmt.format(**self.kwargs)) diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 3e4bd53f87..893799e157 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -313,7 +313,8 @@ def test_deploy_without_s3_bucket(self, template_file): self.assertEqual(deploy_process_execute.process.returncode, 1) self.assertIn( bytes( - f"S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam 
deploy --guided", + f"S3 Bucket not specified, use --s3-bucket to specify a bucket name, or use --resolve-s3 \ +to create a managed default bucket, or run sam deploy --guided", encoding="utf-8", ), deploy_process_execute.stderr, From 6f542400faeb7b267d7899b2a82ad9f59bbba951 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 8 Jul 2021 22:07:47 -0400 Subject: [PATCH 086/121] Created a separate function for parsing ecr url in ecr_uploader --- samcli/lib/package/ecr_uploader.py | 26 ++++++++++++++++++--- samcli/lib/package/packageable_resources.py | 4 ++++ tests/unit/lib/package/test_ecr_uploader.py | 18 ++++++++++++++ 3 files changed, 45 insertions(+), 3 deletions(-) diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 7edc41ad41..9aa6aaa159 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -5,6 +5,7 @@ import base64 import os +from typing import Dict import click import botocore import docker @@ -100,9 +101,9 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): :param property_name: provided property_name for the resource """ try: - repo_image_tag = image_uri.split("/")[1].split(":") - repository = repo_image_tag[0] - image_tag = repo_image_tag[1] + repo_image_tag = self.parse_ecr_url(image_uri=image_uri) + repository = repo_image_tag["repository"] + image_tag = repo_image_tag["image_tag"] resp = self.ecr_client.batch_delete_image( repositoryName=repository, imageIds=[ @@ -138,6 +139,25 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): LOG.error("DeleteArtifactFailedError Exception : %s", str(ex)) raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + @staticmethod + def parse_ecr_url(image_uri: str) -> Dict: + result = {} + registry_repo_tag = image_uri.split("/") + repo_colon_image_tag = None + if len(registry_repo_tag) == 1: + # If there is no registry specified, e.g. 
repo:tag + repo_colon_image_tag = registry_repo_tag[0] + else: + # Registry present, e.g. registry/repo:tag + repo_colon_image_tag = registry_repo_tag[1] + repo_image_tag_split = repo_colon_image_tag.split(":") + + # If no tag is specified, use latest + result["repository"] = repo_image_tag_split[0] + result["image_tag"] = repo_image_tag_split[1] if len(repo_image_tag_split) > 1 else "latest" + + return result + # TODO: move this to a generic class to allow for streaming logs back from docker. def _stream_progress(self, logs): """ diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index cc2fe999d9..02d76faeb6 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -225,6 +225,8 @@ def delete(self, resource_id, resource_dict): self.uploader.delete_artifact( image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME ) + else: + raise ValueError("URL given to the parse method is not a valid ECR url " "{0}".format(remote_path)) class ResourceImage(Resource): @@ -279,6 +281,8 @@ def delete(self, resource_id, resource_dict): self.uploader.delete_artifact( image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME ) + else: + raise ValueError("URL given to the parse method is not a valid ECR url " "{0}".format(remote_path)) class ResourceWithS3UrlDict(ResourceZip): diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 3d4f962b06..6264fe5d6b 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -234,3 +234,21 @@ def test_delete_artifact_client_error(self): ecr_uploader.delete_artifact( image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name ) + + def test_parse_ecr_url(self): + + valid = [ + {"url": self.image_uri, "result": {"repository": "mock-image-repo", "image_tag": "mock-tag"}}, 
+ {"url": "mock-image-rep:mock-tag", "result": {"repository": "mock-image-rep", "image_tag": "mock-tag"}}, + { + "url": "mock-image-repo", + "result": {"repository": "mock-image-repo", "image_tag": "latest"}, + } + ] + + for config in valid: + result = ECRUploader.parse_ecr_url( + image_uri=config["url"] + ) + + self.assertEqual(result, config["result"]) From 30e3c02abd975ac09e7ef8ef921845de838de127 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 8 Jul 2021 23:03:33 -0400 Subject: [PATCH 087/121] Reformatted Template class init to pass template_str and init template_dict --- samcli/commands/delete/delete_context.py | 13 ++++++------- samcli/lib/package/artifact_exporter.py | 11 +++++------ tests/unit/lib/package/test_artifact_exporter.py | 14 ++++++++++++-- tests/unit/lib/package/test_ecr_uploader.py | 8 +++----- 4 files changed, 26 insertions(+), 20 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 2914762318..6491307247 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -14,7 +14,6 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name -from samcli.yamlhelper import yaml_parse from samcli.lib.package.artifact_exporter import Template from samcli.lib.package.ecr_uploader import ECRUploader @@ -40,7 +39,6 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, self.cf_utils = None self.s3_uploader = None self.uploaders = None - self.template = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None @@ -104,7 +102,6 @@ def init_clients(self): self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) self.cf_utils = CfUtils(cloudformation_client) - self.template = Template(None, None, self.uploaders, None) def guided_prompts(self): """ @@ -145,9 +142,8 @@ def delete(self): 
Delete method calls for Cloudformation stacks and S3 and ECR artifacts """ # Fetch the template using the stack-name - template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) - template_str = template.get("TemplateBody", None) - template_dict = yaml_parse(template_str) + cf_template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template_str = cf_template.get("TemplateBody", None) # Get the cloudformation template name using template_str with mktempfile() as temp_file: @@ -162,7 +158,10 @@ def delete(self): LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) # Delete the artifacts - self.template.delete(template_dict) + template = Template( + template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str + ) + template.delete() # Delete the CF template file in S3 if self.delete_cf_template_file: diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 2ec18eec68..00fa5cb089 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -16,7 +16,7 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import os -from typing import Dict +from typing import Dict, Optional from botocore.utils import set_value_from_jmespath @@ -128,11 +128,12 @@ def __init__( RESOURCES_EXPORT_LIST + [CloudFormationStackResource, ServerlessApplicationResource] ), metadata_to_export=frozenset(METADATA_EXPORT_LIST), + template_str: Optional[str] = None, ): """ Reads the template and makes it ready for export """ - if template_path and parent_dir: + if not template_str: if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) @@ -142,9 +143,9 @@ def __init__( with open(abs_template_path, "r") as handle: template_str = handle.read() - self.template_dict = yaml_parse(template_str) self.template_dir = template_dir self.code_signer = code_signer + self.template_dict = yaml_parse(template_str) self.resources_to_export = resources_to_export self.metadata_to_export = metadata_to_export self.uploaders = uploaders @@ -239,12 +240,10 @@ def export(self) -> Dict: return self.template_dict - def delete(self, template_dict): + def delete(self): """ Deletes all the artifacts referenced by the given Cloudformation template """ - self.template_dict = template_dict - if "Resources" not in self.template_dict: return diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 1167876ece..750317ed20 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -1,3 +1,4 @@ +import json import tempfile import os import string @@ -1422,9 +1423,18 @@ def test_template_delete(self): "Resource3": {"Type": "some-other-type", "Properties": properties, "DeletionPolicy": "Retain"}, } } + template_str = json.dumps(template_dict, indent=4, ensure_ascii=False) + + template_exporter = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders_mock, + code_signer=None, + 
resources_to_export=resources_to_export, + template_str=template_str, + ) - template_exporter = Template(None, None, self.uploaders_mock, None, resources_to_export) - template_exporter.delete(template_dict) + template_exporter.delete() resource_type1_class.assert_called_once_with(self.uploaders_mock, None) resource_type1_instance.delete.assert_called_once_with("Resource1", mock.ANY) diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 6264fe5d6b..2fa0e0433d 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -243,12 +243,10 @@ def test_parse_ecr_url(self): { "url": "mock-image-repo", "result": {"repository": "mock-image-repo", "image_tag": "latest"}, - } + }, ] - + for config in valid: - result = ECRUploader.parse_ecr_url( - image_uri=config["url"] - ) + result = ECRUploader.parse_ecr_url(image_uri=config["url"]) self.assertEqual(result, config["result"]) From b8a2591f3c2505d50728296eb597ce8c82a21910 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Fri, 9 Jul 2021 12:22:09 -0400 Subject: [PATCH 088/121] Changed how s3 url is obtained for resource_zip edge-case: aws:glue:job --- samcli/lib/package/packageable_resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 02d76faeb6..486e90ebb4 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -164,7 +164,7 @@ def delete(self, resource_id, resource_dict): """ if resource_dict is None: return - resource_path = resource_dict[self.PROPERTY_NAME] + resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) parsed_s3_url = self.uploader.parse_s3_url(resource_path) if not self.uploader.bucket_name: self.uploader.bucket_name = parsed_s3_url["Bucket"] From 7292353ec8339d4bfad9e411e45fd6424e07a84b Mon Sep 17 00:00:00 2001 From: Haresh 
Nasit Date: Fri, 9 Jul 2021 15:30:23 -0400 Subject: [PATCH 089/121] Fixed edge case where resource artifact points to a path style url --- samcli/lib/package/packageable_resources.py | 9 +++++++- samcli/lib/package/s3_uploader.py | 22 +++++++++++++++++++ .../lib/package/test_artifact_exporter.py | 22 +++++++++++++++++++ 3 files changed, 52 insertions(+), 1 deletion(-) diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 486e90ebb4..15ce8fc361 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -165,7 +165,14 @@ def delete(self, resource_id, resource_dict): if resource_dict is None: return resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) - parsed_s3_url = self.uploader.parse_s3_url(resource_path) + parsed_s3_url = [] + if isinstance(resource_path, str) and resource_path.startswith("https://s3"): + # Path-style s3 url parsing for resources that return these urls + # For resources e.g. CloudFormation::Stack and Serverless::Application + parsed_s3_url = self.uploader.parse_path_style_s3_url(resource_path) + else: + # urls which start with s3:// + parsed_s3_url = self.uploader.parse_s3_url(resource_path) if not self.uploader.bucket_name: self.uploader.bucket_name = parsed_s3_url["Bucket"] self.uploader.delete_artifact(parsed_s3_url["Key"], True) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 76b7ff1ec7..cf67efa4ea 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -263,6 +263,28 @@ def parse_s3_url( raise ValueError("URL given to the parse method is not a valid S3 url " "{0}".format(url)) + @staticmethod + def parse_path_style_s3_url( + url: Any, + bucket_name_property: str = "Bucket", + object_key_property: str = "Key", + ) -> Dict: + """ + Static method for parsing path style s3 urls. + e.g. 
https://s3.us-east-1.amazonaws.com/bucket/key + """ + if isinstance(url, str) and url.startswith("https://s3"): + parsed = urlparse(url) + result = dict() + # path would point to /bucket/key + s3_bucket_key = parsed.path.split('/', 2)[1:] + + result[bucket_name_property] = s3_bucket_key[0] + result[object_key_property] = s3_bucket_key[1] + + return result + raise ValueError("URL given to the parse method is not a valid path-style S3 url " "{0}".format(url)) + class ProgressPercentage: # This class was copied directly from S3Transfer docs diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 750317ed20..62e185c7f3 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -241,6 +241,28 @@ def test_parse_s3_url(self): with self.assertRaises(ValueError): S3Uploader.parse_s3_url(url) + def test_parse_path_style_s3_url(self): + valid = [ + {"url": "https://s3-eu-west-1.amazonaws.com/bucket/long/key", "result": {"Bucket": "bucket", "Key": "long/key"}}, + {"url": "https://s3.us-east-1.amazonaws.com/bucket/key", "result": {"Bucket": "bucket", "Key": "key"}}, + ] + + invalid = [ + "https://www.amazon.com", + "https://bucket-name.s3.Region.amazonaws.com/key" + ] + + for config in valid: + result = S3Uploader.parse_path_style_s3_url( + config["url"], bucket_name_property="Bucket", object_key_property="Key" + ) + + self.assertEqual(result, config["result"]) + + for url in invalid: + with self.assertRaises(ValueError): + S3Uploader.parse_path_style_s3_url(url) + def test_is_local_file(self): with tempfile.NamedTemporaryFile() as handle: self.assertTrue(is_local_file(handle.name)) From 6ff2e22c6832b22beef985dfd754986a1804c666 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Fri, 9 Jul 2021 15:31:24 -0400 Subject: [PATCH 090/121] run Make black --- samcli/lib/package/s3_uploader.py | 2 +- tests/unit/lib/package/test_artifact_exporter.py | 10 +++++----- 2 
files changed, 6 insertions(+), 6 deletions(-) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index cf67efa4ea..dfd5db90fe 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -277,7 +277,7 @@ def parse_path_style_s3_url( parsed = urlparse(url) result = dict() # path would point to /bucket/key - s3_bucket_key = parsed.path.split('/', 2)[1:] + s3_bucket_key = parsed.path.split("/", 2)[1:] result[bucket_name_property] = s3_bucket_key[0] result[object_key_property] = s3_bucket_key[1] diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 62e185c7f3..d452403136 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -243,14 +243,14 @@ def test_parse_s3_url(self): def test_parse_path_style_s3_url(self): valid = [ - {"url": "https://s3-eu-west-1.amazonaws.com/bucket/long/key", "result": {"Bucket": "bucket", "Key": "long/key"}}, + { + "url": "https://s3-eu-west-1.amazonaws.com/bucket/long/key", + "result": {"Bucket": "bucket", "Key": "long/key"}, + }, {"url": "https://s3.us-east-1.amazonaws.com/bucket/key", "result": {"Bucket": "bucket", "Key": "key"}}, ] - invalid = [ - "https://www.amazon.com", - "https://bucket-name.s3.Region.amazonaws.com/key" - ] + invalid = ["https://www.amazon.com", "https://bucket-name.s3.Region.amazonaws.com/key"] for config in valid: result = S3Uploader.parse_path_style_s3_url( From 6c9a060fa886832b9cd04198d524430d6a40d06a Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Sat, 10 Jul 2021 17:46:48 -0400 Subject: [PATCH 091/121] Made the parse s3 url funcs protected and defined a parent method and modified delete method for ResourceImageDict --- samcli/lib/package/packageable_resources.py | 15 ++--- samcli/lib/package/s3_uploader.py | 61 +++++++++++++------ .../lib/package/test_artifact_exporter.py | 39 ++++-------- 3 files changed, 59 
insertions(+), 56 deletions(-) diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 15ce8fc361..39d9b73867 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -165,14 +165,7 @@ def delete(self, resource_id, resource_dict): if resource_dict is None: return resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) - parsed_s3_url = [] - if isinstance(resource_path, str) and resource_path.startswith("https://s3"): - # Path-style s3 url parsing for resources that return these urls - # For resources e.g. CloudFormation::Stack and Serverless::Application - parsed_s3_url = self.uploader.parse_path_style_s3_url(resource_path) - else: - # urls which start with s3:// - parsed_s3_url = self.uploader.parse_s3_url(resource_path) + parsed_s3_url = self.uploader.parse_s3_url(resource_path) if not self.uploader.bucket_name: self.uploader.bucket_name = parsed_s3_url["Bucket"] self.uploader.delete_artifact(parsed_s3_url["Key"], True) @@ -227,13 +220,13 @@ def delete(self, resource_id, resource_dict): if resource_dict is None: return - remote_path = resource_dict[self.PROPERTY_NAME][self.EXPORT_PROPERTY_CODE_KEY] + remote_path = resource_dict.get(self.PROPERTY_NAME, {}).get(self.EXPORT_PROPERTY_CODE_KEY) if is_ecr_url(remote_path): self.uploader.delete_artifact( image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME ) else: - raise ValueError("URL given to the parse method is not a valid ECR url " "{0}".format(remote_path)) + raise ValueError("URL given to the parse method is not a valid ECR url {0}".format(remote_path)) class ResourceImage(Resource): @@ -289,7 +282,7 @@ def delete(self, resource_id, resource_dict): image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME ) else: - raise ValueError("URL given to the parse method is not a valid ECR url " "{0}".format(remote_path)) + raise ValueError("URL given to 
the parse method is not a valid ECR url {0}".format(remote_path)) class ResourceWithS3UrlDict(ResourceZip): diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index dfd5db90fe..b3fbe53c7d 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -243,28 +243,51 @@ def parse_s3_url( object_key_property: str = "Key", version_property: Optional[str] = None, ) -> Dict: - if isinstance(url, str) and url.startswith("s3://"): - parsed = urlparse(url) - query = parse_qs(parsed.query) + return S3Uploader._parse_s3_format_url( + url=url, + bucket_name_property=bucket_name_property, + object_key_property=object_key_property, + version_property=version_property, + ) + + if isinstance(url, str) and url.startswith("https://s3"): + return S3Uploader._parse_path_style_s3_url( + url=url, bucket_name_property=bucket_name_property, object_key_property=object_key_property + ) - if parsed.netloc and parsed.path: - result = dict() - result[bucket_name_property] = parsed.netloc - result[object_key_property] = parsed.path.lstrip("/") + raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) + + @staticmethod + def _parse_s3_format_url( + url: Any, + bucket_name_property: str = "Bucket", + object_key_property: str = "Key", + version_property: Optional[str] = None, + ) -> Dict: + """ + Method for parsing s3 urls that begin with s3:// + e.g. 
s3://bucket/key + """ + parsed = urlparse(url) + query = parse_qs(parsed.query) + if parsed.netloc and parsed.path: + result = dict() + result[bucket_name_property] = parsed.netloc + result[object_key_property] = parsed.path.lstrip("/") - # If there is a query string that has a single versionId field, - # set the object version and return - if version_property is not None and "versionId" in query and len(query["versionId"]) == 1: - result[version_property] = query["versionId"][0] + # If there is a query string that has a single versionId field, + # set the object version and return + if version_property is not None and "versionId" in query and len(query["versionId"]) == 1: + result[version_property] = query["versionId"][0] - return result + return result - raise ValueError("URL given to the parse method is not a valid S3 url " "{0}".format(url)) + raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) @staticmethod - def parse_path_style_s3_url( + def _parse_path_style_s3_url( url: Any, bucket_name_property: str = "Bucket", object_key_property: str = "Key", @@ -273,17 +296,17 @@ def parse_path_style_s3_url( Static method for parsing path style s3 urls. e.g. 
https://s3.us-east-1.amazonaws.com/bucket/key """ - if isinstance(url, str) and url.startswith("https://s3"): - parsed = urlparse(url) - result = dict() - # path would point to /bucket/key + parsed = urlparse(url) + result = dict() + # parsed.path would point to /bucket/key + if parsed.path: s3_bucket_key = parsed.path.split("/", 2)[1:] result[bucket_name_property] = s3_bucket_key[0] result[object_key_property] = s3_bucket_key[1] return result - raise ValueError("URL given to the parse method is not a valid path-style S3 url " "{0}".format(url)) + raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) class ProgressPercentage: diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index d452403136..e6b9d14320 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -181,14 +181,14 @@ def test_is_s3_url(self): "s3://foo/bar/baz?versionId=abc", "s3://www.amazon.com/foo/bar", "s3://my-new-bucket/foo/bar?a=1&a=2&a=3&b=1", + "https://s3-eu-west-1.amazonaws.com/bucket/key", + "https://s3.us-east-1.amazonaws.com/bucket/key", ] invalid = [ # For purposes of exporter, we need S3 URLs to point to an object # and not a bucket "s3://foo", - # two versionIds is invalid - "https://s3-eu-west-1.amazonaws.com/bucket/key", "https://www.amazon.com", ] @@ -219,15 +219,24 @@ def test_parse_s3_url(self): "url": "s3://foo/bar/baz?versionId=abc&versionId=123", "result": {"Bucket": "foo", "Key": "bar/baz"}, }, + { + # Path style url + "url": "https://s3-eu-west-1.amazonaws.com/bucket/key", + "result": {"Bucket": "bucket", "Key": "key"}, + }, + { + # Path style url + "url": "https://s3.us-east-1.amazonaws.com/bucket/key", + "result": {"Bucket": "bucket", "Key": "key"}, + }, ] invalid = [ # For purposes of exporter, we need S3 URLs to point to an object # and not a bucket "s3://foo", - # two versionIds is invalid - 
"https://s3-eu-west-1.amazonaws.com/bucket/key", "https://www.amazon.com", + "https://s3.us-east-1.amazonaws.com", ] for config in valid: @@ -241,28 +250,6 @@ def test_parse_s3_url(self): with self.assertRaises(ValueError): S3Uploader.parse_s3_url(url) - def test_parse_path_style_s3_url(self): - valid = [ - { - "url": "https://s3-eu-west-1.amazonaws.com/bucket/long/key", - "result": {"Bucket": "bucket", "Key": "long/key"}, - }, - {"url": "https://s3.us-east-1.amazonaws.com/bucket/key", "result": {"Bucket": "bucket", "Key": "key"}}, - ] - - invalid = ["https://www.amazon.com", "https://bucket-name.s3.Region.amazonaws.com/key"] - - for config in valid: - result = S3Uploader.parse_path_style_s3_url( - config["url"], bucket_name_property="Bucket", object_key_property="Key" - ) - - self.assertEqual(result, config["result"]) - - for url in invalid: - with self.assertRaises(ValueError): - S3Uploader.parse_path_style_s3_url(url) - def test_is_local_file(self): with tempfile.NamedTemporaryFile() as handle: self.assertTrue(is_local_file(handle.name)) From 1675b7ed231b6472d38eeeeb25e39f6310bbb86f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 12 Jul 2021 12:28:19 -0400 Subject: [PATCH 092/121] Changed parse_ecr_url function name to parse_image_url --- samcli/lib/package/ecr_uploader.py | 4 ++-- tests/unit/lib/package/test_ecr_uploader.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 9aa6aaa159..e899bab7c8 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -101,7 +101,7 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): :param property_name: provided property_name for the resource """ try: - repo_image_tag = self.parse_ecr_url(image_uri=image_uri) + repo_image_tag = self.parse_image_url(image_uri=image_uri) repository = repo_image_tag["repository"] image_tag = repo_image_tag["image_tag"] resp = 
self.ecr_client.batch_delete_image( @@ -140,7 +140,7 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex @staticmethod - def parse_ecr_url(image_uri: str) -> Dict: + def parse_image_url(image_uri: str) -> Dict: result = {} registry_repo_tag = image_uri.split("/") repo_colon_image_tag = None diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 2fa0e0433d..25b2c4a047 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -235,7 +235,7 @@ def test_delete_artifact_client_error(self): image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name ) - def test_parse_ecr_url(self): + def test_parse_image_url(self): valid = [ {"url": self.image_uri, "result": {"repository": "mock-image-repo", "image_tag": "mock-tag"}}, @@ -247,6 +247,6 @@ def test_parse_ecr_url(self): ] for config in valid: - result = ECRUploader.parse_ecr_url(image_uri=config["url"]) + result = ECRUploader.parse_image_url(image_uri=config["url"]) self.assertEqual(result, config["result"]) From 53550280396f99b6160ae346d612d898cbd1ecfd Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 13 Jul 2021 13:22:51 -0400 Subject: [PATCH 093/121] Defined UI for auto ecr deleton and method calls from companion_stack_manager --- samcli/commands/delete/delete_context.py | 72 +++++++++++++++++++ .../companion_stack_manager.py | 11 +++ samcli/lib/package/ecr_uploader.py | 2 +- 3 files changed, 84 insertions(+), 1 deletion(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 6491307247..a836372279 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -2,6 +2,7 @@ Delete a SAM stack """ import logging +from typing import Union, Dict import boto3 @@ -11,8 
+12,11 @@ from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent from samcli.lib.delete.cf_utils import CfUtils + from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name +from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo +from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager from samcli.lib.package.artifact_exporter import Template @@ -27,6 +31,9 @@ class DeleteContext: + + ecr_repos: Dict[str, Dict[str, Union[str, ECRRepo]]] + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool): self.stack_name = stack_name self.region = region @@ -42,6 +49,10 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None + self.companion_stack_manager = None + self.companion_stack_name = None + self.delete_ecr_companion_stack_prompt = None + self.ecr_repos = {} def __enter__(self): self.parse_config_file() @@ -103,6 +114,10 @@ def init_clients(self): self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) self.cf_utils = CfUtils(cloudformation_client) + self.companion_stack_manager = CompanionStackManager( + stack_name=self.stack_name, region=self.region, s3_bucket=self.s3_bucket, s3_prefix=self.s3_prefix + ) + def guided_prompts(self): """ Guided prompts asking customer to delete artifacts @@ -137,6 +152,43 @@ def guided_prompts(self): else: self.delete_cf_template_file = True + def ecr_companion_stack_prompts(self): + """ + Guided prompts asking customer to delete ECR companion stack + and the related artifacts. 
+ """ + self.companion_stack_name = self.companion_stack_manager.get_companion_stack_name() + click.echo(f"\tFound ECR Companion Stack {self.companion_stack_name}") + if not self.no_prompts: + self.delete_ecr_companion_stack_prompt = confirm( + click.style( + "\tDo you you want to delete the ECR companion stack" + + f" {self.companion_stack_name} in the region {self.region} ?", + bold=True, + ), + default=False, + ) + if self.no_prompts or self.delete_ecr_companion_stack_prompt: + ecr_repos = self.companion_stack_manager.list_deployed_repos() + if ecr_repos: + click.echo("\t#Note: Empty repositories created by SAM CLI will be deleted automatically.") + + for repo in ecr_repos: + # Get all the repos from the companion stack + repo_logical_id = repo.logical_id + self.ecr_repos[repo_logical_id] = {"repo": repo} + + if self.delete_ecr_companion_stack_prompt: + delete_repo = confirm( + click.style( + f"\tECR repository {self.companion_stack_manager.get_repo_uri(repo)}" + + " may not be empty. 
Do you want to delete the repository and all the images in it ?", + bold=True, + ), + default=False, + ) + self.ecr_repos[repo_logical_id]["delete_repo"] = delete_repo + def delete(self): """ Delete method calls for Cloudformation stacks and S3 and ECR artifacts @@ -151,6 +203,11 @@ def delete(self): self.guided_prompts() + # ECR companion stack delete prompts, if it exists + ecr_companion_stack_exists = self.companion_stack_manager.does_companion_stack_exist() + if ecr_companion_stack_exists: + self.ecr_companion_stack_prompts() + # Delete the primary stack click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") self.cf_utils.delete_stack(stack_name=self.stack_name) @@ -180,6 +237,21 @@ def delete(self): fg="yellow", ) + # Delete the ECR companion stack if it exists + if ecr_companion_stack_exists: + click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") + self.companion_stack_manager.delete_companion_stack() + + # # Delete the repos created by ECR companion stack if it exists + # if ecr_companion_stack_exists and (self.no_prompts or self.delete_companion_stack_prompt): + # for key in self.repos: + # repo = self.repos[key]["repo"] + # is_delete = self.repos[key].get("delete_repo", None) + # if no_prompts or is_delete: + # click.echo(f"\tDeleting ECR repository {repo.get_repo_uri(repo)}" + # "") + # self.ecr_uploader.delete_repository(repo.physical_id) + def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 31a8d254f7..f01d2ba8f0 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -263,3 +263,14 @@ def is_repo_uri(self, repo_uri: str, function_logical_id: str) -> bool: Returns True if repo_uri is a companion stack repo. 
""" return repo_uri == self.get_repo_uri(ECRRepo(self._companion_stack, function_logical_id)) + + def get_companion_stack_name(self): + """ + Returns the name of the companion stack + + Returns + ------- + str + Name of the companion stack + """ + return self._companion_stack.stack_name diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index e899bab7c8..9b4ac2b191 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -142,7 +142,7 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): @staticmethod def parse_image_url(image_uri: str) -> Dict: result = {} - registry_repo_tag = image_uri.split("/") + registry_repo_tag = image_uri.split("/", 1) repo_colon_image_tag = None if len(registry_repo_tag) == 1: # If there is no registry specified, e.g. repo:tag From b160efc097a5c390caf3952f8e51e40984aac055 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 13 Jul 2021 15:32:56 -0400 Subject: [PATCH 094/121] Added code for deleting repos from companion stack --- samcli/commands/delete/delete_context.py | 22 +++++++++++----------- samcli/lib/package/ecr_uploader.py | 13 +++++++++++++ 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index a836372279..517d6863ac 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -45,6 +45,7 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, self.s3_prefix = None self.cf_utils = None self.s3_uploader = None + self.ecr_uploader = None self.uploaders = None self.cf_template_file_name = None self.delete_artifacts_folder = None @@ -109,9 +110,9 @@ def init_clients(self): self.region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, 
prefix=self.s3_prefix) - ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) + self.ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) - self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + self.uploaders = Uploaders(self.s3_uploader, self.ecr_uploader) self.cf_utils = CfUtils(cloudformation_client) self.companion_stack_manager = CompanionStackManager( @@ -181,7 +182,7 @@ def ecr_companion_stack_prompts(self): if self.delete_ecr_companion_stack_prompt: delete_repo = confirm( click.style( - f"\tECR repository {self.companion_stack_manager.get_repo_uri(repo)}" + f"\tECR repository {repo.physical_id}" + " may not be empty. Do you want to delete the repository and all the images in it ?", bold=True, ), @@ -243,14 +244,13 @@ def delete(self): self.companion_stack_manager.delete_companion_stack() # # Delete the repos created by ECR companion stack if it exists - # if ecr_companion_stack_exists and (self.no_prompts or self.delete_companion_stack_prompt): - # for key in self.repos: - # repo = self.repos[key]["repo"] - # is_delete = self.repos[key].get("delete_repo", None) - # if no_prompts or is_delete: - # click.echo(f"\tDeleting ECR repository {repo.get_repo_uri(repo)}" - # "") - # self.ecr_uploader.delete_repository(repo.physical_id) + if ecr_companion_stack_exists and (self.no_prompts or self.delete_ecr_companion_stack_prompt): + for key in self.ecr_repos: + repo = self.ecr_repos[key]["repo"] + is_delete = self.ecr_repos[key].get("delete_repo", None) + if self.no_prompts or is_delete: + click.echo(f"\tDeleting ECR repository {repo.physical_id}") + self.ecr_uploader.delete_ecr_repository(physical_id=repo.physical_id) def run(self): """ diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 9b4ac2b191..675a2accf5 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -139,6 +139,19 @@ 
def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): LOG.error("DeleteArtifactFailedError Exception : %s", str(ex)) raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + def delete_ecr_repository(self, physical_id: str): + """ + Delete ECR repository using the physical_id + + :param: physical_id of the repository to be deleted + """ + try: + self.ecr_client.delete_repository(repositoryName=physical_id, force=True) + except self.ecr_client.exceptions.RepositoryNotFoundException: + # If the repository is empty, cloudformation automatically deletes + # the repository when cf_client.delete_stack is called. + pass + @staticmethod def parse_image_url(image_uri: str) -> Dict: result = {} From 4267e82fd79f339c04b33f86397c2a9f9830d07f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 13 Jul 2021 23:08:12 -0400 Subject: [PATCH 095/121] Handle json templates deployed to cf --- samcli/commands/delete/delete_context.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 517d6863ac..95c34e372b 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -7,8 +7,10 @@ import click +import json from click import confirm from click import prompt + from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent from samcli.lib.delete.cf_utils import CfUtils @@ -198,6 +200,9 @@ def delete(self): cf_template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) template_str = cf_template.get("TemplateBody", None) + if isinstance(template_str, dict): + template_str = json.dumps(cf_template.get("TemplateBody", None), indent=4, ensure_ascii=False) + # Get the cloudformation template name using template_str with mktempfile() as temp_file: self.cf_template_file_name = get_cf_template_name(temp_file, 
template_str, "template") From b172255ae1308d3b26a23a01705c997f88fad0e7 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 14 Jul 2021 12:10:01 -0400 Subject: [PATCH 096/121] Changed the order of companion stack and ecr repos deletion --- samcli/commands/delete/delete_context.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 95c34e372b..42901d11f7 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -201,7 +201,7 @@ def delete(self): template_str = cf_template.get("TemplateBody", None) if isinstance(template_str, dict): - template_str = json.dumps(cf_template.get("TemplateBody", None), indent=4, ensure_ascii=False) + template_str = json.dumps(template_str, indent=4, ensure_ascii=False) # Get the cloudformation template name using template_str with mktempfile() as temp_file: @@ -243,11 +243,6 @@ def delete(self): fg="yellow", ) - # Delete the ECR companion stack if it exists - if ecr_companion_stack_exists: - click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") - self.companion_stack_manager.delete_companion_stack() - # # Delete the repos created by ECR companion stack if it exists if ecr_companion_stack_exists and (self.no_prompts or self.delete_ecr_companion_stack_prompt): for key in self.ecr_repos: @@ -257,6 +252,11 @@ def delete(self): click.echo(f"\tDeleting ECR repository {repo.physical_id}") self.ecr_uploader.delete_ecr_repository(physical_id=repo.physical_id) + # Delete the ECR companion stack if it exists + if ecr_companion_stack_exists: + click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") + self.companion_stack_manager.delete_companion_stack() + def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. 
From 7b5732d52bfa5495116bbf2e4a6110d84bbdd58b Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 14 Jul 2021 22:39:55 -0400 Subject: [PATCH 097/121] Handle delete_failed status for ecr companion stack and changed delete_stack to include retain_resources --- samcli/commands/delete/delete_context.py | 16 ++++++++++++---- samcli/lib/delete/cf_utils.py | 15 ++++++--------- tests/unit/lib/delete/test_cf_utils.py | 15 ++++++++++++++- 3 files changed, 32 insertions(+), 14 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 42901d11f7..a013a597dd 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -243,19 +243,27 @@ def delete(self): fg="yellow", ) - # # Delete the repos created by ECR companion stack if it exists + # Delete the repos created by ECR companion stack if it exists if ecr_companion_stack_exists and (self.no_prompts or self.delete_ecr_companion_stack_prompt): + retain_repos = [] for key in self.ecr_repos: repo = self.ecr_repos[key]["repo"] is_delete = self.ecr_repos[key].get("delete_repo", None) if self.no_prompts or is_delete: click.echo(f"\tDeleting ECR repository {repo.physical_id}") self.ecr_uploader.delete_ecr_repository(physical_id=repo.physical_id) + else: + retain_repos.append(repo.logical_id) - # Delete the ECR companion stack if it exists - if ecr_companion_stack_exists: + # Delete the ECR companion stack if it exists click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") - self.companion_stack_manager.delete_companion_stack() + try: + # If delete_stack fails and its status changes to DELETE_FAILED, retain + # the user input repositories and delete the stack. 
+ self.cf_utils.delete_stack(stack_name=self.companion_stack_name) + self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) + except ValueError as ex: + self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_repos=retain_repos) def run(self): """ diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 40d3b58183..c5c74cc698 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -5,7 +5,7 @@ import logging -from typing import Dict +from typing import Dict, List from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError @@ -84,14 +84,14 @@ def get_stack_template(self, stack_name: str, stage: str) -> Dict: LOG.error("Unable to get stack details.", exc_info=e) raise e - def delete_stack(self, stack_name: str): + def delete_stack(self, stack_name: str, retain_repos: List = []): """ Delete the Cloudformation stack with the given stack_name :param stack_name: Name or ID of the stack """ try: - self._client.delete_stack(StackName=stack_name) + self._client.delete_stack(StackName=stack_name, RetainResources=retain_repos) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, @@ -120,10 +120,7 @@ def wait_for_delete(self, stack_name): waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) except WaiterError as ex: - resp = ex.last_response - status = resp["Status"] - reason = resp["StatusReason"] + if "DELETE_FAILED" in str(ex): + raise ValueError - raise DeleteFailedError( - stack_name=stack_name, msg="ex: {0} Status: {1}. 
Reason: {2}".format(ex, status, reason) - ) from ex + raise DeleteFailedError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 90d764a5c4..49dd911596 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -95,7 +95,7 @@ def test_cf_utils_delete_stack_exception_botocore(self): def test_cf_utils_delete_stack_exception(self): self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) with self.assertRaises(Exception): - self.cf_utils.delete_stack("test") + self.cf_utils.delete_stack("test", ["retain_logical_id"]) def test_cf_utils_wait_for_delete_exception(self): self.cf_utils._client.get_waiter = MagicMock( @@ -109,3 +109,16 @@ def test_cf_utils_wait_for_delete_exception(self): ) with self.assertRaises(DeleteFailedError): self.cf_utils.wait_for_delete("test") + + def test_cf_utils_wait_for_delete_failed_status(self): + self.cf_utils._client.get_waiter = MagicMock( + return_value=MockDeleteWaiter( + ex=WaiterError( + name="wait_for_delete", + reason="DELETE_FAILED ", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(ValueError): + self.cf_utils.wait_for_delete("test") From bbe7cec2afc4e423974ebd55818aea6764541093 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 15 Jul 2021 10:28:19 -0400 Subject: [PATCH 098/121] Reformatted auto ecr deletion to handle deleting companion stack as input stack name --- samcli/commands/delete/delete_context.py | 151 ++++++++++++++--------- samcli/commands/delete/exceptions.py | 10 ++ samcli/lib/delete/cf_utils.py | 38 +++++- tests/unit/lib/delete/test_cf_utils.py | 8 +- 4 files changed, 140 insertions(+), 67 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index a013a597dd..54ec7db408 100644 --- a/samcli/commands/delete/delete_context.py +++ 
b/samcli/commands/delete/delete_context.py @@ -2,24 +2,24 @@ Delete a SAM stack """ import logging -from typing import Union, Dict +from typing import Dict +import json import boto3 import click -import json from click import confirm from click import prompt +from samcli.lib.utils.hash import str_checksum from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name -from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo -from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager +from samcli.commands.delete.exceptions import CfDeleteFailedStatusError from samcli.lib.package.artifact_exporter import Template from samcli.lib.package.ecr_uploader import ECRUploader @@ -34,7 +34,7 @@ class DeleteContext: - ecr_repos: Dict[str, Dict[str, Union[str, ECRRepo]]] + ecr_repos: Dict[str, Dict[str, str]] def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool): self.stack_name = stack_name @@ -52,7 +52,6 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None - self.companion_stack_manager = None self.companion_stack_name = None self.delete_ecr_companion_stack_prompt = None self.ecr_repos = {} @@ -105,6 +104,9 @@ def init_clients(self): cloudformation_client = boto3.client( "cloudformation", region_name=self.region if self.region else None, config=boto_config ) + cloudformation_resource_client = boto3.resource( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, 
config=boto_config) ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) @@ -115,11 +117,7 @@ def init_clients(self): self.ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) self.uploaders = Uploaders(self.s3_uploader, self.ecr_uploader) - self.cf_utils = CfUtils(cloudformation_client) - - self.companion_stack_manager = CompanionStackManager( - stack_name=self.stack_name, region=self.region, s3_bucket=self.s3_bucket, s3_prefix=self.s3_prefix - ) + self.cf_utils = CfUtils(cloudformation_client, cloudformation_resource_client) def guided_prompts(self): """ @@ -157,11 +155,10 @@ def guided_prompts(self): def ecr_companion_stack_prompts(self): """ - Guided prompts asking customer to delete ECR companion stack - and the related artifacts. + User prompt to delete the ECR companion stack. """ - self.companion_stack_name = self.companion_stack_manager.get_companion_stack_name() click.echo(f"\tFound ECR Companion Stack {self.companion_stack_name}") + if not self.no_prompts: self.delete_ecr_companion_stack_prompt = confirm( click.style( @@ -171,26 +168,47 @@ def ecr_companion_stack_prompts(self): ), default=False, ) + + def ecr_repos_prompts(self): + """ + User prompts to delete the ECR repositories. 
+ """ if self.no_prompts or self.delete_ecr_companion_stack_prompt: - ecr_repos = self.companion_stack_manager.list_deployed_repos() - if ecr_repos: + self.ecr_repos = self.cf_utils.get_deployed_repos(stack_name=self.companion_stack_name) + if self.ecr_repos: click.echo("\t#Note: Empty repositories created by SAM CLI will be deleted automatically.") - for repo in ecr_repos: + for logical_id in self.ecr_repos: # Get all the repos from the companion stack - repo_logical_id = repo.logical_id - self.ecr_repos[repo_logical_id] = {"repo": repo} - + repo = self.ecr_repos[logical_id] + repo_physical_id = repo["physical_id"] if self.delete_ecr_companion_stack_prompt: delete_repo = confirm( click.style( - f"\tECR repository {repo.physical_id}" + f"\tECR repository {repo_physical_id}" + " may not be empty. Do you want to delete the repository and all the images in it ?", bold=True, ), default=False, ) - self.ecr_repos[repo_logical_id]["delete_repo"] = delete_repo + repo["delete_repo"] = delete_repo + + def delete_ecr_repos(self): + """ + Delete the ECR repositories and return the repositories + that the user wants to retain. 
+ """ + retain_repos = [] + for logical_id in self.ecr_repos: + repo = self.ecr_repos[logical_id] + physical_id = repo["physical_id"] + is_delete = repo.get("delete_repo", None) + if self.no_prompts or is_delete: + click.echo(f"\tDeleting ECR repository {physical_id}") + self.ecr_uploader.delete_ecr_repository(physical_id=physical_id) + else: + retain_repos.append(logical_id) + return retain_repos def delete(self): """ @@ -200,7 +218,17 @@ def delete(self): cf_template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) template_str = cf_template.get("TemplateBody", None) + ecr_companion_stack_exists = False if isinstance(template_str, dict): + metadata_stack_name = template_str.get("Metadata", {}).get("CompanionStackname", None) + # Check if the input stack is ecr companion stack + if metadata_stack_name == self.stack_name: + ecr_companion_stack_exists = True + self.companion_stack_name = self.stack_name + + if not self.no_prompts: + self.delete_ecr_companion_stack_prompt = True + template_str = json.dumps(template_str, indent=4, ensure_ascii=False) # Get the cloudformation template name using template_str @@ -209,22 +237,36 @@ def delete(self): self.guided_prompts() - # ECR companion stack delete prompts, if it exists - ecr_companion_stack_exists = self.companion_stack_manager.does_companion_stack_exist() - if ecr_companion_stack_exists: - self.ecr_companion_stack_prompts() - - # Delete the primary stack - click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") - self.cf_utils.delete_stack(stack_name=self.stack_name) - self.cf_utils.wait_for_delete(self.stack_name) - LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + # If the input stack name is ecr companion stack, skip the below steps + if not ecr_companion_stack_exists: + + # ECR companion stack delete prompts, if it exists + parent_stack_hash = str_checksum(self.stack_name) + possible_companion_stack_name = 
f"{self.stack_name[:104]}-{parent_stack_hash[:8]}-CompanionStack" + ecr_companion_stack_exists = self.cf_utils.has_stack(stack_name=possible_companion_stack_name) + if ecr_companion_stack_exists: + self.companion_stack_name = possible_companion_stack_name + self.ecr_companion_stack_prompts() + self.ecr_repos_prompts() + + # Delete the primary stack + click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") + self.cf_utils.delete_stack(stack_name=self.stack_name) + self.cf_utils.wait_for_delete(self.stack_name) + LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + + # Delete the artifacts + template = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders, + code_signer=None, + template_str=template_str, + ) + template.delete() - # Delete the artifacts - template = Template( - template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str - ) - template.delete() + else: + self.ecr_repos_prompts() # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -234,37 +276,28 @@ def delete(self): elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - # If s3_bucket information is not available - elif not self.s3_bucket: - LOG.debug("Cannot delete s3 files as no s3_bucket found") - click.secho( - "\nWarning: s3_bucket and s3_prefix information cannot be obtained," - " delete the files manually if required", - fg="yellow", - ) - - # Delete the repos created by ECR companion stack if it exists + # Delete the repos created by ECR companion stack and the stack if it exists if ecr_companion_stack_exists and (self.no_prompts or self.delete_ecr_companion_stack_prompt): - retain_repos = [] - for key in self.ecr_repos: - repo = self.ecr_repos[key]["repo"] - is_delete = self.ecr_repos[key].get("delete_repo", None) - if self.no_prompts or is_delete: - click.echo(f"\tDeleting ECR repository {repo.physical_id}") - 
self.ecr_uploader.delete_ecr_repository(physical_id=repo.physical_id) - else: - retain_repos.append(repo.logical_id) - - # Delete the ECR companion stack if it exists + retain_repos = self.delete_ecr_repos() + click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") try: # If delete_stack fails and its status changes to DELETE_FAILED, retain # the user input repositories and delete the stack. self.cf_utils.delete_stack(stack_name=self.companion_stack_name) self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) - except ValueError as ex: + except CfDeleteFailedStatusError: self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_repos=retain_repos) + # If s3_bucket information is not available, warn the user + if not self.s3_bucket: + LOG.debug("Cannot delete s3 files as no s3_bucket found") + click.secho( + "\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file" + " or cloudformation template, delete the s3 files manually if required", + fg="yellow", + ) + def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. 
diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py index 7e2ba5105c..9a4b6a81cd 100644 --- a/samcli/commands/delete/exceptions.py +++ b/samcli/commands/delete/exceptions.py @@ -14,6 +14,16 @@ def __init__(self, stack_name, msg): super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) +class CfDeleteFailedStatusError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Stack could not be deleted as it encountered DELETE_FAILED status: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + class FetchTemplateFailedError(UserException): def __init__(self, stack_name, msg): self.stack_name = stack_name diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index c5c74cc698..4317a934f8 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -5,17 +5,18 @@ import logging -from typing import Dict, List +from typing import Dict, List, Optional from botocore.exceptions import ClientError, BotoCoreError, WaiterError -from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError, CfDeleteFailedStatusError LOG = logging.getLogger(__name__) class CfUtils: - def __init__(self, cloudformation_client): + def __init__(self, cloudformation_client, cloudformation_resource_client): self._client = cloudformation_client + self._resource_client = cloudformation_resource_client def has_stack(self, stack_name: str) -> bool: """ @@ -84,12 +85,15 @@ def get_stack_template(self, stack_name: str, stage: str) -> Dict: LOG.error("Unable to get stack details.", exc_info=e) raise e - def delete_stack(self, stack_name: str, retain_repos: List = []): + def delete_stack(self, stack_name: str, retain_repos: Optional[List] = None): """ Delete the 
Cloudformation stack with the given stack_name :param stack_name: Name or ID of the stack + :param retain_repos: List of repositories to retain if the stack has DELETE_FAILED status. """ + if not retain_repos: + retain_repos = [] try: self._client.delete_stack(StackName=stack_name, RetainResources=retain_repos) @@ -121,6 +125,30 @@ def wait_for_delete(self, stack_name): except WaiterError as ex: if "DELETE_FAILED" in str(ex): - raise ValueError + raise CfDeleteFailedStatusError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex raise DeleteFailedError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex + + def get_deployed_repos(self, stack_name: str) -> Dict[str, Dict[str, str]]: + """ + List deployed ECR repos for this companion stack and return as a dict + + :param stack_name: Stack name + + Returns + ------- + Dict[str, Dict[str, str]] + List of ECR repos deployed for this companion stack + Returns empty list if companion stack does not exist + """ + repos = dict() + stack = self._resource_client.Stack(stack_name) + resources = stack.resource_summaries.all() + for resource in resources: + if resource.resource_type == "AWS::ECR::Repository": + logical_id = resource.logical_resource_id + physical_id = resource.physical_resource_id + + repos[logical_id] = {"physical_id": physical_id} + + return repos diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 49dd911596..f68526af32 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -2,7 +2,7 @@ from unittest import TestCase -from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError, CfDeleteFailedStatusError from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.lib.delete.cf_utils import CfUtils @@ -22,11 +22,13 @@ class TestCfUtils(TestCase): def setUp(self): 
self.session = MagicMock() self.cloudformation_client = self.session.client("cloudformation") + self.cloudformation_resource_client = self.session.resource("cloudformation") self.s3_client = self.session.client("s3") - self.cf_utils = CfUtils(self.cloudformation_client) + self.cf_utils = CfUtils(self.cloudformation_client, self.cloudformation_resource_client) def test_cf_utils_init(self): self.assertEqual(self.cf_utils._client, self.cloudformation_client) + self.assertEqual(self.cf_utils._resource_client, self.cloudformation_resource_client) def test_cf_utils_has_no_stack(self): self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) @@ -120,5 +122,5 @@ def test_cf_utils_wait_for_delete_failed_status(self): ) ) ) - with self.assertRaises(ValueError): + with self.assertRaises(CfDeleteFailedStatusError): self.cf_utils.wait_for_delete("test") From 8b0d9798acec00ffd292da2f60733c52880e18af Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 15 Jul 2021 17:05:31 -0400 Subject: [PATCH 099/121] Fixed and added more unit tests for delete_context --- samcli/commands/delete/delete_context.py | 5 +- samcli/lib/delete/cf_utils.py | 4 +- .../commands/delete/test_delete_context.py | 129 ++++++++++++++++-- 3 files changed, 124 insertions(+), 14 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 54ec7db408..c35c5e418d 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -204,7 +204,7 @@ def delete_ecr_repos(self): physical_id = repo["physical_id"] is_delete = repo.get("delete_repo", None) if self.no_prompts or is_delete: - click.echo(f"\tDeleting ECR repository {physical_id}") + click.echo(f"\t- Deleting ECR repository {physical_id}") self.ecr_uploader.delete_ecr_repository(physical_id=physical_id) else: retain_repos.append(logical_id) @@ -223,6 +223,7 @@ def delete(self): metadata_stack_name = template_str.get("Metadata", 
{}).get("CompanionStackname", None) # Check if the input stack is ecr companion stack if metadata_stack_name == self.stack_name: + LOG.debug("Input stack name is ecr companion stack for an unknown stack") ecr_companion_stack_exists = True self.companion_stack_name = self.stack_name @@ -245,6 +246,7 @@ def delete(self): possible_companion_stack_name = f"{self.stack_name[:104]}-{parent_stack_hash[:8]}-CompanionStack" ecr_companion_stack_exists = self.cf_utils.has_stack(stack_name=possible_companion_stack_name) if ecr_companion_stack_exists: + LOG.debug("ECR Companion stack found for the input stack") self.companion_stack_name = possible_companion_stack_name self.ecr_companion_stack_prompts() self.ecr_repos_prompts() @@ -287,6 +289,7 @@ def delete(self): self.cf_utils.delete_stack(stack_name=self.companion_stack_name) self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) except CfDeleteFailedStatusError: + LOG.debug("delete_stack resulted failed and so re-try with retain_resources") self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_repos=retain_repos) # If s3_bucket information is not available, warn the user diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 4317a934f8..b570ed730a 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -146,9 +146,7 @@ def get_deployed_repos(self, stack_name: str) -> Dict[str, Dict[str, str]]: resources = stack.resource_summaries.all() for resource in resources: if resource.resource_type == "AWS::ECR::Repository": - logical_id = resource.logical_resource_id + logical_id = resource.logical_id physical_id = resource.physical_resource_id - repos[logical_id] = {"physical_id": physical_id} - return repos diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index f0975f144e..4db9e30950 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ 
b/tests/unit/commands/delete/test_delete_context.py @@ -7,6 +7,7 @@ from samcli.cli.cli_config_file import TomlProvider from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.package.ecr_uploader import ECRUploader class TestDeleteContext(TestCase): @@ -93,6 +94,7 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({}))) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) @patch("samcli.commands.deploy.guided_context.click.get_current_context") def test_delete_context_valid_execute_run(self, patched_click_get_current_context): @@ -107,15 +109,16 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex ) as delete_context: delete_context.run() - self.assertEqual(CfUtils.has_stack.call_count, 1) + self.assertEqual(CfUtils.has_stack.call_count, 2) self.assertEqual(CfUtils.get_stack_template.call_count, 1) - self.assertEqual(CfUtils.delete_stack.call_count, 1) - self.assertEqual(CfUtils.wait_for_delete.call_count, 1) + self.assertEqual(CfUtils.delete_stack.call_count, 2) + self.assertEqual(CfUtils.wait_for_delete.call_count, 2) self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) + self.assertEqual(CfUtils.get_deployed_repos.call_count, 1) @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.deploy.guided_context.click.secho") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", 
MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @@ -132,8 +135,8 @@ def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_ec delete_context.run() expected_click_secho_calls = [ call( - "\nWarning: s3_bucket and s3_prefix information cannot be obtained," - " delete the files manually if required", + "\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file" + " or cloudformation template, delete the s3 files manually if required", fg="yellow", ), ] @@ -147,7 +150,7 @@ def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_ec @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @@ -164,7 +167,6 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi no_prompts=None, ) as delete_context: patched_confirm.side_effect = [True, False, True] - delete_context.cf_template_file_name = "hello.template" delete_context.s3_bucket = "s3_bucket" delete_context.s3_prefix = "s3_prefix" @@ -201,11 +203,12 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, 
"wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( self, patched_confirm, patched_get_cf_template_name ): @@ -220,7 +223,6 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( no_prompts=None, ) as delete_context: patched_confirm.side_effect = [True, True] - delete_context.cf_template_file_name = "hello.template" delete_context.s3_bucket = "s3_bucket" delete_context.run() @@ -244,3 +246,110 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) self.assertTrue(delete_context.delete_cf_template_file) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.confirm") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_artifact", MagicMock()) + @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({"logical_id": {"physical_id": "test_id"}}))) + def test_guided_prompts_ecr_companion_stack_present_execute_run( + self, patched_confirm, patched_get_cf_template_name + ): + + patched_get_cf_template_name.return_value = "hello.template" + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=None, + ) as delete_context: + patched_confirm.side_effect = [True, False, True, True, True] + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + # Now to check for all the defaults on confirmations. 
+ expected_confirmation_calls = [ + call( + click.style( + f"\tAre you sure you want to delete the stack test" + f" in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tAre you sure you want to delete the folder" + + f" s3_prefix in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you want to delete the template file hello.template in S3?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you you want to delete the ECR companion stack" + + " test-098f6bcd-CompanionStack in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + f"\tECR repository test_id" + + " may not be empty. Do you want to delete the repository and all the images in it ?", + bold=True, + ), + default=False, + ), + ] + + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + self.assertFalse(delete_context.delete_artifacts_folder) + self.assertTrue(delete_context.delete_cf_template_file) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.click.echo") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) + @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({"logical_id": {"physical_id": "test_id"}}))) + def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( + self, patched_click_echo, patched_get_cf_template_name + ): + CfUtils.get_stack_template.return_value = { + "TemplateBody": {"Metadata": {"CompanionStackname": "test-098f6bcd-CompanionStack"}} + } + patched_get_cf_template_name.return_value = "hello.template" + 
with DeleteContext( + stack_name="test-098f6bcd-CompanionStack", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=True, + ) as delete_context: + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + expected_click_echo_calls = [ + call("\t#Note: Empty repositories created by SAM CLI will be deleted automatically."), + call("\t- Deleting ECR repository test_id"), + call("\t- Deleting ECR Companion Stack test-098f6bcd-CompanionStack"), + call("\nDeleted successfully"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) From 189e3830e47503f2256e1e45f19bf1fdf83c9a72 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Fri, 16 Jul 2021 09:38:20 -0400 Subject: [PATCH 100/121] When region is not provided, prompt user to enter profile and region --- samcli/commands/delete/delete_context.py | 20 +++++++++++++++---- .../commands/delete/test_delete_context.py | 2 +- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index c35c5e418d..0b371512b8 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -19,6 +19,9 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name +from samcli.lib.schemas.schemas_aws_config import get_aws_configuration_choice +from samcli.cli.context import Context + from samcli.commands.delete.exceptions import CfDeleteFailedStatusError from samcli.lib.package.artifact_exporter import Template @@ -87,10 +90,10 @@ def parse_config_file(self): LOG.debug("Local config present and using the defined options") if not self.region: self.region = config_options.get("region", None) - click.get_current_context().region = self.region + Context.get_current_context().region = self.region if not 
self.profile: self.profile = config_options.get("profile", None) - click.get_current_context().profile = self.profile + Context.get_current_context().profile = self.profile self.s3_bucket = config_options.get("s3_bucket", None) self.s3_prefix = config_options.get("s3_prefix", None) @@ -98,6 +101,13 @@ def init_clients(self): """ Initialize all the clients being used by sam delete. """ + if not self.region: + aws_config = get_aws_configuration_choice() + self.region = aws_config["region"] + self.profile = aws_config["profile"] + Context.get_current_context().region = self.region + Context.get_current_context().profile = self.profile + boto_config = get_boto_config_with_user_agent() # Define cf_client based on the region as different regions can have same stack-names @@ -111,7 +121,6 @@ def init_clients(self): s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) - self.region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) self.ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) @@ -324,4 +333,7 @@ def run(self): click.echo("\nDeleted successfully") else: LOG.debug("Input stack does not exists on Cloudformation") - click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") + click.echo( + f"Error: The input stack {self.stack_name} does" + + f" not exist on Cloudformation in the region {self.region}" + ) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 4db9e30950..391b6e1fee 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -25,7 +25,7 @@ def 
test_delete_context_stack_does_not_exist(self, patched_click_echo): delete_context.run() expected_click_echo_calls = [ - call(f"Error: The input stack test does not exist on Cloudformation"), + call(f"Error: The input stack test does" + f" not exist on Cloudformation in the region us-east-1"), ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) From a933202ccbce1d68498712e4a2d660df96b849ce Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Sat, 17 Jul 2021 21:04:10 -0400 Subject: [PATCH 101/121] Removed region prompt and reading it from current session or assign a default instead --- samcli/commands/delete/delete_context.py | 7 +++---- tests/unit/commands/delete/test_delete_context.py | 2 ++ 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 0b371512b8..7b8d62f2fe 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -102,11 +102,10 @@ def init_clients(self): Initialize all the clients being used by sam delete. 
""" if not self.region: - aws_config = get_aws_configuration_choice() - self.region = aws_config["region"] - self.profile = aws_config["profile"] + session = boto3.Session() + region = session.region_name + self.region = region if region else "us-east-1" Context.get_current_context().region = self.region - Context.get_current_context().profile = self.profile boto_config = get_boto_config_with_user_agent() diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 391b6e1fee..90507e23f4 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -254,6 +254,7 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({"logical_id": {"physical_id": "test_id"}}))) def test_guided_prompts_ecr_companion_stack_present_execute_run( self, patched_confirm, patched_get_cf_template_name @@ -326,6 +327,7 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run( @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({"logical_id": {"physical_id": "test_id"}}))) def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( self, patched_click_echo, patched_get_cf_template_name From 80f65618e363b2d8706a2ee45478a81545e2e205 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Sun, 18 Jul 2021 22:25:44 -0400 Subject: [PATCH 102/121] Added ECR resource in packageable_resources and refactored 
ecr companion stack deletion --- samcli/commands/_utils/resources.py | 2 + samcli/commands/delete/delete_context.py | 181 +++++++++----------- samcli/lib/delete/cf_utils.py | 36 +--- samcli/lib/package/artifact_exporter.py | 29 +++- samcli/lib/package/ecr_uploader.py | 1 + samcli/lib/package/packageable_resources.py | 23 +++ 6 files changed, 142 insertions(+), 130 deletions(-) diff --git a/samcli/commands/_utils/resources.py b/samcli/commands/_utils/resources.py index d3b2a18be3..ce448d6968 100644 --- a/samcli/commands/_utils/resources.py +++ b/samcli/commands/_utils/resources.py @@ -23,6 +23,7 @@ AWS_GLUE_JOB = "AWS::Glue::Job" AWS_SERVERLESS_STATEMACHINE = "AWS::Serverless::StateMachine" AWS_STEPFUNCTIONS_STATEMACHINE = "AWS::StepFunctions::StateMachine" +AWS_ECR_REPOSITORY = "AWS::ECR::Repository" METADATA_WITH_LOCAL_PATHS = {AWS_SERVERLESSREPO_APPLICATION: ["LicenseUrl", "ReadmeUrl"]} @@ -50,6 +51,7 @@ RESOURCES_WITH_IMAGE_COMPONENT = { AWS_SERVERLESS_FUNCTION: ["ImageUri"], AWS_LAMBDA_FUNCTION: ["Code"], + AWS_ECR_REPOSITORY: ["RepositoryName"], } diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 7b8d62f2fe..6204e19da8 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -19,7 +19,6 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name -from samcli.lib.schemas.schemas_aws_config import get_aws_configuration_choice from samcli.cli.context import Context from samcli.commands.delete.exceptions import CfDeleteFailedStatusError @@ -56,8 +55,6 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, self.delete_artifacts_folder = None self.delete_cf_template_file = None self.companion_stack_name = None - self.delete_ecr_companion_stack_prompt = None - self.ecr_repos = {} def __enter__(self): self.parse_config_file() @@ -113,9 +110,6 @@ def 
init_clients(self): cloudformation_client = boto3.client( "cloudformation", region_name=self.region if self.region else None, config=boto_config ) - cloudformation_resource_client = boto3.resource( - "cloudformation", region_name=self.region if self.region else None, config=boto_config - ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) @@ -125,9 +119,9 @@ def init_clients(self): self.ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) self.uploaders = Uploaders(self.s3_uploader, self.ecr_uploader) - self.cf_utils = CfUtils(cloudformation_client, cloudformation_resource_client) + self.cf_utils = CfUtils(cloudformation_client) - def guided_prompts(self): + def s3_prompts(self): """ Guided prompts asking customer to delete artifacts """ @@ -166,9 +160,8 @@ def ecr_companion_stack_prompts(self): User prompt to delete the ECR companion stack. """ click.echo(f"\tFound ECR Companion Stack {self.companion_stack_name}") - if not self.no_prompts: - self.delete_ecr_companion_stack_prompt = confirm( + delete_ecr_companion_stack_prompt = confirm( click.style( "\tDo you you want to delete the ECR companion stack" + f" {self.companion_stack_name} in the region {self.region} ?", @@ -176,48 +169,65 @@ def ecr_companion_stack_prompts(self): ), default=False, ) + return delete_ecr_companion_stack_prompt + return True - def ecr_repos_prompts(self): + def ecr_repos_prompts(self, template: Template): """ User prompts to delete the ECR repositories. 
""" - if self.no_prompts or self.delete_ecr_companion_stack_prompt: - self.ecr_repos = self.cf_utils.get_deployed_repos(stack_name=self.companion_stack_name) - if self.ecr_repos: - click.echo("\t#Note: Empty repositories created by SAM CLI will be deleted automatically.") - - for logical_id in self.ecr_repos: - # Get all the repos from the companion stack - repo = self.ecr_repos[logical_id] - repo_physical_id = repo["physical_id"] - if self.delete_ecr_companion_stack_prompt: - delete_repo = confirm( - click.style( - f"\tECR repository {repo_physical_id}" - + " may not be empty. Do you want to delete the repository and all the images in it ?", - bold=True, - ), - default=False, - ) - repo["delete_repo"] = delete_repo - - def delete_ecr_repos(self): - """ - Delete the ECR repositories and return the repositories - that the user wants to retain. - """ retain_repos = [] - for logical_id in self.ecr_repos: - repo = self.ecr_repos[logical_id] - physical_id = repo["physical_id"] - is_delete = repo.get("delete_repo", None) - if self.no_prompts or is_delete: - click.echo(f"\t- Deleting ECR repository {physical_id}") - self.ecr_uploader.delete_ecr_repository(physical_id=physical_id) - else: - retain_repos.append(logical_id) + ecr_repos = template.get_ecr_repos() + if ecr_repos: + click.echo("\t#Note: Empty repositories created by SAM CLI will be deleted automatically.") + if not self.no_prompts: + for logical_id in ecr_repos: + # Get all the repos from the companion stack + repo = ecr_repos[logical_id] + repo_name = repo["Repository"] + + delete_repo = confirm( + click.style( + f"\tECR repository {repo_name}" + + " may not be empty. 
Do you want to delete the repository and all the images in it ?", + bold=True, + ), + default=False, + ) + if not delete_repo: + retain_repos.append(logical_id) return retain_repos + def delete_ecr_companion_stack(self): + delete_ecr_companion_stack_prompt = self.ecr_companion_stack_prompts() + if delete_ecr_companion_stack_prompt or self.no_prompts: + cf_ecr_companion_stack = self.cf_utils.get_stack_template(self.companion_stack_name, TEMPLATE_STAGE) + ecr_stack_template_str = cf_ecr_companion_stack.get("TemplateBody", None) + ecr_stack_template_str = json.dumps(ecr_stack_template_str, indent=4, ensure_ascii=False) + + ecr_companion_stack_template = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders, + code_signer=None, + template_str=ecr_stack_template_str, + ) + + # Delete the repos created by ECR companion stack and the stack + retain_repos = self.ecr_repos_prompts(ecr_companion_stack_template) + + ecr_companion_stack_template.delete(retain_resources=retain_repos) + + click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") + try: + # If delete_stack fails and its status changes to DELETE_FAILED, retain + # the user input repositories and delete the stack. 
+ self.cf_utils.delete_stack(stack_name=self.companion_stack_name) + self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) + except CfDeleteFailedStatusError: + LOG.debug("delete_stack resulted failed and so re-try with retain_resources") + self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_resources=retain_repos) + def delete(self): """ Delete method calls for Cloudformation stacks and S3 and ECR artifacts @@ -226,57 +236,36 @@ def delete(self): cf_template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) template_str = cf_template.get("TemplateBody", None) - ecr_companion_stack_exists = False if isinstance(template_str, dict): - metadata_stack_name = template_str.get("Metadata", {}).get("CompanionStackname", None) - # Check if the input stack is ecr companion stack - if metadata_stack_name == self.stack_name: - LOG.debug("Input stack name is ecr companion stack for an unknown stack") - ecr_companion_stack_exists = True - self.companion_stack_name = self.stack_name - - if not self.no_prompts: - self.delete_ecr_companion_stack_prompt = True - template_str = json.dumps(template_str, indent=4, ensure_ascii=False) # Get the cloudformation template name using template_str with mktempfile() as temp_file: self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") - self.guided_prompts() + self.s3_prompts() - # If the input stack name is ecr companion stack, skip the below steps - if not ecr_companion_stack_exists: + # Delete the artifacts + template = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders, + code_signer=None, + template_str=template_str, + ) - # ECR companion stack delete prompts, if it exists - parent_stack_hash = str_checksum(self.stack_name) - possible_companion_stack_name = f"{self.stack_name[:104]}-{parent_stack_hash[:8]}-CompanionStack" - ecr_companion_stack_exists = self.cf_utils.has_stack(stack_name=possible_companion_stack_name) - if 
ecr_companion_stack_exists: - LOG.debug("ECR Companion stack found for the input stack") - self.companion_stack_name = possible_companion_stack_name - self.ecr_companion_stack_prompts() - self.ecr_repos_prompts() + retain_repos = self.ecr_repos_prompts(template) - # Delete the primary stack - click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") - self.cf_utils.delete_stack(stack_name=self.stack_name) - self.cf_utils.wait_for_delete(self.stack_name) - LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + # ECR companion stack delete prompts, if it exists + parent_stack_hash = str_checksum(self.stack_name) + possible_companion_stack_name = f"{self.stack_name[:104]}-{parent_stack_hash[:8]}-CompanionStack" + ecr_companion_stack_exists = self.cf_utils.has_stack(stack_name=possible_companion_stack_name) + if ecr_companion_stack_exists: + LOG.debug("ECR Companion stack found for the input stack") + self.companion_stack_name = possible_companion_stack_name + self.delete_ecr_companion_stack() - # Delete the artifacts - template = Template( - template_path=None, - parent_dir=None, - uploaders=self.uploaders, - code_signer=None, - template_str=template_str, - ) - template.delete() - - else: - self.ecr_repos_prompts() + template.delete(retain_resources=retain_repos) # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -286,19 +275,15 @@ def delete(self): elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - # Delete the repos created by ECR companion stack and the stack if it exists - if ecr_companion_stack_exists and (self.no_prompts or self.delete_ecr_companion_stack_prompt): - retain_repos = self.delete_ecr_repos() - - click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") - try: - # If delete_stack fails and its status changes to DELETE_FAILED, retain - # the user input repositories and delete the stack. 
- self.cf_utils.delete_stack(stack_name=self.companion_stack_name) - self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) - except CfDeleteFailedStatusError: - LOG.debug("delete_stack resulted failed and so re-try with retain_resources") - self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_repos=retain_repos) + # Delete the primary stack + try: + click.echo(f"\t- Deleting Cloudformation stack {self.stack_name}") + self.cf_utils.delete_stack(stack_name=self.stack_name) + self.cf_utils.wait_for_delete(self.stack_name) + LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + except CfDeleteFailedStatusError: + LOG.debug("delete_stack resulted failed and so re-try with retain_resources") + self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_repos) # If s3_bucket information is not available, warn the user if not self.s3_bucket: diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index b570ed730a..37fab22eea 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -14,9 +14,9 @@ class CfUtils: - def __init__(self, cloudformation_client, cloudformation_resource_client): + def __init__(self, cloudformation_client): self._client = cloudformation_client - self._resource_client = cloudformation_resource_client + # self._resource_client = cloudformation_resource_client def has_stack(self, stack_name: str) -> bool: """ @@ -85,17 +85,17 @@ def get_stack_template(self, stack_name: str, stage: str) -> Dict: LOG.error("Unable to get stack details.", exc_info=e) raise e - def delete_stack(self, stack_name: str, retain_repos: Optional[List] = None): + def delete_stack(self, stack_name: str, retain_resources: Optional[List] = None): """ Delete the Cloudformation stack with the given stack_name :param stack_name: Name or ID of the stack - :param retain_repos: List of repositories to retain if the stack has DELETE_FAILED status. 
+ :param retain_resources: List of repositories to retain if the stack has DELETE_FAILED status. """ - if not retain_repos: - retain_repos = [] + if not retain_resources: + retain_resources = [] try: - self._client.delete_stack(StackName=stack_name, RetainResources=retain_repos) + self._client.delete_stack(StackName=stack_name, RetainResources=retain_resources) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, @@ -128,25 +128,3 @@ def wait_for_delete(self, stack_name): raise CfDeleteFailedStatusError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex raise DeleteFailedError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex - - def get_deployed_repos(self, stack_name: str) -> Dict[str, Dict[str, str]]: - """ - List deployed ECR repos for this companion stack and return as a dict - - :param stack_name: Stack name - - Returns - ------- - Dict[str, Dict[str, str]] - List of ECR repos deployed for this companion stack - Returns empty list if companion stack does not exist - """ - repos = dict() - stack = self._resource_client.Stack(stack_name) - resources = stack.resource_summaries.all() - for resource in resources: - if resource.resource_type == "AWS::ECR::Repository": - logical_id = resource.logical_id - physical_id = resource.physical_resource_id - repos[logical_id] = {"physical_id": physical_id} - return repos diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 00fa5cb089..e2bbe8328c 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -16,7 +16,7 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import os -from typing import Dict, Optional +from typing import Dict, Optional, List from botocore.utils import set_value_from_jmespath @@ -33,6 +33,7 @@ METADATA_EXPORT_LIST, GLOBAL_EXPORT_DICT, ResourceZip, + ECRResource, ) from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Uploaders @@ -240,7 +241,7 @@ def export(self) -> Dict: return self.template_dict - def delete(self): + def delete(self, retain_resources: List): """ Deletes all the artifacts referenced by the given Cloudformation template """ @@ -256,7 +257,7 @@ def delete(self): resource_deletion_policy = resource.get("DeletionPolicy", None) # If the deletion policy is set to Retain, # do not delete the artifact for the resource. - if resource_deletion_policy != "Retain": + if resource_deletion_policy != "Retain" and resource_id not in retain_resources: for exporter_class in self.resources_to_export: if exporter_class.RESOURCE_TYPE != resource_type: continue @@ -265,3 +266,25 @@ def delete(self): # Delete code resources exporter = exporter_class(self.uploaders, None) exporter.delete(resource_id, resource_dict) + + def get_ecr_repos(self): + """ + Get all the ecr repos from the template + """ + ecr_repos = {} + if "Resources" not in self.template_dict: + return ecr_repos + + self._apply_global_values() + for resource_id, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", {}) + resource_deletion_policy = resource.get("DeletionPolicy", None) + if resource_deletion_policy == "Retain" or resource_type != "AWS::ECR::Repository": + continue + + ecr_resource = ECRResource(self.uploaders, None) + ecr_repos[resource_id] = {"Repository": ecr_resource.get_property_value(resource_dict)} + + return ecr_repos diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 675a2accf5..7fbbc4d372 100644 --- a/samcli/lib/package/ecr_uploader.py +++ 
b/samcli/lib/package/ecr_uploader.py @@ -146,6 +146,7 @@ def delete_ecr_repository(self, physical_id: str): :param: physical_id of the repository to be deleted """ try: + click.echo(f"\t- Deleting ECR repository {physical_id}") self.ecr_client.delete_repository(repositoryName=physical_id, force=True) except self.ecr_client.exceptions.RepositoryNotFoundException: # If the repository is empty, cloudformation automatically deletes diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 39d9b73867..1b946683c3 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -46,6 +46,7 @@ METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS, RESOURCES_WITH_IMAGE_COMPONENT, + AWS_ECR_REPOSITORY, ) from samcli.lib.utils.packagetype import IMAGE, ZIP @@ -496,6 +497,27 @@ class CloudFormationResourceVersionSchemaHandlerPackage(ResourceZip): PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[AWS_CLOUDFORMATION_RESOURCEVERSION][0] +class ECRResource(Resource): + RESOURCE_TYPE = AWS_ECR_REPOSITORY + PROPERTY_NAME = RESOURCES_WITH_IMAGE_COMPONENT[RESOURCE_TYPE][0] + ARTIFACT_TYPE = ZIP + EXPORT_DESTINATION = Destination.ECR + + def delete(self, resource_id, resource_dict): + if resource_dict is None: + return + + repository_name = self.get_property_value(resource_dict) + if repository_name: + self.uploader.delete_ecr_repository(physical_id=repository_name) + + def get_property_value(self, resource_dict): + if resource_dict is None: + return None + + return jmespath.search(self.PROPERTY_NAME, resource_dict) + + RESOURCES_EXPORT_LIST = [ ServerlessFunctionResource, ServerlessFunctionImageResource, @@ -517,6 +539,7 @@ class CloudFormationResourceVersionSchemaHandlerPackage(ResourceZip): GlueJobCommandScriptLocationResource, CloudFormationModuleVersionModulePackage, CloudFormationResourceVersionSchemaHandlerPackage, + ECRResource, ] METADATA_EXPORT_LIST = [ServerlessRepoApplicationReadme, 
ServerlessRepoApplicationLicense] From 8d19497b5cd33264ceeb465122f19f33a5014dfc Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 19 Jul 2021 00:33:15 -0400 Subject: [PATCH 103/121] Added log statements and unit tests for ECRResource --- samcli/commands/delete/delete_context.py | 32 ++++++++----- .../commands/delete/test_delete_context.py | 23 +++++---- tests/unit/lib/delete/test_cf_utils.py | 10 ++-- .../lib/package/test_artifact_exporter.py | 47 ++++++++++++++++++- 4 files changed, 83 insertions(+), 29 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 6204e19da8..19d78967f6 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -82,7 +82,7 @@ def parse_config_file(self): if not self.stack_name: self.stack_name = config_options.get("stack_name", None) # If the stack_name is same as the one present in samconfig file, - # get the information about parameters if not specified by customer. + # get the information about parameters if not specified by user. if self.stack_name and self.stack_name == config_options.get("stack_name", None): LOG.debug("Local config present and using the defined options") if not self.region: @@ -123,12 +123,12 @@ def init_clients(self): def s3_prompts(self): """ - Guided prompts asking customer to delete artifacts + Guided prompts asking user to delete s3 artifacts """ # Note: s3_bucket and s3_prefix information is only # available if a local toml file is present or if # this information is obtained from the template resources and so if this - # information is not found, warn the customer that S3 artifacts + # information is not found, warn the user that S3 artifacts # will need to be manually deleted. if not self.no_prompts and self.s3_bucket: @@ -174,12 +174,13 @@ def ecr_companion_stack_prompts(self): def ecr_repos_prompts(self, template: Template): """ - User prompts to delete the ECR repositories. 
+ User prompts to delete the ECR repositories for the given template. + + :param template: Template to get the ECR repositories. """ retain_repos = [] ecr_repos = template.get_ecr_repos() - if ecr_repos: - click.echo("\t#Note: Empty repositories created by SAM CLI will be deleted automatically.") + if not self.no_prompts: for logical_id in ecr_repos: # Get all the repos from the companion stack @@ -199,6 +200,10 @@ def ecr_repos_prompts(self, template: Template): return retain_repos def delete_ecr_companion_stack(self): + """ + Delete the ECR companion stack and ECR repositories based + on user input. + """ delete_ecr_companion_stack_prompt = self.ecr_companion_stack_prompts() if delete_ecr_companion_stack_prompt or self.no_prompts: cf_ecr_companion_stack = self.cf_utils.get_stack_template(self.companion_stack_name, TEMPLATE_STAGE) @@ -213,9 +218,9 @@ def delete_ecr_companion_stack(self): template_str=ecr_stack_template_str, ) - # Delete the repos created by ECR companion stack and the stack retain_repos = self.ecr_repos_prompts(ecr_companion_stack_template) + # Delete the repos created by ECR companion stack if not retained ecr_companion_stack_template.delete(retain_resources=retain_repos) click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") @@ -224,6 +229,7 @@ def delete_ecr_companion_stack(self): # the user input repositories and delete the stack. 
self.cf_utils.delete_stack(stack_name=self.companion_stack_name) self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) + LOG.debug("Deleted ECR Companion Stack: %s", self.companion_stack_name) except CfDeleteFailedStatusError: LOG.debug("delete_stack resulted failed and so re-try with retain_resources") self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_resources=retain_repos) @@ -245,7 +251,6 @@ def delete(self): self.s3_prompts() - # Delete the artifacts template = Template( template_path=None, parent_dir=None, @@ -254,7 +259,7 @@ def delete(self): template_str=template_str, ) - retain_repos = self.ecr_repos_prompts(template) + retain_resources = self.ecr_repos_prompts(template) # ECR companion stack delete prompts, if it exists parent_stack_hash = str_checksum(self.stack_name) @@ -265,7 +270,8 @@ def delete(self): self.companion_stack_name = possible_companion_stack_name self.delete_ecr_companion_stack() - template.delete(retain_resources=retain_repos) + # Delete the artifacts and retain resources user selected not to delete + template.delete(retain_resources=retain_resources) # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -275,7 +281,7 @@ def delete(self): elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - # Delete the primary stack + # Delete the primary input stack try: click.echo(f"\t- Deleting Cloudformation stack {self.stack_name}") self.cf_utils.delete_stack(stack_name=self.stack_name) @@ -283,7 +289,7 @@ def delete(self): LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) except CfDeleteFailedStatusError: LOG.debug("delete_stack resulted failed and so re-try with retain_resources") - self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_repos) + self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_resources) # If s3_bucket information is not available, warn the user if not self.s3_bucket: @@ -296,7 +302,7 
@@ def delete(self): def run(self): """ - Delete the stack based on the argument provided by customers and samconfig.toml. + Delete the stack based on the argument provided by user and samconfig.toml. """ if not self.no_prompts: delete_stack = confirm( diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 90507e23f4..efefe1644f 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -4,6 +4,7 @@ import click from samcli.commands.delete.delete_context import DeleteContext +from samcli.lib.package.artifact_exporter import Template from samcli.cli.cli_config_file import TomlProvider from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.package.s3_uploader import S3Uploader @@ -94,7 +95,7 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) - @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({}))) + @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) @patch("samcli.commands.deploy.guided_context.click.get_current_context") def test_delete_context_valid_execute_run(self, patched_click_get_current_context): @@ -110,11 +111,11 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex delete_context.run() self.assertEqual(CfUtils.has_stack.call_count, 2) - self.assertEqual(CfUtils.get_stack_template.call_count, 1) + self.assertEqual(CfUtils.get_stack_template.call_count, 2) self.assertEqual(CfUtils.delete_stack.call_count, 2) self.assertEqual(CfUtils.wait_for_delete.call_count, 2) 
self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) - self.assertEqual(CfUtils.get_deployed_repos.call_count, 1) + self.assertEqual(Template.get_ecr_repos.call_count, 2) @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.deploy.guided_context.click.secho") @@ -143,7 +144,7 @@ def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_ec self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) expected_click_echo_calls = [ - call("\n\t- Deleting Cloudformation stack test"), + call("\t- Deleting Cloudformation stack test"), call("\nDeleted successfully"), ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) @@ -255,7 +256,7 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) - @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({"logical_id": {"physical_id": "test_id"}}))) + @patch.object(Template, "get_ecr_repos", MagicMock(side_effect=({}, {"logical_id": {"Repository": "test_id"}}))) def test_guided_prompts_ecr_companion_stack_present_execute_run( self, patched_confirm, patched_get_cf_template_name ): @@ -278,7 +279,7 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run( expected_confirmation_calls = [ call( click.style( - f"\tAre you sure you want to delete the stack test" + f" in the region us-east-1 ?", + f"\tAre you sure you want to delete the stack test in the region us-east-1 ?", bold=True, ), default=False, @@ -286,7 +287,7 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run( call( click.style( "\tAre you sure you want to delete the folder" - + f" s3_prefix in S3 which contains the artifacts?", + + " s3_prefix in S3 which contains the artifacts?", bold=True, ), default=False, @@ -322,13 +323,13 @@ def 
test_guided_prompts_ecr_companion_stack_present_execute_run( @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.click.echo") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) - @patch.object(CfUtils, "get_deployed_repos", MagicMock(return_value=({"logical_id": {"physical_id": "test_id"}}))) + @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( self, patched_click_echo, patched_get_cf_template_name ): @@ -349,9 +350,7 @@ def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( delete_context.run() expected_click_echo_calls = [ - call("\t#Note: Empty repositories created by SAM CLI will be deleted automatically."), - call("\t- Deleting ECR repository test_id"), - call("\t- Deleting ECR Companion Stack test-098f6bcd-CompanionStack"), + call("\t- Deleting Cloudformation stack test-098f6bcd-CompanionStack"), call("\nDeleted successfully"), ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index f68526af32..61c1c186e1 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -22,13 +22,11 @@ class TestCfUtils(TestCase): def setUp(self): self.session = MagicMock() self.cloudformation_client = self.session.client("cloudformation") - self.cloudformation_resource_client = 
self.session.resource("cloudformation") self.s3_client = self.session.client("s3") - self.cf_utils = CfUtils(self.cloudformation_client, self.cloudformation_resource_client) + self.cf_utils = CfUtils(self.cloudformation_client) def test_cf_utils_init(self): self.assertEqual(self.cf_utils._client, self.cloudformation_client) - self.assertEqual(self.cf_utils._resource_client, self.cloudformation_resource_client) def test_cf_utils_has_no_stack(self): self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) @@ -89,6 +87,12 @@ def test_cf_utils_get_stack_template_exception(self): with self.assertRaises(Exception): self.cf_utils.get_stack_template("test", "Original") + def test_cf_utils_get_stack_template_success(self): + self.cf_utils._client.get_template = MagicMock(return_value=({"TemplateBody": "Hello World"})) + + response = self.cf_utils.get_stack_template("test", "Original") + self.assertEqual(response, {"TemplateBody": "Hello World"}) + def test_cf_utils_delete_stack_exception_botocore(self): self.cf_utils._client.delete_stack = MagicMock(side_effect=BotoCoreError()) with self.assertRaises(DeleteFailedError): diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index e6b9d14320..ce2b26b252 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -52,6 +52,7 @@ CloudFormationResourceVersionSchemaHandlerPackage, ResourceZip, ResourceImage, + ECRResource, ) @@ -768,6 +769,25 @@ class MockResource(ResourceWithS3UrlDict): resource.delete(resource_id, resource_dict) self.s3_uploader_mock.delete_artifact.assert_called_once_with(remote_path="key1/key2", is_key=True) + def test_ecr_resource_delete(self): + # Property value is set to an image + + class MockResource(ECRResource): + PROPERTY_NAME = "foo" + + resource = MockResource(self.uploaders_mock, None) + + resource_id = "id" + resource_dict = {} + repository = "repository" + 
resource_dict[resource.PROPERTY_NAME] = repository + + self.ecr_uploader_mock.delete_ecr_repository = Mock() + + resource.delete(resource_id, resource_dict) + + self.ecr_uploader_mock.delete_ecr_repository.assert_called_once_with(physical_id="repository") + @patch("samcli.lib.package.packageable_resources.upload_local_artifacts") def test_resource_with_signing_configuration(self, upload_local_artifacts_mock): class MockResource(ResourceZip): @@ -1443,7 +1463,7 @@ def test_template_delete(self): template_str=template_str, ) - template_exporter.delete() + template_exporter.delete(retain_resources=[]) resource_type1_class.assert_called_once_with(self.uploaders_mock, None) resource_type1_instance.delete.assert_called_once_with("Resource1", mock.ANY) @@ -1451,3 +1471,28 @@ def test_template_delete(self): resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY) resource_type3_class.assert_not_called() resource_type3_instance.delete.assert_not_called() + + def test_get_ecr_repos(self): + resources_to_export = [ECRResource] + + properties = {"RepositoryName": "test_repo"} + template_dict = { + "Resources": { + "Resource1": {"Type": "AWS::ECR::Repository", "Properties": properties}, + "Resource2": {"Type": "resource_type1", "Properties": properties}, + "Resource3": {"Type": "AWS::ECR::Repository", "Properties": properties, "DeletionPolicy": "Retain"}, + } + } + template_str = json.dumps(template_dict, indent=4, ensure_ascii=False) + + template_exporter = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders_mock, + code_signer=None, + resources_to_export=resources_to_export, + template_str=template_str, + ) + + repos = template_exporter.get_ecr_repos() + self.assertEqual(repos, {"Resource1": {"Repository": "test_repo"}}) From 7bcd636409fcbf420cdd521a51e9d05940cc55b4 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 19 Jul 2021 19:21:07 -0400 Subject: [PATCH 104/121] Better error handling for ecr delete_artifact --- 
samcli/lib/package/ecr_uploader.py | 35 ++++++++++----------- tests/unit/lib/package/test_ecr_uploader.py | 33 ++++++++++++------- 2 files changed, 38 insertions(+), 30 deletions(-) diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 7fbbc4d372..56c780fd71 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -16,7 +16,6 @@ DockerPushFailedError, DockerLoginFailedError, ECRAuthorizationError, - ImageNotFoundError, DeleteArtifactFailedError, ) from samcli.lib.package.image_utils import tag_translation @@ -114,25 +113,23 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): # Image not found image_details = resp["failures"][0] if image_details["failureCode"] == "ImageNotFound": - LOG.error("ImageNotFound Exception") - message_fmt = ( - "Could not delete image for {property_name}" - " parameter of {resource_id} resource as it does not exist. \n" + LOG.debug( + "Could not delete image for %s" " parameter of %s resource as it does not exist. \n", + property_name, + resource_id, ) - raise ImageNotFoundError(resource_id, property_name, message_fmt=message_fmt) - - LOG.error( - "Could not delete the image for the resource %s. FailureCode: %s, FailureReason: %s", - property_name, - image_details["failureCode"], - image_details["failureReason"], - ) - raise DeleteArtifactFailedError( - resource_id=resource_id, property_name=property_name, ex=image_details["failureReason"] - ) - - LOG.debug("Deleting ECR image with tag %s", image_tag) - click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") + click.echo(f"\t- Could not find image with tag {image_tag} in repository {repository}") + else: + LOG.debug( + "Could not delete the image for the resource %s. 
FailureCode: %s, FailureReason: %s", + property_name, + image_details["failureCode"], + image_details["failureReason"], + ) + click.echo(f"\t- Could not delete image with tag {image_tag} in repository {repository}") + else: + LOG.debug("Deleting ECR image with tag %s", image_tag) + click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") except botocore.exceptions.ClientError as ex: # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 25b2c4a047..68ca5a5ec7 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -1,10 +1,11 @@ from unittest import TestCase -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, patch, call from botocore.exceptions import ClientError from docker.errors import APIError, BuildError from parameterized import parameterized +# import click from samcli.commands.package.exceptions import ( DockerLoginFailedError, DockerPushFailedError, @@ -176,7 +177,8 @@ def test_upload_failure_while_streaming(self): with self.assertRaises(DockerPushFailedError): ecr_uploader.upload(image, resource_name="HelloWorldFunction") - def test_delete_artifact_no_image_error(self): + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_artifact_no_image_error(self, patched_click_echo): ecr_uploader = ECRUploader( docker_client=self.docker_client, ecr_client=self.ecr_client, @@ -188,12 +190,17 @@ def test_delete_artifact_no_image_error(self): "failures": [{"imageId": {"imageTag": self.tag}, "failureCode": "ImageNotFound"}] } - with self.assertRaises(ImageNotFoundError): - ecr_uploader.delete_artifact( - image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name - ) + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, 
property_name=self.property_name + ) - def test_delete_artifact_resp_failure(self): + expected_click_echo_calls = [ + call(f"\t- Could not find image with tag {self.tag} in repository mock-image-repo"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_artifact_resp_failure(self, patched_click_echo): ecr_uploader = ECRUploader( docker_client=self.docker_client, ecr_client=self.ecr_client, @@ -211,10 +218,14 @@ def test_delete_artifact_resp_failure(self): ] } - with self.assertRaises(DeleteArtifactFailedError): - ecr_uploader.delete_artifact( - image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name - ) + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + expected_click_echo_calls = [ + call(f"\t- Could not delete image with tag {self.tag} in repository mock-image-repo"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) def test_delete_artifact_client_error(self): ecr_uploader = ECRUploader( From 9363b93bdfa48022a1d510734a14520da0a1f199 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 19 Jul 2021 20:23:48 -0400 Subject: [PATCH 105/121] Revert "Merge remote-tracking branch 'wiltons-repo/feat/auto-ecr' into auto-ecr-delete" This reverts commit 0e159c2fa3630b874f13f19336802f6085a92de9, reversing changes made to 1675b7ed231b6472d38eeeeb25e39f6310bbb86f. 
--- appveyor.yml | 8 +- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 18 +- samcli/__init__.py | 2 +- samcli/commands/deploy/command.py | 40 +- samcli/commands/deploy/guided_config.py | 5 +- samcli/commands/deploy/guided_context.py | 245 ++--------- samcli/commands/package/command.py | 4 +- samcli/commands/package/exceptions.py | 3 +- samcli/lib/bootstrap/bootstrap.py | 92 ++-- .../lib/bootstrap/companion_stack/__init__.py | 0 .../companion_stack_builder.py | 130 ------ .../companion_stack_manager.py | 276 ------------ .../companion_stack_manager_helper.py | 80 ---- .../bootstrap/companion_stack/data_types.py | 137 ------ samcli/lib/build/build_strategy.py | 7 +- .../image_repository_validation.py | 24 +- samcli/lib/package/ecr_utils.py | 2 +- samcli/lib/package/stream_cursor_utils.py | 9 - samcli/lib/providers/sam_base_provider.py | 28 -- samcli/lib/providers/sam_function_provider.py | 17 +- .../lib/utils/managed_cloudformation_stack.py | 3 +- .../models/function_with_mq_virtual_host.yaml | 19 - tests/integration/buildcmd/test_build_cmd.py | 30 -- tests/integration/deploy/deploy_integ_base.py | 3 - .../integration/deploy/test_deploy_command.py | 94 +---- .../deploy/regression_deploy_base.py | 3 - tests/unit/commands/deploy/test_command.py | 67 +-- .../commands/deploy/test_guided_context.py | 394 ++++-------------- .../local/lib/test_sam_function_provider.py | 163 ++------ .../unit/commands/samconfig/test_samconfig.py | 2 - .../test_companion_stack_builder.py | 93 ----- .../test_companion_stack_manager.py | 253 ----------- .../test_companion_stack_manager_helper.py | 51 --- .../companion_stack/test_data_types.py | 63 --- .../lib/build_module/test_build_strategy.py | 9 +- .../test_image_repository_validation.py | 5 +- 37 files changed, 282 insertions(+), 2099 deletions(-) delete mode 100644 samcli/lib/bootstrap/companion_stack/__init__.py delete mode 100644 samcli/lib/bootstrap/companion_stack/companion_stack_builder.py delete mode 100644 
samcli/lib/bootstrap/companion_stack/companion_stack_manager.py delete mode 100644 samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py delete mode 100644 samcli/lib/bootstrap/companion_stack/data_types.py delete mode 100644 tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml delete mode 100644 tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py delete mode 100644 tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py delete mode 100644 tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py delete mode 100644 tests/unit/lib/bootstrap/companion_stack/test_data_types.py diff --git a/appveyor.yml b/appveyor.yml index ed730e0a24..606fe62d5f 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -159,7 +159,7 @@ for: # Pre-dev Tests - "pip install -e \".[pre-dev]\"" - "pylint --rcfile .pylintrc samcli" - + # Dev Tests - "pip install -e \".[dev]\"" - "pytest --cov samcli --cov-report term-missing --cov-fail-under 94 tests/unit" @@ -170,11 +170,7 @@ for: # Runs only in Linux, logging docker hub when running canary and docker cred is available - sh: " if [[ -n $BY_CANARY ]] && [[ -n $DOCKER_USER ]] && [[ -n $DOCKER_PASS ]]; - then echo Logging in Docker Hub; echo $DOCKER_PASS | docker login --username $DOCKER_USER --password-stdin registry-1.docker.io; - fi" - - sh: " - if [[ -n $BY_CANARY ]]; - then echo Logging in Public ECR; aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws; + then echo Logging in Docker Hub; echo $DOCKER_PASS | docker login --username $DOCKER_USER --password-stdin; fi" - sh: "pytest -vv tests/integration" - sh: "pytest -vv tests/regression" diff --git a/requirements/base.txt b/requirements/base.txt index 25efa93b05..ab432ff159 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ boto3~=1.14 jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=1.7.2 
-aws-sam-translator==1.37.0 +aws-sam-translator==1.36.0 #docker minor version updates can include breaking changes. Auto update micro version only. docker~=4.2.0 dateparser~=0.7 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index a2f725e5fd..a7a92e25a4 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.7 +# This file is autogenerated by pip-compile # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/reproducible-linux.txt @@ -17,10 +17,10 @@ aws-lambda-builders==1.4.0 \ --hash=sha256:5d4e4ecb3d3290f0eec1f62b7b0d9d6b91160ae71447d95899eede392d05f75f \ --hash=sha256:d32f79cf67b189a7598793f69797f284b2eb9a9fada562175b1e854187f95aed # via aws-sam-cli (setup.py) -aws-sam-translator==1.37.0 \ - --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ - --hash=sha256:26e4866627e4284afc367bee2bd04d3cf23cecc8ff879b419457715a738395a9 \ - --hash=sha256:6884d942a815450637bac48e297996df2dacc27077d25ced09d8e9ce1f6a585c +aws-sam-translator==1.36.0 \ + --hash=sha256:4195ae8196f04803e7f0384a2b5ccd8c2b06ce0d8dc408aa1f1ce96c23bcf39d \ + --hash=sha256:f7d51b661fe1f5613a882f4733d1c92eff4dac36a076eafd18031d209b178695 \ + --hash=sha256:fa1b990d9329d19052e7b91cf0b19371ed9d31a529054b616005884cd662b584 # via aws-sam-cli (setup.py) binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ @@ -88,6 +88,10 @@ itsdangerous==1.1.0 \ --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 # via flask +jinja2-time==0.2.0 \ + --hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \ + --hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa + # via cookiecutter jinja2==2.11.3 \ 
--hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \ --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 @@ -95,10 +99,6 @@ jinja2==2.11.3 \ # cookiecutter # flask # jinja2-time -jinja2-time==0.2.0 \ - --hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \ - --hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa - # via cookiecutter jmespath==0.10.0 \ --hash=sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9 \ --hash=sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f diff --git a/samcli/__init__.py b/samcli/__init__.py index 1c484dccfd..3fe00ac134 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.26.0" +__version__ = "1.24.1" diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 8713b830f7..371dc61c4d 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -26,7 +26,6 @@ from samcli.lib.utils import osutils from samcli.lib.bootstrap.bootstrap import manage_stack from samcli.lib.utils.version_checker import check_newer_version -from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper SHORT_HELP = "Deploy an AWS SAM application." @@ -154,19 +153,9 @@ "--resolve-s3", required=False, is_flag=True, - help="Automatically resolve s3 bucket for non-guided deployments. " - "Enabling this option will also create a managed default s3 bucket for you. " - "If you do not provide a --s3-bucket value, the managed bucket will be used. " + help="Automatically resolve s3 bucket for non-guided deployments." "Do not use --s3-guided parameter with this option.", ) -@click.option( - "--resolve-image-repos", - required=False, - is_flag=True, - help="Automatically create and delete ECR repositories for image-based functions in non-guided deployments. 
" - "A companion stack containing ECR repos for each function will be deployed along with the template stack. " - "Automatically created image repositories will be deleted if the corresponding functions are removed.", -) @metadata_override_option @notification_arns_override_option @tags_override_option @@ -205,7 +194,6 @@ def cli( confirm_changeset, signing_profiles, resolve_s3, - resolve_image_repos, config_file, config_env, ): @@ -240,7 +228,6 @@ def cli( resolve_s3, config_file, config_env, - resolve_image_repos, ) # pragma: no cover @@ -271,7 +258,6 @@ def do_cli( resolve_s3, config_file, config_env, - resolve_image_repos, ): """ Implementation of the ``cli`` method @@ -301,23 +287,13 @@ def do_cli( config_file=config_file, ) guided_context.run() - else: - if resolve_s3: - if bool(s3_bucket): - raise DeployResolveS3AndS3SetError() - s3_bucket = manage_stack(profile=profile, region=region) - click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") - click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") - click.echo("\t\tOr by specifying --s3-bucket explicitly.") - - if resolve_image_repos: - if image_repositories is None: - image_repositories = {} - manager_helper = CompanionStackManagerHelper( - stack_name, region, s3_bucket, s3_prefix, template_file, image_repositories - ) - image_repositories.update(manager_helper.manager.get_repository_mapping()) - manager_helper.manager.sync_repos() + elif resolve_s3 and bool(s3_bucket): + raise DeployResolveS3AndS3SetError() + elif resolve_s3: + s3_bucket = manage_stack(profile=profile, region=region) + click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") + click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") + click.echo("\t\tOr by specifying --s3-bucket explicitly.") with osutils.tempfile_platform_independent() as output_template_file: diff --git a/samcli/commands/deploy/guided_config.py b/samcli/commands/deploy/guided_config.py index a236c18808..eef259af9c 100644 --- 
a/samcli/commands/deploy/guided_config.py +++ b/samcli/commands/deploy/guided_config.py @@ -102,10 +102,7 @@ def _save_parameter_overrides(self, cmd_names, config_env, parameter_overrides, samconfig.put(cmd_names, self.section, "parameter_overrides", " ".join(_params), env=config_env) def _save_image_repositories(self, cmd_names, config_env, samconfig, image_repositories): - # Check for None only as empty dict should be saved to config - # This can happen in an edge case where all companion stack repos are deleted and - # the config needs to be updated. - if image_repositories is not None: + if image_repositories: _image_repositories = [f"{key}={value}" for key, value in image_repositories.items()] samconfig.put(cmd_names, self.section, "image_repositories", _image_repositories, env=config_env) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index 475a81e9ad..b4bb65a0d6 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -3,7 +3,7 @@ """ import logging -from typing import Dict, Any, List, Optional +from typing import Dict, Any, List import click from botocore.session import get_session @@ -14,6 +14,8 @@ from samcli.commands._utils.options import _space_separated_list_func_type from samcli.commands._utils.template import ( get_template_parameters, + get_template_artifacts_format, + get_template_function_resource_ids, ) from samcli.commands.deploy.auth_utils import auth_per_resource from samcli.commands.deploy.code_signer_utils import ( @@ -30,12 +32,11 @@ from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.image_utils import tag_translation, NonLocalImageException, NoImageFoundException -from samcli.lib.providers.provider import Function, Stack +from samcli.lib.providers.provider import Stack +from samcli.lib.providers.sam_function_provider import 
SamFunctionProvider from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.colors import Colored from samcli.lib.utils.packagetype import IMAGE -from samcli.lib.providers.sam_function_provider import SamFunctionProvider -from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper LOG = logging.getLogger(__name__) @@ -136,6 +137,7 @@ def guided_prompts(self, parameter_override_keys): parameter_overrides=sanitize_parameter_overrides(input_parameter_overrides), global_parameter_overrides=global_parameter_overrides, ) + image_repositories = self.prompt_image_repository(stacks) click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") confirm_changeset = confirm( @@ -171,14 +173,9 @@ def guided_prompts(self, parameter_override_keys): type=click.STRING, ) - click.echo("\n\tLooking for resources needed for deployment:") s3_bucket = manage_stack(profile=self.profile, region=region) - click.echo(f"\t Managed S3 bucket: {s3_bucket}") - click.echo("\t A different default S3 bucket can be set in samconfig.toml") - - image_repositories = self.prompt_image_repository( - stack_name, stacks, self.image_repositories, region, s3_bucket, self.s3_prefix - ) + click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") + click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") self.guided_stack_name = stack_name self.guided_s3_bucket = s3_bucket @@ -292,213 +289,51 @@ def prompt_parameters( _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": False} return _prompted_param_overrides - def prompt_image_repository( - self, - stack_name, - stacks: List[Stack], - image_repositories: Optional[Dict[str, str]], - region: str, - s3_bucket: str, - s3_prefix: str, - ) -> Dict[str, str]: + def prompt_image_repository(self, stacks: List[Stack]): """ Prompt for the image repository to push the images. 
For each image function found in build artifacts, it will prompt for an image repository. Parameters ---------- - stack_name : List[Stack] - Name of the stack to be deployed. - stacks : List[Stack] List of stacks to look for image functions. - image_repositories: Dict[str, str] - Dictionary with function logical ID as key and image repo URI as value. - - region: str - Region for the image repos. - - s3_bucket: str - s3 bucket URI to be used for uploading companion stack template - - s3_prefix: str - s3 prefix to be used for uploading companion stack template - Returns ------- - Dict[str, str] + Dict A dictionary contains image function logical ID as key, image repository as value. """ - updated_repositories = image_repositories.copy() if image_repositories is not None else {} - self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) - - manager_helper = CompanionStackManagerHelper( - stack_name, region, s3_bucket, s3_prefix, self.template_file, updated_repositories - ) - - create_all_repos = self.prompt_create_all_repos( - manager_helper.function_logical_ids, manager_helper.missing_repo_functions - ) - if create_all_repos: - updated_repositories.update(manager_helper.manager.get_repository_mapping()) - else: - updated_repositories = self.prompt_specify_repos( - manager_helper.missing_repo_functions, updated_repositories - ) - manager_helper.update_specified_image_repos(updated_repositories) - - self.prompt_delete_unreferenced_repos( - [manager_helper.manager.get_repo_uri(repo) for repo in manager_helper.unreferenced_repos] - ) - - updated_repositories = manager_helper.remove_unreferenced_repos_from_mapping(updated_repositories) - GuidedContext.verify_images_exist_locally(self.function_provider.functions) - - manager_helper.manager.sync_repos() - return updated_repositories - - def prompt_specify_repos( - self, - functions_without_repos: List[str], - image_repositories: Dict[str, str], - ) -> Dict[str, str]: - """ - Show prompts 
for each function that isn't associated with a image repo - - Parameters - ---------- - functions_without_repos: List[str] - List of functions without associating repos - - image_repositories: Dict[str, str] - Current image repo dictionary with function logical ID as key and image repo URI as value. - - Returns - ------- - Dict[str, str] - Updated image repo dictionary with values(image repo URIs) filled by user input - """ - updated_repositories = image_repositories.copy() - for function_logical_id in functions_without_repos: - image_uri = prompt( - f"\t {self.start_bold}ECR repository for {function_logical_id}{self.end_bold}", - type=click.STRING, - ) - if function_logical_id not in image_repositories or not is_ecr_url( - str(image_repositories[function_logical_id]) - ): - raise GuidedDeployFailedError(f"Invalid Image Repository ECR URI: {image_uri}") - - updated_repositories[function_logical_id] = image_uri - - return updated_repositories - - def prompt_create_all_repos(self, functions: List[str], functions_without_repo: List[str]) -> bool: - """ - Prompt whether to create all repos - - Parameters - ---------- - functions: List[str] - List of function logical IDs that are image based - functions_without_repo: List[str] - List of function logical IDs that do not have an ECR image repo specified - - Returns - ------- - Boolean - Returns False if there is no missing function or denied by prompt - """ - if not functions: - return False - - if functions == functions_without_repo: - click.echo("\t Image repositories: Not found.") - click.echo( - "\t #Managed repositories will be deleted when " - "their functions are removed from the template and deployed" - ) - return confirm( - f"\t {self.start_bold}Create managed ECR repositories for all functions?{self.end_bold}", default=True - ) - - functions_with_repo_count = len(functions) - len(functions_without_repo) - click.echo( - "\t Image repositories: " - f"Found ({functions_with_repo_count} of {len(functions)})" - " 
#Different image repositories can be set in samconfig.toml" - ) - - if not functions_without_repo: - return False - - click.echo( - "\t #Managed repositories will be deleted when their functions are " - "removed from the template and deployed" - ) - return ( - confirm( - f"\t {self.start_bold}Create managed ECR repositories for the " - f"{len(functions_without_repo)} functions without?{self.end_bold}", - default=True, - ) - if functions_without_repo - else True - ) - - def prompt_delete_unreferenced_repos(self, unreferenced_repo_uris: List[str]) -> None: - """ - Prompt user for deleting unreferenced companion stack image repos. - Throws GuidedDeployFailedError if delete repos has been denied by the user. - This function does not actually remove the functions from the stack. - - Parameters - ---------- - - unreferenced_repo_uris: List[str] - List of unreferenced image repos that need to be deleted. - """ - if not unreferenced_repo_uris: - return - - click.echo("\t Checking for unreferenced ECR repositories to clean-up: " f"{len(unreferenced_repo_uris)} found") - for repo_uri in unreferenced_repo_uris: - click.echo(f"\t {repo_uri}") - delete_repos = confirm( - f"\t {self.start_bold}Delete the unreferenced repositories listed above when deploying?{self.end_bold}", - default=False, - ) - if not delete_repos: - click.echo("\t Deployment aborted!") - click.echo( - "\t #The deployment was aborted to prevent " - "unreferenced managed ECR repositories from being deleted.\n" - "\t #You may remove repositories from the SAMCLI " - "managed stack to retain them and resolve this unreferenced check." - ) - raise GuidedDeployFailedError("Unreferenced Auto Created ECR Repos Must Be Deleted.") - - @staticmethod - def verify_images_exist_locally(functions: Dict[str, Function]) -> None: - """ - Verify all images associated with deploying functions exist locally. 
- - Parameters - ---------- - functions: Dict[str, Function] - Dictionary of functions in the stack to be deployed with key as their logical ID. - """ - for _, function_prop in functions.items(): - if function_prop.packagetype != IMAGE: - continue - image = function_prop.imageuri - try: - tag_translation(image) - except NonLocalImageException: - pass - except NoImageFoundException as ex: - raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex + image_repositories = {} + artifacts_format = get_template_artifacts_format(template_file=self.template_file) + if IMAGE in artifacts_format: + self.function_provider = SamFunctionProvider(stacks, ignore_code_extraction_warnings=True) + function_resources = get_template_function_resource_ids(template_file=self.template_file, artifact=IMAGE) + for resource_id in function_resources: + image_repositories[resource_id] = prompt( + f"\t{self.start_bold}Image Repository for {resource_id}{self.end_bold}", + default=self.image_repositories.get(resource_id, "") + if isinstance(self.image_repositories, dict) + else "" or self.image_repository, + ) + if not is_ecr_url(image_repositories.get(resource_id)): + raise GuidedDeployFailedError( + f"Invalid Image Repository ECR URI: {image_repositories.get(resource_id)}" + ) + for resource_id, function_prop in self.function_provider.functions.items(): + if function_prop.packagetype == IMAGE: + image = function_prop.imageuri + try: + tag = tag_translation(image) + except NonLocalImageException: + pass + except NoImageFoundException as ex: + raise GuidedDeployFailedError("No images found to deploy, try running sam build") from ex + else: + click.secho(f"\t {image} to be pushed to {image_repositories.get(resource_id)}:{tag}") + click.secho(nl=True) + + return image_repositories def run(self): diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index cc0dc35c5d..cab68b6d88 100644 --- a/samcli/commands/package/command.py +++ 
b/samcli/commands/package/command.py @@ -121,9 +121,7 @@ def resources_and_properties_help_string(): exc_set=PackageResolveS3AndS3SetError, exc_not_set=PackageResolveS3AndS3NotSetError, ), - help="Automatically resolve s3 bucket for non-guided deployments. " - "Enabling this option will also create a managed default s3 bucket for you. " - "If you do not provide a --s3-bucket value, the managed bucket will be used. " + help="Automatically resolve s3 bucket for non-guided deployments." "Do not use --s3-guided parameter with this option.", ) @metadata_override_option diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index f5fdee0297..70ed0ba958 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -144,8 +144,7 @@ class BucketNotSpecifiedError(UserException): def __init__(self, **kwargs): self.kwargs = kwargs - message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name, or use --resolve-s3 \ -to create a managed default bucket, or run sam deploy --guided" + message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided" super().__init__(message=message_fmt.format(**self.kwargs)) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 9ca8a117c8..81c30c7748 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -32,47 +32,51 @@ def manage_stack(profile, region): def _get_stack_template(): gc = GlobalConfig() - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Transform": "AWS::Serverless-2016-10-31", - "Description": "Managed Stack for AWS SAM CLI", - "Metadata": { - "SamCliInfo": { - "version": __version__, - "installationId": gc.installation_id if gc.installation_id else "unknown", - } - }, - "Resources": { - "SamCliSourceBucket": { - "Type": "AWS::S3::Bucket", - "Properties": { - "VersioningConfiguration": {"Status": "Enabled"}, - 
"Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], - }, - }, - "SamCliSourceBucketBucketPolicy": { - "Type": "AWS::S3::BucketPolicy", - "Properties": { - "Bucket": "!Ref SamCliSourceBucket", - "PolicyDocument": { - "Statement": [ - { - "Action": ["s3:GetObject"], - "Effect": "Allow", - "Resource": { - "Fn::Join": [ - "", - ["arn:", "!Ref AWS::Partition", ":s3:::", "!Ref SamCliSourceBucket", "/*"], - ] - }, - "Principal": {"Service": "serverlessrepo.amazonaws.com"}, - "Condition": {"StringEquals": {"aws:SourceAccount": "!Ref AWS::AccountId"}}, - } - ] - }, - }, - }, - }, - "Outputs": {"SourceBucket": {"Value": "!Ref SamCliSourceBucket"}}, - } - return json.dumps(template) + info = {"version": __version__, "installationId": gc.installation_id if gc.installation_id else "unknown"} + + template = """ + AWSTemplateFormatVersion : '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: Managed Stack for AWS SAM CLI + + Metadata: + SamCliInfo: {info} + + Resources: + SamCliSourceBucket: + Type: AWS::S3::Bucket + Properties: + VersioningConfiguration: + Status: Enabled + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + + SamCliSourceBucketBucketPolicy: + Type: AWS::S3::BucketPolicy + Properties: + Bucket: !Ref SamCliSourceBucket + PolicyDocument: + Statement: + - + Action: + - "s3:GetObject" + Effect: "Allow" + Resource: + Fn::Join: + - "" + - + - "arn:" + - !Ref AWS::Partition + - ":s3:::" + - !Ref SamCliSourceBucket + - "/*" + Principal: + Service: serverlessrepo.amazonaws.com + + Outputs: + SourceBucket: + Value: !Ref SamCliSourceBucket + """ + + return template.format(info=json.dumps(info)) diff --git a/samcli/lib/bootstrap/companion_stack/__init__.py b/samcli/lib/bootstrap/companion_stack/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py b/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py deleted file mode 100644 index 
85280c2513..0000000000 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_builder.py +++ /dev/null @@ -1,130 +0,0 @@ -""" - Companion stack template builder -""" -import json - -from typing import Dict - -from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo -from samcli import __version__ as VERSION - - -class CompanionStackBuilder: - """ - CFN template builder for the companion stack - """ - - _parent_stack_name: str - _companion_stack: CompanionStack - _repo_mapping: Dict[str, ECRRepo] - - def __init__(self, companion_stack: CompanionStack) -> None: - self._companion_stack = companion_stack - self._repo_mapping: Dict[str, ECRRepo] = dict() - - def add_function(self, function_logical_id: str) -> None: - """ - Add an ECR repo associated with the function to the companion stack template - """ - self._repo_mapping[function_logical_id] = ECRRepo(self._companion_stack, function_logical_id) - - def clear_functions(self) -> None: - """ - Remove all functions that need ECR repos - """ - self._repo_mapping = dict() - - def build(self) -> str: - """ - Build companion stack CFN template with current functions - Returns - ------- - str - CFN template for companions stack - """ - template_dict = self._build_template_dict() - for _, ecr_repo in self._repo_mapping.items(): - template_dict["Resources"][ecr_repo.logical_id] = self._build_repo_dict(ecr_repo) - template_dict["Outputs"][ecr_repo.output_logical_id] = CompanionStackBuilder._build_output_dict(ecr_repo) - - return json.dumps(template_dict) - - def _build_template_dict(self) -> Dict: - """ - Build Companion stack template dictionary with Resources and Outputs not filled - Returns - ------- - dict - Companion stack template dictionary - """ - template = { - "AWSTemplateFormatVersion": "2010-09-09", - "Transform": "AWS::Serverless-2016-10-31", - "Description": "AWS SAM CLI Managed ECR Repo Stack", - "Metadata": {"SamCliInfo": VERSION, "CompanionStackname": 
self._companion_stack.stack_name}, - "Resources": {}, - "Outputs": {}, - } - return template - - def _build_repo_dict(self, repo: ECRRepo) -> Dict: - """ - Build a single ECR repo resource dictionary - - Parameters - ---------- - repo - ECR repo that will be turned into CFN resource - - Returns - ------- - dict - ECR repo resource dictionary - """ - return { - "Type": "AWS::ECR::Repository", - "Properties": { - "RepositoryName": repo.physical_id, - "Tags": [ - {"Key": "ManagedStackSource", "Value": "AwsSamCli"}, - {"Key": "AwsSamCliCompanionStack", "Value": self._companion_stack.stack_name}, - ], - "RepositoryPolicyText": { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "AllowLambdaSLR", - "Effect": "Allow", - "Principal": {"Service": ["lambda.amazonaws.com"]}, - "Action": ["ecr:GetDownloadUrlForLayer", "ecr:GetRepositoryPolicy", "ecr:BatchGetImage"], - } - ], - }, - }, - } - - @staticmethod - def _build_output_dict(repo: ECRRepo) -> Dict: - """ - Build a single ECR repo output resource dictionary - - Parameters - ---------- - repo - ECR repo that will be turned into CFN output resource - - Returns - ------- - dict - ECR repo output resource dictionary - """ - return { - "Value": f"!Sub ${{AWS::AccountId}}.dkr.ecr.${{AWS::Region}}.${{AWS::URLSuffix}}/${{{repo.logical_id}}}" - } - - @property - def repo_mapping(self) -> Dict[str, ECRRepo]: - """ - Repo mapping dictionary with key as function logical ID and value as ECRRepo object - """ - return self._repo_mapping diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py deleted file mode 100644 index f01d2ba8f0..0000000000 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ /dev/null @@ -1,276 +0,0 @@ -""" - Companion stack manager -""" -from typing import List, Dict -import typing - -import boto3 -from botocore.config import Config -from botocore.exceptions import ClientError, NoRegionError, 
NoCredentialsError - -from samcli.commands.exceptions import CredentialsError, RegionError -from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder -from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo -from samcli.lib.package.artifact_exporter import mktempfile -from samcli.lib.package.s3_uploader import S3Uploader - -# pylint: disable=E0401 -if typing.TYPE_CHECKING: # pragma: no cover - from mypy_boto3_cloudformation.client import CloudFormationClient - from mypy_boto3_s3.client import S3Client -else: # pragma: no cover - CloudFormationClient = object - S3Client = object - - -class CompanionStackManager: - """ - Manager class for a companion stack - Used to create/update the remote stack - """ - - _companion_stack: CompanionStack - _builder: CompanionStackBuilder - _boto_config: Config - _update_stack_waiter_config: Dict[str, int] - _delete_stack_waiter_config: Dict[str, int] - _s3_bucket: str - _s3_prefix: str - _cfn_client: CloudFormationClient - _s3_client: S3Client - - def __init__(self, stack_name, region, s3_bucket, s3_prefix): - self._companion_stack = CompanionStack(stack_name) - self._builder = CompanionStackBuilder(self._companion_stack) - self._boto_config = Config(region_name=region if region else None) - self._update_stack_waiter_config = {"Delay": 5, "MaxAttempts": 240} - self._delete_stack_waiter_config = {"Delay": 5, "MaxAttempts": 120} - self._s3_bucket = s3_bucket - self._s3_prefix = s3_prefix - try: - self._cfn_client = boto3.client("cloudformation", config=self._boto_config) - self._ecr_client = boto3.client("ecr", config=self._boto_config) - self._s3_client = boto3.client("s3", config=self._boto_config) - self._account_id = boto3.client("sts").get_caller_identity().get("Account") - self._region_name = self._cfn_client.meta.region_name - except NoCredentialsError as ex: - raise CredentialsError( - "Error Setting Up Managed Stack Client: Unable to resolve " - "credentials for 
the AWS SDK for Python client. " - "Please see their documentation for options to pass in credentials: " - "https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" - ) from ex - except NoRegionError as ex: - raise RegionError( - "Error Setting Up Managed Stack Client: Unable to resolve a region. " - "Please provide a region via the --region parameter or by the AWS_REGION environment variable." - ) from ex - - def set_functions(self, function_logical_ids: List[str]) -> None: - """ - Sets functions that need to have ECR repos created - - Parameters - ---------- - function_logical_ids: List[str] - Function logical IDs that need to have ECR repos created - """ - self._builder.clear_functions() - for function_logical_id in function_logical_ids: - self._builder.add_function(function_logical_id) - - def update_companion_stack(self) -> None: - """ - Blocking call to create or update the companion stack based on current functions - Companion stack template will be updated to the s3 bucket first before deployment - """ - if not self._builder.repo_mapping: - return - - stack_name = self._companion_stack.stack_name - template = self._builder.build() - - with mktempfile() as temporary_file: - temporary_file.write(template) - temporary_file.flush() - - s3_uploader = S3Uploader( - self._s3_client, bucket_name=self._s3_bucket, prefix=self._s3_prefix, no_progressbar=True - ) - # TemplateUrl property requires S3 URL to be in path-style format - parts = S3Uploader.parse_s3_url( - s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" - ) - - template_url = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) - - exists = self.does_companion_stack_exist() - if exists: - self._cfn_client.update_stack( - StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] - ) - waiter = self._cfn_client.get_waiter("stack_update_complete") - else: - self._cfn_client.create_stack( - 
StackName=stack_name, TemplateURL=template_url, Capabilities=["CAPABILITY_AUTO_EXPAND"] - ) - waiter = self._cfn_client.get_waiter("stack_create_complete") - - waiter.wait(StackName=stack_name, WaiterConfig=self._update_stack_waiter_config) # type: ignore - - def delete_companion_stack(self): - """ - Blocking call to delete the companion stack - """ - stack_name = self._companion_stack.stack_name - waiter = self._cfn_client.get_waiter("stack_delete_complete") - self._cfn_client.delete_stack(StackName=stack_name) - waiter.wait(StackName=stack_name, WaiterConfig=self._delete_stack_waiter_config) - - def list_deployed_repos(self) -> List[ECRRepo]: - """ - List deployed ECR repos for this companion stack - Not using create_change_set as it is slow. - - Returns - ------- - List[ECRRepo] - List of ECR repos deployed for this companion stack - Returns empty list if companion stack does not exist - """ - if not self.does_companion_stack_exist(): - return [] - repos: List[ECRRepo] = list() - stack = boto3.resource("cloudformation", config=self._boto_config).Stack(self._companion_stack.stack_name) - resources = stack.resource_summaries.all() - for resource in resources: - if resource.resource_type == "AWS::ECR::Repository": - repos.append( - ECRRepo(logical_id=resource.logical_resource_id, physical_id=resource.physical_resource_id) - ) - return repos - - def get_unreferenced_repos(self) -> List[ECRRepo]: - """ - List deployed ECR repos that is not referenced by current list of functions - - Returns - ------- - List[ECRRepo] - List of deployed ECR repos that is not referenced by current list of functions - Returns empty list if companion stack does not exist - """ - if not self.does_companion_stack_exist(): - return [] - deployed_repos: List[ECRRepo] = self.list_deployed_repos() - current_mapping = self._builder.repo_mapping - - unreferenced_repos: List[ECRRepo] = list() - for deployed_repo in deployed_repos: - for _, current_repo in current_mapping.items(): - if 
current_repo.logical_id == deployed_repo.logical_id: - break - else: - unreferenced_repos.append(deployed_repo) - return unreferenced_repos - - def delete_unreferenced_repos(self) -> None: - """ - Blocking call to delete all deployed ECR repos that are unreferenced by a function - If repo does not exist, this will simply skip it. - """ - repos = self.get_unreferenced_repos() - for repo in repos: - try: - self._ecr_client.delete_repository(repositoryName=repo.physical_id, force=True) - except self._ecr_client.exceptions.RepositoryNotFoundException: - pass - - def sync_repos(self) -> None: - """ - Blocking call to sync companion stack with the following actions - Creates the stack if it does not exist, and updates it if it does. - Deletes unreferenced repos if they exist. - Deletes companion stack if there isn't any repo left. - """ - exists = self.does_companion_stack_exist() - has_repo = bool(self.get_repository_mapping()) - if exists: - self.delete_unreferenced_repos() - if has_repo: - self.update_companion_stack() - else: - self.delete_companion_stack() - elif not exists and has_repo: - self.update_companion_stack() - - def does_companion_stack_exist(self) -> bool: - """ - Does companion stack exist - - Returns - ------- - bool - Returns True if companion stack exists - """ - try: - self._cfn_client.describe_stacks(StackName=self._companion_stack.stack_name) - return True - except ClientError: - return False - - def get_repository_mapping(self) -> Dict[str, str]: - """ - Get current function to repo mapping - - Returns - ------- - Dict[str, str] - Dictionary with key as function logical ID and value as ECR repo URI. - """ - return dict((k, self.get_repo_uri(v)) for (k, v) in self._builder.repo_mapping.items()) - - def get_repo_uri(self, repo: ECRRepo) -> str: - """ - Get repo URI for a ECR repo - - Parameters - ---------- - repo: ECRRepo - - Returns - ------- - str - ECR repo URI based on account ID and region. 
- """ - return repo.get_repo_uri(self._account_id, self._region_name) - - def is_repo_uri(self, repo_uri: str, function_logical_id: str) -> bool: - """ - Check whether repo URI is a companion stack repo - - Parameters - ---------- - repo_uri: str - Repo URI to be checked. - - function_logical_id: str - Function logical ID associated with the image repo. - - Returns - ------- - bool - Returns True if repo_uri is a companion stack repo. - """ - return repo_uri == self.get_repo_uri(ECRRepo(self._companion_stack, function_logical_id)) - - def get_companion_stack_name(self): - """ - Returns the name of the companion stack - - Returns - ------- - str - Name of the companion stack - """ - return self._companion_stack.stack_name diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py deleted file mode 100644 index 660989606b..0000000000 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager_helper.py +++ /dev/null @@ -1,80 +0,0 @@ -""" - Helper class to bridge CLI functions and CompanionStackManager -""" -from typing import Dict, List - -from samcli.lib.bootstrap.companion_stack.data_types import ECRRepo - -from samcli.commands._utils.template import get_template_function_resource_ids -from samcli.lib.utils.packagetype import IMAGE -from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager - - -class CompanionStackManagerHelper: - missing_repo_functions: List[str] - auto_ecr_repo_functions: List[str] - deployed_repos: List[ECRRepo] - deployed_repo_uris: List[str] - unreferenced_repos: List[ECRRepo] - - def __init__( - self, - stack_name: str, - region: str, - s3_bucket: str, - s3_prefix: str, - template_file: str, - specified_image_repos: Dict[str, str], - ): - self.function_logical_ids = get_template_function_resource_ids(template_file=template_file, artifact=IMAGE) - self.missing_repo_functions = list() - 
self.auto_ecr_repo_functions = list() - self.manager = CompanionStackManager(stack_name, region, s3_bucket, s3_prefix) - self.deployed_repos = self.manager.list_deployed_repos() - self.deployed_repo_uris = [self.manager.get_repo_uri(repo) for repo in self.deployed_repos] - self.update_specified_image_repos(specified_image_repos) - self.unreferenced_repos = self.manager.get_unreferenced_repos() - - def update_specified_image_repos(self, specified_image_repos: Dict[str, str]) -> None: - """ - Update list of image repos specified for each function. - updates missing_repo_functions and auto_ecr_repo_functions accordingly. - - Parameters - ---------- - specified_image_repos: Dict[str, str] - Dictionary of image repo URIs with key as function logical ID and value as image repo URI - """ - self.missing_repo_functions.clear() - self.auto_ecr_repo_functions.clear() - for function_logical_id in self.function_logical_ids: - if not specified_image_repos or function_logical_id not in specified_image_repos: - self.missing_repo_functions.append(function_logical_id) - continue - - repo_uri = specified_image_repos[function_logical_id] - if self.manager.is_repo_uri(repo_uri, function_logical_id): - self.auto_ecr_repo_functions.append(function_logical_id) - self.manager.set_functions(self.missing_repo_functions + self.auto_ecr_repo_functions) - - def remove_unreferenced_repos_from_mapping(self, image_repositories: Dict[str, str]) -> Dict[str, str]: - """ - Removes image repos that are not referenced by a function - - Parameters - ---------- - image_repositories: Dict[str, str] - Dictionary of image repo URIs with key as function logical ID and value as image repo URI - - Returns - ---------- - Dict[str, str] - Copy of image_repositories that have unreferenced image repos removed - """ - output_image_repositories = image_repositories.copy() - for function_logical_id, repo_uri in image_repositories.items(): - for repo in self.unreferenced_repos: - if self.manager.get_repo_uri(repo) == 
repo_uri: - del output_image_repositories[function_logical_id] - break - return output_image_repositories diff --git a/samcli/lib/bootstrap/companion_stack/data_types.py b/samcli/lib/bootstrap/companion_stack/data_types.py deleted file mode 100644 index 2493fd349c..0000000000 --- a/samcli/lib/bootstrap/companion_stack/data_types.py +++ /dev/null @@ -1,137 +0,0 @@ -""" - Date type classes for companion stacks -""" -import re -from typing import Optional -from samcli.lib.utils.hash import str_checksum - - -class CompanionStack: - """ - Abstraction class for the companion stack - Companion stack name will be generated by this class. - """ - - _parent_stack_name: str - _escaped_parent_stack_name: str - _parent_stack_hash: str - _stack_name: str - - def __init__(self, parent_stack_name: str) -> None: - self._parent_stack_name = parent_stack_name - self._escaped_parent_stack_name = re.sub(r"[^a-z0-9]", "", self._parent_stack_name.lower()) - self._parent_stack_hash = str_checksum(self._parent_stack_name) - # There is max 128 characters limit on the length of stack name. - # Using MD5 to avoid collision after trucating - # 104 + 1 + 8 + 15 = 128 max char - self._stack_name = f"{self._parent_stack_name[:104]}-{self._parent_stack_hash[:8]}-CompanionStack" - - @property - def parent_stack_name(self) -> str: - """ - Parent stack name - """ - return self._parent_stack_name - - @property - def escaped_parent_stack_name(self) -> str: - """ - Parent stack name with only alpha numerica characters - """ - return self._escaped_parent_stack_name - - @property - def parent_stack_hash(self) -> str: - """ - MD5 hash of parent stack name - """ - return self._parent_stack_hash - - @property - def stack_name(self) -> str: - """ - Companion stack stack name - """ - return self._stack_name - - -class ECRRepo: - """ - Abstraction class for ECR repos in companion stacks - Logical ID, Physical ID, and Repo URI will be generated with this class. 
- """ - - _function_logical_id: Optional[str] - _escaped_function_logical_id: Optional[str] - _function_md5: Optional[str] - _companion_stack: Optional[CompanionStack] - _logical_id: Optional[str] - _physical_id: Optional[str] - _output_logical_id: Optional[str] - - def __init__( - self, - companion_stack: Optional[CompanionStack] = None, - function_logical_id: Optional[str] = None, - logical_id: Optional[str] = None, - physical_id: Optional[str] = None, - output_logical_id: Optional[str] = None, - ): - """ - Must be specified either with - companion_stack and function_logical_id - or - logical_id, physical_id, and output_logical_id - """ - self._function_logical_id = function_logical_id - self._escaped_function_logical_id = ( - re.sub(r"[^a-z0-9]", "", self._function_logical_id.lower()) - if self._function_logical_id is not None - else None - ) - self._function_md5 = str_checksum(self._function_logical_id) if self._function_logical_id is not None else None - self._companion_stack = companion_stack - - self._logical_id = logical_id - self._physical_id = physical_id - self._output_logical_id = output_logical_id - - @property - def logical_id(self) -> Optional[str]: - if self._logical_id is None and self._function_logical_id and self._function_md5: - # MD5 is used to avoid two having the same escaped name with different Lambda Functions - # For example: Helloworld and HELLO-WORLD - # 52 + 8 + 4 = 64 max char - self._logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Repo" - return self._logical_id - - @property - def physical_id(self) -> Optional[str]: - if ( - self._physical_id is None - and self._companion_stack - and self._function_md5 - and self._escaped_function_logical_id - ): - # The physical ID is constructed with escaped_stack_name + stack_md5[:8] as prefix/path and - # followed by escaped_lambda_logical_id + function_md5[:8] + "repo" to show - # the linkage between the function and the repo - # 128 + 8 + 1 + 64 + 8 + 4 = 213 max char - 
self._physical_id = ( - self._companion_stack.escaped_parent_stack_name - + self._companion_stack.parent_stack_hash[:8] - + "/" - + self._escaped_function_logical_id - + self._function_md5[:8] - + "repo" - ) - return self._physical_id - - @property - def output_logical_id(self) -> Optional[str]: - if self._output_logical_id is None and self._function_logical_id and self._function_md5: - self._output_logical_id = self._function_logical_id[:52] + self._function_md5[:8] + "Out" - return self._output_logical_id - - def get_repo_uri(self, account_id, region) -> str: - return f"{account_id}.dkr.ecr.{region}.amazonaws.com/{self.physical_id}" diff --git a/samcli/lib/build/build_strategy.py b/samcli/lib/build/build_strategy.py index 258101ba2d..ecded3a743 100644 --- a/samcli/lib/build/build_strategy.py +++ b/samcli/lib/build/build_strategy.py @@ -5,7 +5,6 @@ import pathlib import shutil from abc import abstractmethod, ABC -from copy import deepcopy from typing import Callable, Dict, List, Any, Optional, cast from samcli.commands.build.exceptions import MissingBuildMethodException @@ -115,10 +114,6 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini LOG.debug("Building to following folder %s", single_build_dir) - # we should create a copy and pass it down, otherwise additional env vars like LAMBDA_BUILDERS_LOG_LEVEL - # will make cache invalid all the time - container_env_vars = deepcopy(build_definition.env_vars) - # when a function is passed here, it is ZIP function, codeuri and runtime are not None result = self._build_function( build_definition.get_function_name(), @@ -128,7 +123,7 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini build_definition.get_handler_name(), single_build_dir, build_definition.metadata, - container_env_vars, + build_definition.env_vars, ) function_build_results[single_full_path] = result diff --git a/samcli/lib/cli_validation/image_repository_validation.py 
b/samcli/lib/cli_validation/image_repository_validation.py index 20db8301a6..329e855019 100644 --- a/samcli/lib/cli_validation/image_repository_validation.py +++ b/samcli/lib/cli_validation/image_repository_validation.py @@ -12,7 +12,7 @@ def image_repository_validation(func): """ Wrapper Validation function that will run last after the all cli parmaters have been loaded - to check for conditions surrounding `--image-repository`, `--image-repositories`, and `--resolve-image-repos`. The + to check for conditions surrounding `--image-repository` and `--image-repositories`. The reason they are done last instead of in callback functions, is because the options depend on each other, and this breaks cyclic dependencies. @@ -25,12 +25,11 @@ def wrapped(*args, **kwargs): guided = ctx.params.get("guided", False) or ctx.params.get("g", False) image_repository = ctx.params.get("image_repository", False) image_repositories = ctx.params.get("image_repositories", False) or {} - resolve_image_repos = ctx.params.get("resolve_image_repos", False) template_file = ( ctx.params.get("t", False) or ctx.params.get("template_file", False) or ctx.params.get("template", False) ) - # Check if `--image-repository`, `--image-repositories`, or `--resolve-image-repos` are required by + # Check if `--image-repository` or `--image-repositories` are required by # looking for resources that have an IMAGE based packagetype. required = any( @@ -51,22 +50,11 @@ def wrapped(*args, **kwargs): ), ), Validator( - validation_function=lambda: image_repository and resolve_image_repos, - exception=click.BadOptionUsage( - option_name="--resolve-image-repos", - ctx=ctx, - message="Both '--resolve-image-repos' and '--image-repository' cannot be provided. 
" - "Do you have both specified in the command or in a configuration file?", - ), - ), - Validator( - validation_function=lambda: not guided - and not (image_repository or image_repositories or resolve_image_repos) - and required, + validation_function=lambda: not guided and not (image_repository or image_repositories) and required, exception=click.BadOptionUsage( option_name="--image-repositories", ctx=ctx, - message="Missing option '--image-repository', '--image-repositories', or '--resolve-image-repos'", + message="Missing option '--image-repository' or '--image-repositories'", ), ), Validator( @@ -74,13 +62,11 @@ def wrapped(*args, **kwargs): and ( set(image_repositories.keys()) != set(get_template_function_resource_ids(template_file, IMAGE)) and image_repositories - and not resolve_image_repos ), exception=click.BadOptionUsage( option_name="--image-repositories", ctx=ctx, - message="Incomplete list of function logical ids specified for '--image-repositories'. " - "You can also add --resolve-image-repos to automatically create missing repositories.", + message="Incomplete list of function logical ids specified for '--image-repositories'", ), ), ] diff --git a/samcli/lib/package/ecr_utils.py b/samcli/lib/package/ecr_utils.py index f4bedc4a27..6186d24099 100644 --- a/samcli/lib/package/ecr_utils.py +++ b/samcli/lib/package/ecr_utils.py @@ -6,5 +6,5 @@ from samcli.lib.package.regexpr import ECR_URL -def is_ecr_url(url: str) -> bool: +def is_ecr_url(url): return bool(re.match(ECR_URL, url)) if url else False diff --git a/samcli/lib/package/stream_cursor_utils.py b/samcli/lib/package/stream_cursor_utils.py index c8a037f4ab..908293c317 100644 --- a/samcli/lib/package/stream_cursor_utils.py +++ b/samcli/lib/package/stream_cursor_utils.py @@ -1,20 +1,11 @@ """ Stream cursor utilities for moving cursor in the terminal. """ -import os -import platform # NOTE: ANSI escape codes. # NOTE: Still needs investigation on non terminal environments. 
ESC = "\u001B[" -# Enables ANSI escape codes on Windows -if platform.system().lower() == "windows": - try: - os.system("color") - except Exception: - pass - def cursor_up(count=1): return ESC + str(count) + "A" diff --git a/samcli/lib/providers/sam_base_provider.py b/samcli/lib/providers/sam_base_provider.py index 7a75c70cc8..c059284eb8 100644 --- a/samcli/lib/providers/sam_base_provider.py +++ b/samcli/lib/providers/sam_base_provider.py @@ -10,8 +10,6 @@ from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable from samcli.lib.samlib.resource_metadata_normalizer import ResourceMetadataNormalizer from samcli.lib.samlib.wrapper import SamTranslatorWrapper -from samcli.lib.package.ecr_utils import is_ecr_url - LOG = logging.getLogger(__name__) @@ -36,11 +34,6 @@ class SamBaseProvider: SERVERLESS_LAYER: "ContentUri", } - IMAGE_PROPERTY_KEYS = { - LAMBDA_FUNCTION: "Code", - SERVERLESS_FUNCTION: "ImageUri", - } - def get(self, name: str) -> Optional[Any]: """ Given name of the function, this method must return the Function object @@ -95,17 +88,6 @@ def _is_s3_location(location: Optional[Union[str, Dict]]) -> bool: isinstance(location, str) and location.startswith("s3://") ) - @staticmethod - def _is_ecr_uri(location: Optional[Union[str, Dict]]) -> bool: - """ - the input could be: - - ImageUri of Serverless::Function - - Code of Lambda::Function - """ - return location is not None and is_ecr_url( - str(location.get("ImageUri", "")) if isinstance(location, dict) else location - ) - @staticmethod def _warn_code_extraction(resource_type: str, resource_name: str, code_property: str) -> None: LOG.warning( @@ -116,16 +98,6 @@ def _warn_code_extraction(resource_type: str, resource_name: str, code_property: code_property, ) - @staticmethod - def _warn_imageuri_extraction(resource_type: str, resource_name: str, image_property: str) -> None: - LOG.warning( - "The resource %s '%s' has specified ECR registry image for %s. 
" - "It will not be built and SAM CLI does not support invoking it locally.", - resource_type, - resource_name, - image_property, - ) - @staticmethod def _extract_lambda_function_imageuri(resource_properties: Dict, code_property_key: str) -> Optional[str]: """ diff --git a/samcli/lib/providers/sam_function_provider.py b/samcli/lib/providers/sam_function_provider.py index 6bffc4bf75..7bc231f929 100644 --- a/samcli/lib/providers/sam_function_provider.py +++ b/samcli/lib/providers/sam_function_provider.py @@ -130,28 +130,13 @@ def _extract_functions( resource_properties["Metadata"] = resource_metadata if resource_type in [SamFunctionProvider.SERVERLESS_FUNCTION, SamFunctionProvider.LAMBDA_FUNCTION]: - resource_package_type = resource_properties.get("PackageType", ZIP) - code_property_key = SamBaseProvider.CODE_PROPERTY_KEYS[resource_type] - image_property_key = SamBaseProvider.IMAGE_PROPERTY_KEYS[resource_type] - - if resource_package_type == ZIP and SamBaseProvider._is_s3_location( - resource_properties.get(code_property_key) - ): - + if SamBaseProvider._is_s3_location(resource_properties.get(code_property_key)): # CodeUri can be a dictionary of S3 Bucket/Key or a S3 URI, neither of which are supported if not ignore_code_extraction_warnings: SamFunctionProvider._warn_code_extraction(resource_type, name, code_property_key) continue - if resource_package_type == IMAGE and SamBaseProvider._is_ecr_uri( - resource_properties.get(image_property_key) - ): - # ImageUri can be an ECR uri, which is not supported - if not ignore_code_extraction_warnings: - SamFunctionProvider._warn_imageuri_extraction(resource_type, name, image_property_key) - continue - if resource_type == SamFunctionProvider.SERVERLESS_FUNCTION: layers = SamFunctionProvider._parse_layer_info( stack, diff --git a/samcli/lib/utils/managed_cloudformation_stack.py b/samcli/lib/utils/managed_cloudformation_stack.py index 493a4fb48e..25973fbc8b 100644 --- a/samcli/lib/utils/managed_cloudformation_stack.py +++ 
b/samcli/lib/utils/managed_cloudformation_stack.py @@ -59,10 +59,11 @@ def _create_or_get_stack(cloudformation_client, stack_name, template_body): ds_resp = cloudformation_client.describe_stacks(StackName=stack_name) stacks = ds_resp["Stacks"] stack = stacks[0] + click.echo("\n\tLooking for resources needed for deployment: Found!") _check_sanity_of_stack(stack, stack_name) return stack["Outputs"] except ClientError: - pass + click.echo("\n\tLooking for resources needed for deployment: Not found.") try: stack = _create_stack( diff --git a/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml b/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml deleted file mode 100644 index b5d2c62085..0000000000 --- a/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml +++ /dev/null @@ -1,19 +0,0 @@ -Resources: - MQFunction: - Type: 'AWS::Serverless::Function' - Properties: - CodeUri: s3://sam-demo-bucket/queues.zip - Handler: queue.mq_handler - Runtime: python2.7 - Events: - MyMQQueue: - Type: MQ - Properties: - Broker: arn:aws:mq:us-east-2:123456789012:broker:MyBroker:b-1234a5b6-78cd-901e-2fgh-3i45j6k178l9 - Queues: - - "Queue1" - SourceAccessConfigurations: - - Type: BASIC_AUTH - URI: arn:aws:secretsmanager:us-west-2:123456789012:secret:my-path/my-secret-name-1a2b3c - - Type: VIRTUAL_HOST - URI: vhost_name \ No newline at end of file diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 4681a4f2eb..ba25849672 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -1346,36 +1346,6 @@ def test_cache_build(self, use_container, code_uri, function1_handler, function2 expected_messages, command_result, self._make_parameter_override_arg(overrides) ) - @skipIf(SKIP_DOCKER_TESTS, SKIP_DOCKER_MESSAGE) - def test_cached_build_with_env_vars(self): - """ - Build 2 times to verify that second 
time hits the cached build - """ - overrides = { - "FunctionCodeUri": "Python", - "Function1Handler": "main.first_function_handler", - "Function2Handler": "main.second_function_handler", - "FunctionRuntime": "python3.8", - } - cmdlist = self.get_command_list( - use_container=True, parameter_overrides=overrides, cached=True, container_env_var="FOO=BAR" - ) - - LOG.info("Running Command (cache should be invalid): %s", cmdlist) - command_result = run_command(cmdlist, cwd=self.working_dir) - self.assertTrue( - "Cache is invalid, running build and copying resources to function build definition" - in command_result.stderr.decode("utf-8") - ) - - LOG.info("Re-Running Command (valid cache should exist): %s", cmdlist) - command_result_with_cache = run_command(cmdlist, cwd=self.working_dir) - - self.assertTrue( - "Valid cache found, copying previously built resources from function build definition" - in command_result_with_cache.stderr.decode("utf-8") - ) - @skipIf( ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py index 870ee7203a..72ecb0145c 100644 --- a/tests/integration/deploy/deploy_integ_base.py +++ b/tests/integration/deploy/deploy_integ_base.py @@ -46,7 +46,6 @@ def get_deploy_command_list( resolve_s3=False, config_file=None, signing_profiles=None, - resolve_image_repos=False, ): command_list = [self.base_command(), "deploy"] @@ -104,8 +103,6 @@ def get_deploy_command_list( command_list = command_list + ["--config-file", str(config_file)] if signing_profiles: command_list = command_list + ["--signing-profiles", str(signing_profiles)] - if resolve_image_repos: - command_list = command_list + ["--resolve-image-repos"] return command_list diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 13128db486..3e4bd53f87 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ 
b/tests/integration/deploy/test_deploy_command.py @@ -1,5 +1,4 @@ import os -from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack import shutil import tempfile import time @@ -8,7 +7,6 @@ from unittest import skipIf import boto3 -from botocore.exceptions import ClientError import docker from botocore.config import Config from parameterized import parameterized @@ -50,8 +48,7 @@ def setUpClass(cls): DeployIntegBase.setUpClass() def setUp(self): - self.cfn_client = boto3.client("cloudformation") - self.ecr_client = boto3.client("ecr") + self.cf_client = boto3.client("cloudformation") self.sns_arn = os.environ.get("AWS_SNS") self.stacks = [] time.sleep(CFN_SLEEP) @@ -64,12 +61,10 @@ def tearDown(self): stack_name = stack["name"] if stack_name != SAM_CLI_STACK_NAME: region = stack.get("region") - cfn_client = ( + cf_client = ( self.cf_client if not region else boto3.client("cloudformation", config=Config(region_name=region)) ) - ecr_client = self.ecr_client if not region else boto3.client("ecr", config=Config(region_name=region)) - self._delete_companion_stack(cfn_client, ecr_client, self._stack_name_to_companion_stack(stack_name)) - cfn_client.delete_stack(StackName=stack_name) + cf_client.delete_stack(StackName=stack_name) super().tearDown() @parameterized.expand(["aws-serverless-function.yaml"]) @@ -201,33 +196,6 @@ def test_no_package_and_deploy_with_s3_bucket_all_args_image_repositories(self, deploy_process_execute = run_command(deploy_command_list) self.assertEqual(deploy_process_execute.process.returncode, 0) - @parameterized.expand(["aws-serverless-function-image.yaml"]) - def test_no_package_and_deploy_with_s3_bucket_all_args_resolve_image_repos(self, template_file): - template_path = self.test_data_path.joinpath(template_file) - - stack_name = self._method_to_stack_name(self.id()) - self.stack_names.append(stack_name) - - # Package and Deploy in one go without confirming change set. 
- deploy_command_list = self.get_deploy_command_list( - template_file=template_path, - stack_name=stack_name, - capabilities="CAPABILITY_IAM", - s3_prefix="integ_deploy", - s3_bucket=self.s3_bucket.name, - force_upload=True, - notification_arns=self.sns_arn, - parameter_overrides="Parameter=Clarity", - kms_key_id=self.kms_key, - no_execute_changeset=False, - tags="integ=true clarity=yes foo_bar=baz", - confirm_changeset=False, - resolve_image_repos=True, - ) - - deploy_process_execute = run_command(deploy_command_list) - self.assertEqual(deploy_process_execute.process.returncode, 0) - @parameterized.expand(["aws-serverless-function.yaml"]) def test_no_package_and_deploy_with_s3_bucket_and_no_confirm_changeset(self, template_file): template_path = self.test_data_path.joinpath(template_file) @@ -345,8 +313,7 @@ def test_deploy_without_s3_bucket(self, template_file): self.assertEqual(deploy_process_execute.process.returncode, 1) self.assertIn( bytes( - f"S3 Bucket not specified, use --s3-bucket to specify a bucket name, or use --resolve-s3 \ -to create a managed default bucket, or run sam deploy --guided", + f"S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided", encoding="utf-8", ), deploy_process_execute.stderr, @@ -597,7 +564,7 @@ def test_deploy_guided_zip(self, template_file): os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @parameterized.expand(["aws-serverless-function-image.yaml"]) - def test_deploy_guided_image_auto(self, template_file): + def test_deploy_guided_image(self, template_file): template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) @@ -607,7 +574,7 @@ def test_deploy_guided_image_auto(self, template_file): deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) deploy_process_execute = run_command_with_input( - deploy_command_list, f"{stack_name}\n\n\n\ny\n\n\ny\n\n\n\n".encode() + 
deploy_command_list, f"{stack_name}\n\n{self.ecr_repo_name}\n\n\ny\n\n\n\n\n\n".encode() ) # Deploy should succeed with a managed stack @@ -616,34 +583,6 @@ def test_deploy_guided_image_auto(self, template_file): # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) - @parameterized.expand(["aws-serverless-function-image.yaml"]) - def test_deploy_guided_image_specify(self, template_file): - template_path = self.test_data_path.joinpath(template_file) - - stack_name = self._method_to_stack_name(self.id()) - self.stack_names.append(stack_name) - - # Package and Deploy in one go without confirming change set. - deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) - - deploy_process_execute = run_command_with_input( - deploy_command_list, f"{stack_name}\n\n\n\ny\n\n\n\nn\n{self.ecr_repo_name}\n\n\n\n".encode() - ) - - # Deploy should succeed with a managed stack - self.assertEqual(deploy_process_execute.process.returncode, 0) - # Verify companion stack does not exist - try: - self.cfn_client.describe_stacks(StackName=self._stack_name_to_companion_stack(stack_name)) - except ClientError: - pass - else: - self.fail("Companion stack was created. This should not happen with specifying image repos.") - - self.stack_names.append(SAM_CLI_STACK_NAME) - # Remove samconfig.toml - os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) - @parameterized.expand(["aws-serverless-function.yaml"]) def test_deploy_guided_set_parameter(self, template_file): template_path = self.test_data_path.joinpath(template_file) @@ -934,24 +873,3 @@ def _method_to_stack_name(self, method_name): """Method expects method name which can be a full path. 
Eg: test.integration.test_deploy_command.method_name""" method_name = method_name.split(".")[-1] return f"{method_name.replace('_', '-')}-{CFN_PYTHON_VERSION_SUFFIX}" - - def _stack_name_to_companion_stack(self, stack_name): - return CompanionStack(stack_name).stack_name - - def _delete_companion_stack(self, cfn_client, ecr_client, companion_stack_name): - repos = list() - try: - cfn_client.describe_stacks(StackName=companion_stack_name) - except ClientError: - return - stack = boto3.resource("cloudformation").Stack(companion_stack_name) - resources = stack.resource_summaries.all() - for resource in resources: - if resource.resource_type == "AWS::ECR::Repository": - repos.append(resource.physical_resource_id) - for repo in repos: - try: - ecr_client.delete_repository(repositoryName=repo, force=True) - except ecr_client.exceptions.RepositoryNotFoundException: - pass - cfn_client.delete_stack(StackName=companion_stack_name) diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py index 2154ad6910..9c482d7a3c 100644 --- a/tests/regression/deploy/regression_deploy_base.py +++ b/tests/regression/deploy/regression_deploy_base.py @@ -42,7 +42,6 @@ def get_deploy_command_list( tags=None, profile=None, region=None, - resolve_image_repos=False, ): command_list = self.base_command(base=base) @@ -80,8 +79,6 @@ def get_deploy_command_list( command_list = command_list + ["--region", str(region)] if profile: command_list = command_list + ["--profile", str(profile)] - if resolve_image_repos: - command_list = command_list + ["--resolve-image-repos"] return command_list diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py index d2547cecad..6781972a58 100644 --- a/tests/unit/commands/deploy/test_command.py +++ b/tests/unit/commands/deploy/test_command.py @@ -47,26 +47,8 @@ def setUp(self): self.config_env = "mock-default-env" self.config_file = "mock-default-filename" 
self.signing_profiles = None - self.resolve_image_repos = False MOCK_SAM_CONFIG.reset_mock() - self.companion_stack_manager_helper_patch = patch( - "samcli.commands.deploy.guided_context.CompanionStackManagerHelper" - ) - self.companion_stack_manager_helper_mock = self.companion_stack_manager_helper_patch.start() - self.companion_stack_manager_helper_mock.return_value.missing_repo_functions = ["HelloWorldFunction"] - self.companion_stack_manager_helper_mock.return_value.function_logical_ids = ["HelloWorldFunction"] - self.companion_stack_manager_helper_mock.return_value.unreferenced_repos = ["HelloWorldFunctionB"] - self.companion_stack_manager_helper_mock.return_value.get_repository_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - self.companion_stack_manager_helper_mock.return_value.remove_unreferenced_repos_from_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - - def tearDown(self): - self.companion_stack_manager_helper_patch.stop() - @patch("samcli.commands.package.command.click") @patch("samcli.commands.package.package_context.PackageContext") @patch("samcli.commands.deploy.command.click") @@ -103,7 +85,6 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, - resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -140,6 +121,7 @@ def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_con @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") 
@patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @@ -151,6 +133,7 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( mock_prompt, mock_signer_config_per_function, mock_sam_function_provider, + mock_get_template_artifacts_format, mock_get_buildable_stacks, mock_get_template_parameters, mockauth_per_resource, @@ -161,7 +144,8 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( mock_package_click, ): mock_get_buildable_stacks.return_value = (Mock(), []) - mock_sam_function_provider.return_value.functions = {} + mock_sam_function_provider.return_value = {} + mock_get_template_artifacts_format.return_value = [ZIP] context_mock = Mock() mockauth_per_resource.return_value = [("HelloWorldResource1", False), ("HelloWorldResource2", False)] mock_deploy_context.return_value.__enter__.return_value = context_mock @@ -213,7 +197,6 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, - resolve_image_repos=self.resolve_image_repos, ) @patch("samcli.commands.package.command.click") @@ -224,6 +207,8 @@ def test_all_args_guided_no_to_authorization_confirmation_prompt( @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @@ -237,6 +222,8 @@ def 
test_all_args_guided( mock_prompt, mock_signer_config_per_function, mock_sam_function_provider, + mock_get_template_function_resource_ids, + mock_get_template_artifacts_format, mock_get_buildable_stacks, mock_get_template_parameters, mockauth_per_resource, @@ -248,19 +235,22 @@ def test_all_args_guided( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) + mock_get_template_artifacts_format.return_value = [IMAGE] mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_deploy_context.return_value.__enter__.return_value = context_mock - mock_confirm.side_effect = [True, False, True, True, True, True] + mock_confirm.side_effect = [True, False, True, True] mock_prompt.side_effect = [ "sam-app", "us-east-1", "guidedParameter", "secure", + "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env", @@ -303,7 +293,6 @@ def test_all_args_guided( resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, - resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -358,6 +347,8 @@ def test_all_args_guided( @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.get_template_parameters") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object( @@ -375,6 
+366,8 @@ def test_all_args_guided_no_save_echo_param_to_config( mock_prompt, mock_signer_config_per_function, mock_sam_function_provider, + mock_get_template_artifacts_format, + mock_get_template_function_resource_ids, mock_get_template_parameters, mock_get_buildable_stacks, mockauth_per_resource, @@ -386,11 +379,13 @@ def test_all_args_guided_no_save_echo_param_to_config( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) + mock_get_template_artifacts_format.return_value = [IMAGE] mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = { "Myparameter": {"Type": "String"}, @@ -404,11 +399,12 @@ def test_all_args_guided_no_save_echo_param_to_config( "guidedParameter", "guided parameter with spaces", "secure", + "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env", ] - mock_confirm.side_effect = [True, False, True, True, True, True] + mock_confirm.side_effect = [True, False, True, True] mock_managed_stack.return_value = "managed-s3-bucket" mock_signer_config_per_function.return_value = ({}, {}) @@ -440,7 +436,6 @@ def test_all_args_guided_no_save_echo_param_to_config( resolve_s3=self.resolve_s3, config_env=self.config_env, config_file=self.config_file, - resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -510,6 +505,8 @@ def test_all_args_guided_no_save_echo_param_to_config( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.get_template_parameters") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") + 
@patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch.object( GuidedConfig, @@ -529,6 +526,8 @@ def test_all_args_guided_no_params_save_config( mock_confirm, mock_prompt, mock_sam_function_provider, + mock_get_template_function_resource_ids, + mock_get_template_artifacts_format, mock_signer_config_per_function, mock_get_template_parameters, mock_managed_stack, @@ -541,11 +540,13 @@ def test_all_args_guided_no_params_save_config( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) + mock_get_template_artifacts_format.return_value = [IMAGE] mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = {} @@ -553,11 +554,12 @@ def test_all_args_guided_no_params_save_config( mock_prompt.side_effect = [ "sam-app", "us-east-1", + "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env", ] - mock_confirm.side_effect = [True, False, True, True, True, True] + mock_confirm.side_effect = [True, False, True, True] mock_get_cmd_names.return_value = ["deploy"] mock_managed_stack.return_value = "managed-s3-bucket" mock_signer_config_per_function.return_value = ({}, {}) @@ -589,7 +591,6 @@ def test_all_args_guided_no_params_save_config( config_env=self.config_env, config_file=self.config_file, signing_profiles=self.signing_profiles, - resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -648,6 +649,8 @@ def test_all_args_guided_no_params_save_config( 
@patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.get_template_parameters") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config()))) @@ -661,6 +664,8 @@ def test_all_args_guided_no_params_no_save_config( mock_prompt, mock_signer_config_per_function, mock_sam_function_provider, + mock_get_template_artifacts_format, + mock_get_template_function_resource_ids, mock_get_template_parameters, mock_get_buildable_stacks, mockauth_per_resource, @@ -672,20 +677,23 @@ def test_all_args_guided_no_params_no_save_config( ): mock_get_buildable_stacks.return_value = (Mock(), []) mock_tag_translation.return_value = "helloworld-123456-v1" + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] context_mock = Mock() mock_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) + mock_get_template_artifacts_format.return_value = [IMAGE] mockauth_per_resource.return_value = [("HelloWorldResource", False)] mock_get_template_parameters.return_value = {} mock_deploy_context.return_value.__enter__.return_value = context_mock mock_prompt.side_effect = [ "sam-app", "us-east-1", + "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1", ("CAPABILITY_IAM",), ] - mock_confirm.side_effect = [True, False, True, False, True, True] + mock_confirm.side_effect = [True, False, True, False] mock_managed_stack.return_value = "managed-s3-bucket" mock_signer_config_per_function.return_value = ({}, {}) @@ -719,7 +727,6 
@@ def test_all_args_guided_no_params_no_save_config( config_file=self.config_file, config_env=self.config_env, signing_profiles=self.signing_profiles, - resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -789,7 +796,6 @@ def test_all_args_resolve_s3( config_file=self.config_file, config_env=self.config_env, signing_profiles=self.signing_profiles, - resolve_image_repos=self.resolve_image_repos, ) mock_deploy_context.assert_called_with( @@ -847,5 +853,4 @@ def test_resolve_s3_and_s3_bucket_both_set(self): config_file=self.config_file, config_env=self.config_env, signing_profiles=self.signing_profiles, - resolve_image_repos=self.resolve_image_repos, ) diff --git a/tests/unit/commands/deploy/test_guided_context.py b/tests/unit/commands/deploy/test_guided_context.py index eaf40d24dc..6e49b73a60 100644 --- a/tests/unit/commands/deploy/test_guided_context.py +++ b/tests/unit/commands/deploy/test_guided_context.py @@ -21,54 +21,34 @@ def setUp(self): image_repository=None, image_repositories={"HelloWorldFunction": "image-repo"}, ) - self.companion_stack_manager_helper_patch = patch( - "samcli.commands.deploy.guided_context.CompanionStackManagerHelper" - ) - self.companion_stack_manager_helper_mock = self.companion_stack_manager_helper_patch.start() - self.companion_stack_manager_helper_mock.return_value.missing_repo_functions = ["HelloWorldFunction"] - self.companion_stack_manager_helper_mock.return_value.function_logical_ids = ["HelloWorldFunction"] - self.companion_stack_manager_helper_mock.return_value.unreferenced_repos = ["HelloWorldFunctionB"] - self.companion_stack_manager_helper_mock.return_value.get_repository_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - self.companion_stack_manager_helper_mock.return_value.remove_unreferenced_repos_from_mapping.return_value = { - "HelloWorldFunction": "123456789012.dkr.ecr.us-east-1.amazonaws.com/test1" - } - 
self.companion_stack_manager_helper_mock.return_value.missing_repo_functions = ["HelloWorldFunction"] - - self.verify_image_patch = patch( - "samcli.commands.deploy.guided_context.GuidedContext.verify_images_exist_locally" - ) - self.verify_image_mock = self.verify_image_patch.start() - - def tearDown(self): - self.companion_stack_manager_helper_patch.stop() - self.verify_image_patch.stop() @patch("samcli.commands.deploy.guided_context.prompt") @patch("samcli.commands.deploy.guided_context.confirm") @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_defaults_non_public_resources_zips( self, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, - patched_auth_per_resource, + patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value.functions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. 
- patched_auth_per_resource.return_value = [ + patchedauth_per_resource.return_value = [ ("HelloWorldFunction", True), ] - patched_confirm.side_effect = [True, False, "", True, True, True] + patched_confirm.side_effect = [True, False, "", True] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) self.gc.guided_prompts(parameter_override_keys=None) @@ -77,14 +57,6 @@ def test_guided_prompts_check_defaults_non_public_resources_zips( call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -104,12 +76,14 @@ def test_guided_prompts_check_defaults_non_public_resources_zips( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_defaults_public_resources_zips( self, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -117,11 +91,12 @@ def test_guided_prompts_check_defaults_public_resources_zips( 
patched_prompt, ): patched_signer_config_per_function.return_value = (None, None) - patched_sam_function_provider.return_value.functions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] + patched_confirm.side_effect = [True, False, True, False, ""] patched_manage_stack.return_value = "managed_s3_stack" self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. @@ -133,14 +108,6 @@ def test_guided_prompts_check_defaults_public_resources_zips( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -157,6 +124,8 @@ def test_guided_prompts_check_defaults_public_resources_zips( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.tag_translation") @@ -167,6 +136,8 @@ def 
test_guided_prompts_check_defaults_public_resources_images( patched_tag_translation, patched_click_secho, patched_sam_function_provider, + patched_get_template_artifacts_format, + mock_get_template_function_resource_ids, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -174,20 +145,23 @@ def test_guided_prompts_check_defaults_public_resources_images( patched_prompt, ): + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] patched_signer_config_per_function.return_value = (None, None) patched_tag_translation.return_value = "helloworld-123456-v1" patched_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="helloworld:v1")} ) + patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) patched_prompt.side_effect = [ "sam-app", "region", + "123456789012.dkr.ecr.region.amazonaws.com/myrepo", "CAPABILITY_IAM", ] # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] + patched_confirm.side_effect = [True, False, True, False, ""] patched_manage_stack.return_value = "managed_s3_stack" self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. 
@@ -199,14 +173,6 @@ def test_guided_prompts_check_defaults_public_resources_images( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -214,6 +180,10 @@ def test_guided_prompts_check_defaults_public_resources_images( expected_prompt_calls = [ call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), + call( + f"\t{self.gc.start_bold}Image Repository for HelloWorldFunction{self.gc.end_bold}", + default="image-repo", + ), call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), ] self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) @@ -221,6 +191,10 @@ def test_guided_prompts_check_defaults_public_resources_images( print(expected_prompt_calls) print(patched_prompt.call_args_list) expected_click_secho_calls = [ + call( + f"\t helloworld:v1 to be pushed to 123456789012.dkr.ecr.region.amazonaws.com/myrepo:helloworld-123456-v1" + ), + call(nl=True), call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), ] @@ -231,6 +205,8 @@ def test_guided_prompts_check_defaults_public_resources_images( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + 
@patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @@ -239,12 +215,16 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( patched_signer_config_per_function, patched_click_secho, patched_sam_function_provider, + mock_get_template_function_resource_ids, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] + patched_sam_function_provider.return_value = MagicMock( functions={ "HelloWorldFunction": MagicMock( @@ -252,16 +232,17 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( ) } ) + patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) patched_prompt.side_effect = [ "sam-app", "region", + "123456789012.dkr.ecr.region.amazonaws.com/myrepo", "CAPABILITY_IAM", - "abc", ] # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] + patched_confirm.side_effect = [True, False, True, False, ""] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) self.gc.guided_prompts(parameter_override_keys=None) @@ -274,14 +255,6 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -289,125 +262,16 @@ def test_guided_prompts_check_defaults_public_resources_images_ecr_url( expected_prompt_calls = [ call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), - call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), - ] - self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) - # Now to check click secho outputs and no references to images pushed. 
- expected_click_secho_calls = [ - call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), - call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), - ] - self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) - - @patch("samcli.commands.deploy.guided_context.prompt") - @patch("samcli.commands.deploy.guided_context.confirm") - @patch("samcli.commands.deploy.guided_context.manage_stack") - @patch("samcli.commands.deploy.guided_context.auth_per_resource") - @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.click.secho") - @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - def test_guided_prompts_images_illegal_image_uri( - self, - patched_signer_config_per_function, - patched_click_secho, - patched_sam_function_provider, - patched_get_buildable_stacks, - patchedauth_per_resource, - patched_manage_stack, - patched_confirm, - patched_prompt, - ): - - # Set ImageUri to be None, the sam app was never built. - patched_sam_function_provider.return_value = MagicMock( - functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} - ) - patched_get_buildable_stacks.return_value = (Mock(), []) - patched_prompt.side_effect = [ - "sam-app", - "region", - "CAPABILITY_IAM", - "illegaluri", - ] - # Series of inputs to confirmations so that full range of questions are asked. 
- patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, False, True] - patched_manage_stack.return_value = "managed_s3_stack" - patched_signer_config_per_function.return_value = ({}, {}) - with self.assertRaises(GuidedDeployFailedError): - self.gc.guided_prompts(parameter_override_keys=None) - - @patch("samcli.commands.deploy.guided_context.prompt") - @patch("samcli.commands.deploy.guided_context.confirm") - @patch("samcli.commands.deploy.guided_context.manage_stack") - @patch("samcli.commands.deploy.guided_context.auth_per_resource") - @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.click.secho") - @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - def test_guided_prompts_images_missing_repo( - self, - patched_signer_config_per_function, - patched_click_secho, - patched_sam_function_provider, - patched_get_buildable_stacks, - patchedauth_per_resource, - patched_manage_stack, - patched_confirm, - patched_prompt, - ): - - self.companion_stack_manager_helper_mock.return_value.function_logical_ids = [ - "HelloWorldFunction", - "GoodbyeWorldFunction", - ] - # Set ImageUri to be None, the sam app was never built. - patched_sam_function_provider.return_value = MagicMock( - functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} - ) - patched_get_buildable_stacks.return_value = (Mock(), []) - patched_prompt.side_effect = [ - "sam-app", - "region", - "CAPABILITY_IAM", - ] - # Series of inputs to confirmations so that full range of questions are asked. 
- patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] - patched_manage_stack.return_value = "managed_s3_stack" - patched_signer_config_per_function.return_value = ({}, {}) - - self.gc.guided_prompts(parameter_override_keys=None) - # Now to check for all the defaults on confirmations. - expected_confirmation_calls = [ - call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), - call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), - call( - f"\t{self.gc.start_bold}HelloWorldFunction may not have authorization defined, Is this okay?{self.gc.end_bold}", - default=False, - ), - call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for the 1 functions without?{self.gc.end_bold}", - default=True, - ), call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, + f"\t{self.gc.start_bold}Image Repository for HelloWorldFunction{self.gc.end_bold}", + default="image-repo", ), - ] - self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) - - # Now to check for all the defaults on prompts. - expected_prompt_calls = [ - call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), - call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), ] self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) # Now to check click secho outputs and no references to images pushed. 
expected_click_secho_calls = [ + call(nl=True), call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), ] @@ -418,106 +282,41 @@ def test_guided_prompts_images_missing_repo( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - def test_guided_prompts_images_no_repo( + def test_guided_prompts_images_no_image_uri( self, patched_signer_config_per_function, patched_click_secho, patched_sam_function_provider, + mock_get_template_function_resource_ids, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] - self.companion_stack_manager_helper_mock.return_value.function_logical_ids = [] # Set ImageUri to be None, the sam app was never built. patched_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} ) + patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) patched_prompt.side_effect = [ "sam-app", "region", - "CAPABILITY_IAM", "123456789012.dkr.ecr.region.amazonaws.com/myrepo", - ] - # Series of inputs to confirmations so that full range of questions are asked. 
- patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] - patched_manage_stack.return_value = "managed_s3_stack" - patched_signer_config_per_function.return_value = ({}, {}) - - self.gc.guided_prompts(parameter_override_keys=None) - # Now to check for all the defaults on confirmations. - expected_confirmation_calls = [ - call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), - call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), - call( - f"\t{self.gc.start_bold}HelloWorldFunction may not have authorization defined, Is this okay?{self.gc.end_bold}", - default=False, - ), - call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), - ] - self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) - - # Now to check for all the defaults on prompts. - expected_prompt_calls = [ - call(f"\t{self.gc.start_bold}Stack Name{self.gc.end_bold}", default="test", type=click.STRING), - call(f"\t{self.gc.start_bold}AWS Region{self.gc.end_bold}", default="region", type=click.STRING), - call(f"\t{self.gc.start_bold}Capabilities{self.gc.end_bold}", default=["CAPABILITY_IAM"], type=ANY), - call( - f"\t {self.gc.start_bold}ECR repository for HelloWorldFunction{self.gc.end_bold}", - type=click.STRING, - ), - ] - self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) - # Now to check click secho outputs and no references to images pushed. 
- expected_click_secho_calls = [ - call("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"), - call("\t#SAM needs permission to be able to create roles to connect to the resources in your template"), - ] - self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) - - @patch("samcli.commands.deploy.guided_context.prompt") - @patch("samcli.commands.deploy.guided_context.confirm") - @patch("samcli.commands.deploy.guided_context.manage_stack") - @patch("samcli.commands.deploy.guided_context.auth_per_resource") - @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") - @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") - @patch("samcli.commands.deploy.guided_context.click.secho") - @patch("samcli.commands.deploy.guided_context.signer_config_per_function") - def test_guided_prompts_images_deny_deletion( - self, - patched_signer_config_per_function, - patched_click_secho, - patched_sam_function_provider, - patched_get_buildable_stacks, - patchedauth_per_resource, - patched_manage_stack, - patched_confirm, - patched_prompt, - ): - # Set ImageUri to be None, the sam app was never built. - patched_sam_function_provider.return_value = MagicMock( - functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri=None)} - ) - patched_get_buildable_stacks.return_value = (Mock(), []) - patched_prompt.side_effect = [ - "sam-app", - "region", "CAPABILITY_IAM", ] # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, False] + patched_confirm.side_effect = [True, False, True, False, ""] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) with self.assertRaises(GuidedDeployFailedError): @@ -528,6 +327,8 @@ def test_guided_prompts_images_deny_deletion( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") + @patch("samcli.commands.deploy.guided_context.get_template_function_resource_ids") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") @@ -536,26 +337,30 @@ def test_guided_prompts_images_blank_image_repository( patched_signer_config_per_function, patched_click_secho, patched_sam_function_provider, + mock_get_template_function_resource_ids, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): + mock_get_template_function_resource_ids.return_value = ["HelloWorldFunction"] + patched_sam_function_provider.return_value = MagicMock( functions={"HelloWorldFunction": MagicMock(packagetype=IMAGE, imageuri="mysamapp:v1")} ) + patched_get_template_artifacts_format.return_value = [IMAGE] patched_get_buildable_stacks.return_value = (Mock(), []) # set Image repository to be blank. patched_prompt.side_effect = [ "sam-app", "region", "", - "", ] # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, False, True] + patched_confirm.side_effect = [True, False, True, False, ""] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) with self.assertRaises(GuidedDeployFailedError): @@ -580,6 +385,7 @@ def test_guided_prompts_images_blank_image_repository( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_with_given_capabilities( @@ -587,6 +393,7 @@ def test_guided_prompts_with_given_capabilities( given_capabilities, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -597,21 +404,13 @@ def test_guided_prompts_with_given_capabilities( patched_get_buildable_stacks.return_value = (Mock(), []) self.gc.capabilities = given_capabilities # Series of inputs to confirmations so that full range of questions are asked. - patched_confirm.side_effect = [True, False, "", True, True, True] + patched_confirm.side_effect = [True, False, "", True] self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. 
expected_confirmation_calls = [ call(f"\t{self.gc.start_bold}Confirm changes before deploy{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Allow SAM CLI IAM role creation{self.gc.end_bold}", default=True), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -629,24 +428,27 @@ def test_guided_prompts_with_given_capabilities( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_configuration_file_prompt_calls( self, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value.fucntions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_get_buildable_stacks.return_value = (Mock(), []) patched_signer_config_per_function.return_value = ({}, {}) # Series of inputs to confirmations so that full range of questions are asked. 
patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, True, True, True] + patched_confirm.side_effect = [True, False, True, True, ""] patched_manage_stack.return_value = "managed_s3_stack" self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. @@ -658,14 +460,6 @@ def test_guided_prompts_check_configuration_file_prompt_calls( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -691,23 +485,26 @@ def test_guided_prompts_check_configuration_file_prompt_calls( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_parameter_from_template( self, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value.functions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full 
range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] + patched_confirm.side_effect = [True, False, True, False, ""] patched_manage_stack.return_value = "managed_s3_stack" patched_signer_config_per_function.return_value = ({}, {}) parameter_override_from_template = {"MyTestKey": {"Default": "MyTemplateDefaultVal"}} @@ -722,14 +519,6 @@ def test_guided_prompts_check_parameter_from_template( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -750,23 +539,26 @@ def test_guided_prompts_check_parameter_from_template( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_parameter_from_cmd_or_config( self, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, patched_confirm, patched_prompt, ): - patched_sam_function_provider.return_value.functions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_get_buildable_stacks.return_value = 
(Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, False, True, True] + patched_confirm.side_effect = [True, False, True, False, ""] patched_signer_config_per_function.return_value = ({}, {}) patched_manage_stack.return_value = "managed_s3_stack" parameter_override_from_template = {"MyTestKey": {"Default": "MyTemplateDefaultVal"}} @@ -781,14 +573,6 @@ def test_guided_prompts_check_parameter_from_cmd_or_config( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -820,12 +604,14 @@ def test_guided_prompts_check_parameter_from_cmd_or_config( @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") def test_guided_prompts_with_code_signing( self, given_sign_packages_flag, given_code_signing_configs, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_signer_config_per_function, patched_get_buildable_stacks, patchedauth_per_resource, @@ -836,11 +622,12 @@ def test_guided_prompts_with_code_signing( ): # given_sign_packages_flag = True # given_code_signing_configs = ({"MyFunction1"}, {"MyLayer1": {"MyFunction1"}, "MyLayer2": {"MyFunction1"}}) - 
patched_sam_function_provider.return_value.functions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_signer_config_per_function.return_value = given_code_signing_configs patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. - patched_confirm.side_effect = [True, False, given_sign_packages_flag, "", True, True, True] + patched_confirm.side_effect = [True, False, given_sign_packages_flag, "", True] self.gc.guided_prompts(parameter_override_keys=None) # Now to check for all the defaults on confirmations. expected_confirmation_calls = [ @@ -851,14 +638,6 @@ def test_guided_prompts_with_code_signing( default=True, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) @@ -893,12 +672,14 @@ def test_guided_prompts_with_code_signing( @patch("samcli.commands.deploy.guided_context.manage_stack") @patch("samcli.commands.deploy.guided_context.auth_per_resource") @patch("samcli.commands.deploy.guided_context.SamLocalStackProvider.get_stacks") + @patch("samcli.commands.deploy.guided_context.get_template_artifacts_format") @patch("samcli.commands.deploy.guided_context.SamFunctionProvider") @patch("samcli.commands.deploy.guided_context.signer_config_per_function") def test_guided_prompts_check_default_config_region( self, patched_signer_config_per_function, patched_sam_function_provider, + patched_get_template_artifacts_format, patched_get_buildable_stacks, patchedauth_per_resource, patched_manage_stack, @@ -906,11 +687,12 @@ def 
test_guided_prompts_check_default_config_region( patched_prompt, patched_get_session, ): - patched_sam_function_provider.return_value.functions = {} + patched_sam_function_provider.return_value = {} + patched_get_template_artifacts_format.return_value = [ZIP] patched_get_buildable_stacks.return_value = (Mock(), []) # Series of inputs to confirmations so that full range of questions are asked. patchedauth_per_resource.return_value = [("HelloWorldFunction", False)] - patched_confirm.side_effect = [True, False, True, True, True, True] + patched_confirm.side_effect = [True, False, True, True, ""] patched_signer_config_per_function.return_value = ({}, {}) patched_manage_stack.return_value = "managed_s3_stack" patched_get_session.return_value.get_config_variable.return_value = "default_config_region" @@ -926,14 +708,6 @@ def test_guided_prompts_check_default_config_region( default=False, ), call(f"\t{self.gc.start_bold}Save arguments to configuration file{self.gc.end_bold}", default=True), - call( - f"\t {self.gc.start_bold}Create managed ECR repositories for all functions?{self.gc.end_bold}", - default=True, - ), - call( - f"\t {self.gc.start_bold}Delete the unreferenced repositories listed above when deploying?{self.gc.end_bold}", - default=False, - ), ] self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) diff --git a/tests/unit/commands/local/lib/test_sam_function_provider.py b/tests/unit/commands/local/lib/test_sam_function_provider.py index 9daf92abc0..3d33f1a312 100644 --- a/tests/unit/commands/local/lib/test_sam_function_provider.py +++ b/tests/unit/commands/local/lib/test_sam_function_provider.py @@ -63,6 +63,10 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, + "SamFunc4": { + "Type": "AWS::Serverless::Function", + "Properties": {"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", "PackageType": IMAGE}, + }, "SamFuncWithFunctionNameOverride": { "Type": "AWS::Serverless::Function", 
"Properties": { @@ -72,29 +76,6 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, - "SamFuncWithImage1": { - "Type": "AWS::Serverless::Function", - "Properties": { - "PackageType": IMAGE, - }, - "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, - }, - "SamFuncWithImage2": { - "Type": "AWS::Serverless::Function", - "Properties": { - "ImageUri": "image:tag", - "PackageType": IMAGE, - }, - "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, - }, - "SamFuncWithImage3": { - # ImageUri is unsupported ECR location - "Type": "AWS::Serverless::Function", - "Properties": { - "ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo:myimage", - "PackageType": IMAGE, - }, - }, "LambdaFunc1": { "Type": "AWS::Lambda::Function", "Properties": { @@ -103,37 +84,21 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, - "LambdaFuncWithImage1": { - "Type": "AWS::Lambda::Function", - "Properties": { - "PackageType": IMAGE, - }, - "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, - }, - "LambdaFuncWithImage2": { + "LambdaFuncWithInlineCode": { "Type": "AWS::Lambda::Function", "Properties": { - "Code": {"ImageUri": "image:tag"}, - "PackageType": IMAGE, + "Code": {"ZipFile": "testcode"}, + "Runtime": "nodejs4.3", + "Handler": "index.handler", }, - "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, }, - "LambdaFuncWithImage3": { - # ImageUri is unsupported ECR location + "LambdaFunc2": { "Type": "AWS::Lambda::Function", "Properties": { "Code": {"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo"}, "PackageType": IMAGE, }, }, - "LambdaFuncWithInlineCode": { - "Type": "AWS::Lambda::Function", - "Properties": { - "Code": {"ZipFile": "testcode"}, - "Runtime": "nodejs4.3", - "Handler": "index.handler", - }, - }, "LambdaFuncWithLocalPath": { "Type": 
"AWS::Lambda::Function", "Properties": {"Code": "./some/path/to/code", "Runtime": "nodejs4.3", "Handler": "index.handler"}, @@ -283,10 +248,10 @@ def setUp(self): ("SamFunc2", None), # codeuri is a s3 location, ignored ("SamFunc3", None), # codeuri is a s3 location, ignored ( - "SamFuncWithImage1", + "SamFunc4", Function( - name="SamFuncWithImage1", - functionname="SamFuncWithImage1", + name="SamFunc4", + functionname="SamFunc4", runtime=None, handler=None, codeuri=".", @@ -297,46 +262,14 @@ def setUp(self): layers=[], events=None, inlinecode=None, - imageuri=None, + imageuri="123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", imageconfig=None, packagetype=IMAGE, - metadata={ - "DockerTag": "tag", - "DockerContext": os.path.join("image"), - "Dockerfile": "Dockerfile", - }, - codesign_config_arn=None, - stack_path="", - ), - ), - ( - "SamFuncWithImage2", - Function( - name="SamFuncWithImage2", - functionname="SamFuncWithImage2", - runtime=None, - handler=None, - codeuri=".", - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[], - events=None, - inlinecode=None, - imageuri="image:tag", - imageconfig=None, - packagetype=IMAGE, - metadata={ - "DockerTag": "tag", - "DockerContext": os.path.join("image"), - "Dockerfile": "Dockerfile", - }, + metadata=None, codesign_config_arn=None, stack_path="", ), ), - ("SamFuncWithImage3", None), # imageuri is ecr location, ignored ( "SamFuncWithFunctionNameOverride-x", Function( @@ -362,37 +295,33 @@ def setUp(self): ), ("LambdaFunc1", None), # codeuri is a s3 location, ignored ( - "LambdaFuncWithImage1", + "LambdaFuncWithInlineCode", Function( - name="LambdaFuncWithImage1", - functionname="LambdaFuncWithImage1", - runtime=None, - handler=None, - codeuri=".", + name="LambdaFuncWithInlineCode", + functionname="LambdaFuncWithInlineCode", + runtime="nodejs4.3", + handler="index.handler", + codeuri=None, memory=None, timeout=None, environment=None, rolearn=None, layers=[], events=None, - metadata={ - 
"DockerTag": "tag", - "DockerContext": os.path.join("image"), - "Dockerfile": "Dockerfile", - }, - inlinecode=None, + metadata=None, + inlinecode="testcode", + codesign_config_arn=None, imageuri=None, imageconfig=None, - packagetype=IMAGE, - codesign_config_arn=None, + packagetype=ZIP, stack_path="", ), ), ( - "LambdaFuncWithImage2", + "LambdaFunc2", Function( - name="LambdaFuncWithImage2", - functionname="LambdaFuncWithImage2", + name="LambdaFunc2", + functionname="LambdaFunc2", runtime=None, handler=None, codeuri=".", @@ -402,43 +331,15 @@ def setUp(self): rolearn=None, layers=[], events=None, - metadata={ - "DockerTag": "tag", - "DockerContext": os.path.join("image"), - "Dockerfile": "Dockerfile", - }, + metadata=None, inlinecode=None, - imageuri="image:tag", + imageuri="123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", imageconfig=None, packagetype=IMAGE, codesign_config_arn=None, stack_path="", ), ), - ("LambdaFuncWithImage3", None), # imageuri is a ecr location, ignored - ( - "LambdaFuncWithInlineCode", - Function( - name="LambdaFuncWithInlineCode", - functionname="LambdaFuncWithInlineCode", - runtime="nodejs4.3", - handler="index.handler", - codeuri=None, - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[], - events=None, - metadata=None, - inlinecode="testcode", - codesign_config_arn=None, - imageuri=None, - imageconfig=None, - packagetype=ZIP, - stack_path="", - ), - ), ( "LambdaFuncWithLocalPath", Function( @@ -593,13 +494,11 @@ def test_get_all_must_return_all_functions(self): result = {posixpath.join(f.stack_path, f.name) for f in self.provider.get_all()} expected = { "SamFunctions", - "SamFuncWithImage1", - "SamFuncWithImage2", "SamFuncWithInlineCode", + "SamFunc4", "SamFuncWithFunctionNameOverride", - "LambdaFuncWithImage1", - "LambdaFuncWithImage2", "LambdaFuncWithInlineCode", + "LambdaFunc2", "LambdaFuncWithLocalPath", "LambdaFuncWithFunctionNameOverride", "LambdaFuncWithCodeSignConfig", diff --git 
a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index 1d943c169c..b6ba2b9891 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -604,7 +604,6 @@ def test_deploy(self, do_cli_mock, get_template_artifacts_format_mock): False, "samconfig.toml", "default", - False, ) @patch("samcli.commands.deploy.command.do_cli") @@ -713,7 +712,6 @@ def test_deploy_different_parameter_override_format(self, do_cli_mock, get_templ False, "samconfig.toml", "default", - False, ) @patch("samcli.commands.logs.command.do_cli") diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py deleted file mode 100644 index f395ebee95..0000000000 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_builder.py +++ /dev/null @@ -1,93 +0,0 @@ -from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder -from unittest import TestCase -from unittest.mock import Mock, patch - - -class TestCompanionStackBuilder(TestCase): - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") - def test_building_single_function(self, ecr_repo_mock): - companion_stack_name = "CompanionStackA" - function_a = "FunctionA" - - repo_logical_id = "RepoLogicalIDA" - repo_physical_id = "RepoPhysicalIDA" - repo_output_id = "RepoOutputIDA" - - ecr_repo_instance = ecr_repo_mock.return_value - ecr_repo_instance.logical_id = repo_logical_id - ecr_repo_instance.physical_id = repo_physical_id - ecr_repo_instance.output_logical_id = repo_output_id - - companion_stack = Mock() - companion_stack.stack_name = companion_stack_name - builder = CompanionStackBuilder(companion_stack) - - builder.add_function(function_a) - template = builder.build() - self.assertIn(f'"{repo_logical_id}":', template) - self.assertIn(f'"RepositoryName": "{repo_physical_id}"', 
template) - self.assertIn(f'"{repo_output_id}":', template) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") - def test_building_multiple_functions(self, ecr_repo_mock): - companion_stack_name = "CompanionStackA" - function_prefix = "Function" - function_names = ["A", "B", "C", "D", "E", "F"] - - repo_logical_id_prefix = "RepoLogicalID" - repo_physical_id_prefix = "RepoPhysicalID" - repo_output_id_prefix = "RepoOutputID" - - ecr_repo_instances = list() - for function_name in function_names: - ecr_repo_instance = Mock() - ecr_repo_instance.logical_id = repo_logical_id_prefix + function_name - ecr_repo_instance.physical_id = repo_physical_id_prefix + function_name - ecr_repo_instance.output_logical_id = repo_output_id_prefix + function_name - ecr_repo_instances.append(ecr_repo_instance) - - ecr_repo_mock.side_effect = ecr_repo_instances - - companion_stack = Mock() - companion_stack.stack_name = companion_stack_name - builder = CompanionStackBuilder(companion_stack) - - for function_name in function_names: - builder.add_function(function_prefix + function_name) - template = builder.build() - for function_name in function_names: - self.assertIn(f'"{repo_logical_id_prefix + function_name}":', template) - self.assertIn(f'"RepositoryName": "{repo_physical_id_prefix + function_name}"', template) - self.assertIn(f'"{repo_output_id_prefix + function_name}":', template) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_builder.ECRRepo") - def test_mapping_multiple_functions(self, ecr_repo_mock): - companion_stack_name = "CompanionStackA" - function_prefix = "Function" - function_names = ["A", "B", "C", "D", "E", "F"] - - repo_logical_id_prefix = "RepoLogicalID" - repo_physical_id_prefix = "RepoPhysicalID" - repo_output_id_prefix = "RepoOutputID" - - ecr_repo_instances = list() - for function_name in function_names: - ecr_repo_instance = Mock() - ecr_repo_instance.logical_id = repo_logical_id_prefix + function_name - 
ecr_repo_instance.physical_id = repo_physical_id_prefix + function_name - ecr_repo_instance.output_logical_id = repo_output_id_prefix + function_name - ecr_repo_instances.append(ecr_repo_instance) - - ecr_repo_mock.side_effect = ecr_repo_instances - - companion_stack = Mock() - companion_stack.stack_name = companion_stack_name - builder = CompanionStackBuilder(companion_stack) - - for function_name in function_names: - builder.add_function(function_prefix + function_name) - for function_name in function_names: - self.assertIn( - (function_prefix + function_name, ecr_repo_instances[function_names.index(function_name)]), - builder.repo_mapping.items(), - ) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py deleted file mode 100644 index 71afef6467..0000000000 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py +++ /dev/null @@ -1,253 +0,0 @@ -from botocore.exceptions import ClientError -from samcli.lib.bootstrap.companion_stack.companion_stack_manager import CompanionStackManager -from unittest import TestCase -from unittest.mock import ANY, Mock, patch - - -class TestCompanionStackManager(TestCase): - def setUp(self): - self.stack_name = "StackA" - self.companion_stack_name = "CompanionStackA" - - self.boto3_client_patch = patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.boto3.client") - self.boto3_client_mock = self.boto3_client_patch.start() - - self.companion_stack_patch = patch( - "samcli.lib.bootstrap.companion_stack.companion_stack_manager.CompanionStack" - ) - self.companion_stack_mock = self.companion_stack_patch.start() - - self.companion_stack_builder_patch = patch( - "samcli.lib.bootstrap.companion_stack.companion_stack_manager.CompanionStackBuilder" - ) - self.companion_stack_builder_mock = self.companion_stack_builder_patch.start() - - self.cfn_client = Mock() - self.ecr_client = Mock() - 
self.s3_client = Mock() - self.sts_client = Mock() - - self.companion_stack_mock.return_value.stack_name = self.companion_stack_name - self.boto3_client_mock.side_effect = [self.cfn_client, self.ecr_client, self.s3_client, self.sts_client] - self.manager = CompanionStackManager(self.stack_name, "region", "s3_bucket", "s3_prefix") - - def tearDown(self): - self.boto3_client_patch.stop() - self.companion_stack_patch.stop() - self.companion_stack_builder_patch.stop() - - def test_set_functions(self): - function_a = "FunctionA" - function_b = "FunctionB" - - self.manager.set_functions([function_a, function_b]) - - self.companion_stack_builder_mock.return_value.clear_functions.assert_called_once() - self.companion_stack_builder_mock.return_value.add_function.assert_any_call(function_a) - self.companion_stack_builder_mock.return_value.add_function.assert_any_call(function_b) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile") - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader") - def test_create_companion_stack( - self, - s3_uploader_mock, - mktempfile_mock, - ): - cfn_waiter = Mock() - self.cfn_client.get_waiter.return_value = cfn_waiter - - self.manager.does_companion_stack_exist = lambda: False - - self.manager.update_companion_stack() - - self.companion_stack_builder_mock.return_value.build.assert_called_once() - s3_uploader_mock.return_value.upload_with_dedup.assert_called_once() - self.cfn_client.create_stack.assert_called_once_with( - StackName=self.companion_stack_name, TemplateURL=ANY, Capabilities=ANY - ) - self.cfn_client.get_waiter.assert_called_once_with("stack_create_complete") - cfn_waiter.wait.assert_called_once_with(StackName=self.companion_stack_name, WaiterConfig=ANY) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile") - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader") - def test_update_companion_stack( - self, - 
s3_uploader_mock, - mktempfile_mock, - ): - cfn_waiter = Mock() - self.cfn_client.get_waiter.return_value = cfn_waiter - - self.manager.does_companion_stack_exist = lambda: True - - self.manager.update_companion_stack() - - self.companion_stack_builder_mock.return_value.build.assert_called_once() - s3_uploader_mock.return_value.upload_with_dedup.assert_called_once() - self.cfn_client.update_stack.assert_called_once_with( - StackName=self.companion_stack_name, TemplateURL=ANY, Capabilities=ANY - ) - self.cfn_client.get_waiter.assert_called_once_with("stack_update_complete") - cfn_waiter.wait.assert_called_once_with(StackName=self.companion_stack_name, WaiterConfig=ANY) - - def test_delete_companion_stack(self): - cfn_waiter = Mock() - self.cfn_client.get_waiter.return_value = cfn_waiter - - self.manager.delete_companion_stack() - - self.cfn_client.delete_stack.assert_called_once_with(StackName=self.companion_stack_name) - self.cfn_client.get_waiter.assert_called_once_with("stack_delete_complete") - cfn_waiter.wait.assert_called_once_with(StackName=self.companion_stack_name, WaiterConfig=ANY) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.boto3.resource") - def test_list_deployed_repos(self, boto3_resource_mock, ecr_repo_mock): - repo_a = "ECRRepoA" - repo_b = "ECRRepoB" - - resource_a = Mock() - resource_a.resource_type = "AWS::ECR::Repository" - resource_a.logical_resource_id = repo_a - resource_b = Mock() - resource_b.resource_type = "AWS::ECR::Repository" - resource_b.logical_resource_id = repo_b - resource_c = Mock() - resource_c.resource_type = "RandomResource" - resources = [resource_a, resource_b, resource_c] - boto3_resource_mock.return_value.Stack.return_value.resource_summaries.all.return_value = resources - - self.manager.does_companion_stack_exist = lambda: True - - repos = self.manager.list_deployed_repos() - self.assertTrue(len(repos) == 2) - 
ecr_repo_mock.assert_any_call(logical_id=repo_a, physical_id=ANY) - ecr_repo_mock.assert_any_call(logical_id=repo_b, physical_id=ANY) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.boto3.resource") - def test_list_deployed_repos_does_not_exist(self, boto3_resource_mock, ecr_repo_mock): - repo_a = "ECRRepoA" - repo_b = "ECRRepoB" - - resource_a = Mock() - resource_a.resource_type = "AWS::ECR::Repository" - resource_a.logical_resource_id = repo_a - resource_b = Mock() - resource_b.resource_type = "AWS::ECR::Repository" - resource_b.logical_resource_id = repo_b - resource_c = Mock() - resource_c.resource_type = "RandomResource" - resources = [resource_a, resource_b, resource_c] - boto3_resource_mock.return_value.Stack.return_value.resource_summaries.all.return_value = resources - - self.manager.does_companion_stack_exist = lambda: False - - repos = self.manager.list_deployed_repos() - self.assertEqual(repos, []) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") - def test_get_unreferenced_repos(self, ecr_repo_mock): - repo_a_id = "ECRRepoA" - repo_b_id = "ECRRepoB" - - current_repo_a = Mock() - current_repo_a.logical_id = repo_a_id - current_repos = {"FunctionA": current_repo_a} - - repo_a = Mock() - repo_a.logical_id = repo_a_id - repo_b = Mock() - repo_b.logical_id = repo_b_id - deployed_repos = [repo_a, repo_b] - - self.manager.does_companion_stack_exist = lambda: True - self.manager.list_deployed_repos = lambda: deployed_repos - self.companion_stack_builder_mock.return_value.repo_mapping = current_repos - - unreferenced_repos = self.manager.get_unreferenced_repos() - self.assertEqual(len(unreferenced_repos), 1) - self.assertEqual(unreferenced_repos[0].logical_id, repo_b_id) - - @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.ECRRepo") - def test_get_unreferenced_repos_does_not_exist(self, ecr_repo_mock): - 
repo_a_id = "ECRRepoA" - repo_b_id = "ECRRepoB" - - current_repo_a = Mock() - current_repo_a.logical_id = repo_a_id - current_repos = {"FunctionA": current_repo_a} - - repo_a = Mock() - repo_a.logical_id = repo_a_id - repo_b = Mock() - repo_b.logical_id = repo_b_id - deployed_repos = [repo_a, repo_b] - - self.manager.does_companion_stack_exist = lambda: False - self.manager.list_deployed_repos = lambda: deployed_repos - self.companion_stack_builder_mock.return_value.repo_mapping = current_repos - - unreferenced_repos = self.manager.get_unreferenced_repos() - self.assertEqual(unreferenced_repos, []) - - def test_delete_unreferenced_repos(self): - repo_a_id = "ECRRepoA" - repo_b_id = "ECRRepoB" - - repo_a = Mock() - repo_a.physical_id = repo_a_id - repo_b = Mock() - repo_b.physical_id = repo_b_id - unreferenced_repos = [repo_a, repo_b] - - self.manager.get_unreferenced_repos = lambda: unreferenced_repos - - self.manager.delete_unreferenced_repos() - - self.ecr_client.delete_repository.assert_any_call(repositoryName=repo_a_id, force=True) - self.ecr_client.delete_repository.assert_any_call(repositoryName=repo_b_id, force=True) - - def test_sync_repos_exists(self): - self.manager.does_companion_stack_exist = lambda: True - self.manager.get_repository_mapping = lambda: {"a": ""} - self.manager.delete_unreferenced_repos = Mock() - self.manager.update_companion_stack = Mock() - self.manager.delete_companion_stack = Mock() - - self.manager.sync_repos() - self.manager.delete_unreferenced_repos.assert_called_once() - self.manager.delete_companion_stack.assert_not_called() - self.manager.update_companion_stack.assert_called_once() - - def test_sync_repos_exists_with_no_repo(self): - self.manager.does_companion_stack_exist = lambda: True - self.manager.get_repository_mapping = lambda: {} - self.manager.delete_unreferenced_repos = Mock() - self.manager.update_companion_stack = Mock() - self.manager.delete_companion_stack = Mock() - - self.manager.sync_repos() - 
self.manager.delete_unreferenced_repos.assert_called_once() - self.manager.delete_companion_stack.assert_called_once() - self.manager.update_companion_stack.assert_not_called() - - def test_sync_repos_does_not_exist(self): - self.manager.does_companion_stack_exist = lambda: False - self.manager.get_repository_mapping = lambda: {"a": ""} - self.manager.delete_unreferenced_repos = Mock() - self.manager.update_companion_stack = Mock() - self.manager.delete_companion_stack = Mock() - - self.manager.sync_repos() - self.manager.delete_unreferenced_repos.assert_not_called() - self.manager.delete_companion_stack.assert_not_called() - self.manager.update_companion_stack.assert_called_once() - - def test_does_companion_stack_exist_true(self): - self.cfn_client.describe_stacks.return_value = {"a": "a"} - self.assertTrue(self.manager.does_companion_stack_exist()) - - def test_does_companion_stack_exist_false(self): - self.cfn_client.describe_stacks.side_effect = ClientError({}, Mock()) - self.assertFalse(self.manager.does_companion_stack_exist()) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py deleted file mode 100644 index 89a3e5b788..0000000000 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager_helper.py +++ /dev/null @@ -1,51 +0,0 @@ -from samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper import CompanionStackManagerHelper -from unittest import TestCase -from unittest.mock import Mock, patch - - -class TestCompanionStackManagerHelper(TestCase): - def setUp(self): - self.stack_name = "stackname" - self.function_a_id = "FunctionA" - self.function_b_id = "FunctionB" - self.function_c_id = "FunctionC" - self.get_template_function_resource_ids_patch = patch( - "samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper.get_template_function_resource_ids" - ) - 
self.get_template_function_resource_ids_mock = self.get_template_function_resource_ids_patch.start() - self.get_template_function_resource_ids_mock.return_value = [self.function_a_id, self.function_b_id] - - self.companion_stack_manager_patch = patch( - "samcli.lib.bootstrap.companion_stack.companion_stack_manager_helper.CompanionStackManager" - ) - self.companion_stack_manager_mock = self.companion_stack_manager_patch.start().return_value - self.companion_stack_manager_mock.list_deployed_repos.return_value = [] - self.companion_stack_manager_mock.get_repo_uri.return_value = "" - self.companion_stack_manager_mock.is_repo_uri.return_value = True - self.companion_stack_manager_mock.get_unreferenced_repos.return_value = [Mock()] - - self.manager_helper = CompanionStackManagerHelper( - self.stack_name, "region", "s3_bucket", "s3_prefix", "template_file", {} - ) - - def tearDown(self): - self.get_template_function_resource_ids_patch.stop() - self.companion_stack_manager_patch.stop() - - def test_init(self): - self.assertEqual(self.manager_helper.missing_repo_functions, [self.function_a_id, self.function_b_id]) - self.assertEqual(len(self.manager_helper.unreferenced_repos), 1) - - def test_update_specified_image_repos(self): - self.manager_helper.update_specified_image_repos({"FunctionA": "abc"}) - self.assertEqual(self.manager_helper.missing_repo_functions, [self.function_b_id]) - self.assertEqual(len(self.manager_helper.unreferenced_repos), 1) - - def test_remove_unreferenced_repos_from_mapping(self): - self.companion_stack_manager_mock.get_repo_uri = lambda x: "repo_uri" - - image_repositories = {self.function_a_id: "a", self.function_b_id: "b", self.function_c_id: "repo_uri"} - init_image_repositories = image_repositories.copy() - output_image_repositories = self.manager_helper.remove_unreferenced_repos_from_mapping(image_repositories) - self.assertEqual(init_image_repositories, image_repositories) - self.assertEqual(output_image_repositories, {self.function_a_id: 
"a", self.function_b_id: "b"}) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_data_types.py b/tests/unit/lib/bootstrap/companion_stack/test_data_types.py deleted file mode 100644 index 4e6e6fd954..0000000000 --- a/tests/unit/lib/bootstrap/companion_stack/test_data_types.py +++ /dev/null @@ -1,63 +0,0 @@ -from samcli.lib.bootstrap.companion_stack.data_types import CompanionStack, ECRRepo -from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStackBuilder -from unittest import TestCase -from unittest.mock import Mock, patch - - -class TestCompanionStack(TestCase): - def setUp(self): - self.check_sum = "checksum" - self.parent_stack_name = "Parent-Stack" - self.check_sum_patch = patch("samcli.lib.bootstrap.companion_stack.data_types.str_checksum") - self.check_sum_mock = self.check_sum_patch.start() - self.check_sum_mock.return_value = self.check_sum - self.companion_stack = CompanionStack(self.parent_stack_name) - - def tearDown(self): - self.check_sum_patch.stop() - - def test_parent_stack_name(self): - self.assertEqual(self.companion_stack.parent_stack_name, self.parent_stack_name) - - def test_escaped_parent_stack_name(self): - self.assertEqual(self.companion_stack.escaped_parent_stack_name, "parentstack") - - def test_parent_stack_hash(self): - self.assertEqual(self.companion_stack.parent_stack_hash, "checksum") - - def test_stack_name(self): - self.assertEqual(self.companion_stack.stack_name, "Parent-Stack-checksum-CompanionStack") - - -class TestECRRepo(TestCase): - def setUp(self): - self.check_sum = "qwertyuiop" - self.parent_stack_name = "Parent-Stack" - self.function_id = "FunctionA" - - self.check_sum_patch = patch("samcli.lib.bootstrap.companion_stack.data_types.str_checksum") - self.check_sum_mock = self.check_sum_patch.start() - self.check_sum_mock.return_value = self.check_sum - - self.companion_stack_mock = Mock() - self.companion_stack_mock.escaped_parent_stack_name = "parentstackname" - 
self.companion_stack_mock.parent_stack_hash = "abcdefghijklmn" - self.ecr_repo = ECRRepo(companion_stack=self.companion_stack_mock, function_logical_id=self.function_id) - - def tearDown(self): - self.check_sum_patch.stop() - - def test_logical_id(self): - self.assertEqual(self.ecr_repo.logical_id, "FunctionAqwertyuiRepo") - - def test_physical_id(self): - self.assertEqual(self.ecr_repo.physical_id, "parentstacknameabcdefgh/functionaqwertyuirepo") - - def test_output_logical_id(self): - self.assertEqual(self.ecr_repo.output_logical_id, "FunctionAqwertyuiOut") - - def test_get_repo_uri(self): - self.assertEqual( - self.ecr_repo.get_repo_uri("12345", "us-west-2"), - "12345.dkr.ecr.us-west-2.amazonaws.com/parentstacknameabcdefgh/functionaqwertyuirepo", - ) diff --git a/tests/unit/lib/build_module/test_build_strategy.py b/tests/unit/lib/build_module/test_build_strategy.py index 1fae5b7962..7e9902a172 100644 --- a/tests/unit/lib/build_module/test_build_strategy.py +++ b/tests/unit/lib/build_module/test_build_strategy.py @@ -1,4 +1,3 @@ -from copy import deepcopy from unittest import TestCase from unittest.mock import Mock, patch, MagicMock, call, ANY @@ -219,15 +218,11 @@ def test_build_single_function_definition_image_functions_with_same_metadata(sel function2.name = "Function2" function2.full_path = "Function2" function2.packagetype = IMAGE - build_definition = FunctionBuildDefinition("3.7", "codeuri", IMAGE, {}, env_vars={"FOO": "BAR"}) + build_definition = FunctionBuildDefinition("3.7", "codeuri", IMAGE, {}) # since they have the same metadata, they are put into the same build_definition. 
build_definition.functions = [function1, function2] - with patch("samcli.lib.build.build_strategy.deepcopy", wraps=deepcopy) as patched_deepcopy: - result = default_build_strategy.build_single_function_definition(build_definition) - - patched_deepcopy.assert_called_with(build_definition.env_vars) - + result = default_build_strategy.build_single_function_definition(build_definition) # both of the function name should show up in results self.assertEqual(result, {"Function": built_image, "Function2": built_image}) diff --git a/tests/unit/lib/cli_validation/test_image_repository_validation.py b/tests/unit/lib/cli_validation/test_image_repository_validation.py index 9df0e83727..9773cbc9d0 100644 --- a/tests/unit/lib/cli_validation/test_image_repository_validation.py +++ b/tests/unit/lib/cli_validation/test_image_repository_validation.py @@ -131,10 +131,7 @@ def test_image_repository_validation_failure_IMAGE_missing_image_repositories( with self.assertRaises(click.BadOptionUsage) as ex: self.foobar() - self.assertIn( - "Missing option '--image-repository', '--image-repositories', or '--resolve-image-repos'", - ex.exception.message, - ) + self.assertIn("Missing option '--image-repository' or '--image-repositories'", ex.exception.message) @patch("samcli.lib.cli_validation.image_repository_validation.click") @patch("samcli.lib.cli_validation.image_repository_validation.get_template_function_resource_ids") From 203ea3a1e9eaa02f8020166ac24efc5a72fded2e Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 19 Jul 2021 22:13:46 -0400 Subject: [PATCH 106/121] Added unit test for delete ecr repository --- samcli/lib/package/ecr_uploader.py | 1 + tests/unit/lib/package/test_ecr_uploader.py | 20 +++++++++++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 56c780fd71..c4062daa78 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -148,6 +148,7 
@@ def delete_ecr_repository(self, physical_id: str): except self.ecr_client.exceptions.RepositoryNotFoundException: # If the repository is empty, cloudformation automatically deletes # the repository when cf_client.delete_stack is called. + LOG.debug("Could not find repository %s", physical_id) pass @staticmethod diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 68ca5a5ec7..4d6c9f4e46 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -178,7 +178,7 @@ def test_upload_failure_while_streaming(self): ecr_uploader.upload(image, resource_name="HelloWorldFunction") @patch("samcli.lib.package.ecr_uploader.click.echo") - def test_delete_artifact_no_image_error(self, patched_click_echo): + def test_delete_artifact_no_image_found(self, patched_click_echo): ecr_uploader = ECRUploader( docker_client=self.docker_client, ecr_client=self.ecr_client, @@ -246,6 +246,24 @@ def test_delete_artifact_client_error(self): image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name ) + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_ecr_repository(self, patched_click_echo): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.delete_repository = MagicMock() + + ecr_uploader.delete_ecr_repository(physical_id=self.ecr_repo) + + expected_click_echo_calls = [ + call(f"\t- Deleting ECR repository {self.ecr_repo}"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + def test_parse_image_url(self): valid = [ From 0b25fbb6c51151b05bf8a88eab3730336466fc1e Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 20 Jul 2021 12:43:01 -0400 Subject: [PATCH 107/121] Fixed small string nits and added docstring for ECRResource --- 
samcli/commands/delete/delete_context.py | 11 +++++------ samcli/lib/delete/cf_utils.py | 1 - samcli/lib/package/ecr_uploader.py | 3 +-- samcli/lib/package/packageable_resources.py | 6 ++++++ 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 22fa1df7d2..587005f96c 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -136,7 +136,7 @@ def s3_prompts(self): self.delete_artifacts_folder = confirm( click.style( "\tAre you sure you want to delete the folder" - + f" {self.s3_prefix} in S3 which contains the artifacts?", + f" {self.s3_prefix} in S3 which contains the artifacts?", bold=True, ), default=False, @@ -164,7 +164,7 @@ def ecr_companion_stack_prompts(self): delete_ecr_companion_stack_prompt = confirm( click.style( "\tDo you you want to delete the ECR companion stack" - + f" {self.companion_stack_name} in the region {self.region} ?", + f" {self.companion_stack_name} in the region {self.region} ?", bold=True, ), default=False, @@ -190,7 +190,7 @@ def ecr_repos_prompts(self, template: Template): delete_repo = confirm( click.style( f"\tECR repository {repo_name}" - + " may not be empty. Do you want to delete the repository and all the images in it ?", + " may not be empty. 
Do you want to delete the repository and all the images in it ?", bold=True, ), default=False, @@ -317,8 +317,7 @@ def run(self): if not self.no_prompts: delete_stack = confirm( click.style( - f"\tAre you sure you want to delete the stack {self.stack_name}" - + f" in the region {self.region} ?", + f"\tAre you sure you want to delete the stack {self.stack_name}" f" in the region {self.region} ?", bold=True, ), default=False, @@ -335,5 +334,5 @@ def run(self): LOG.debug("Input stack does not exists on Cloudformation") click.echo( f"Error: The input stack {self.stack_name} does" - + f" not exist on Cloudformation in the region {self.region}" + f" not exist on Cloudformation in the region {self.region}" ) diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 37fab22eea..d418306e00 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -16,7 +16,6 @@ class CfUtils: def __init__(self, cloudformation_client): self._client = cloudformation_client - # self._resource_client = cloudformation_resource_client def has_stack(self, stack_name: str) -> bool: """ diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index c4062daa78..c9546aa93e 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -114,7 +114,7 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): image_details = resp["failures"][0] if image_details["failureCode"] == "ImageNotFound": LOG.debug( - "Could not delete image for %s" " parameter of %s resource as it does not exist. \n", + "Could not delete image for %s parameter of %s resource as it does not exist. \n", property_name, resource_id, ) @@ -149,7 +149,6 @@ def delete_ecr_repository(self, physical_id: str): # If the repository is empty, cloudformation automatically deletes # the repository when cf_client.delete_stack is called. 
LOG.debug("Could not find repository %s", physical_id) - pass @staticmethod def parse_image_url(image_uri: str) -> Dict: diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 592c3d73be..808468a670 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -519,6 +519,12 @@ class CloudFormationResourceVersionSchemaHandlerPackage(ResourceZip): class ECRResource(Resource): + """ + Represents CloudFormation resources ECR for deleting the ECR + repository with the property name RepositoryName. This class is used + only for deleting the repository and not exporting anything. + """ + RESOURCE_TYPE = AWS_ECR_REPOSITORY PROPERTY_NAME = RESOURCES_WITH_IMAGE_COMPONENT[RESOURCE_TYPE][0] ARTIFACT_TYPE = ZIP From 37a65103cb92b69290bc8d693971e8e288b76c4f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 20 Jul 2021 15:02:17 -0400 Subject: [PATCH 108/121] Added some unit tests for s3_uploader, ecr_uploader and delete_context --- .../commands/delete/test_delete_context.py | 28 +++++++++++++++-- tests/unit/lib/package/test_ecr_uploader.py | 25 +++++++++++++++ tests/unit/lib/package/test_s3_uploader.py | 31 ++++++++++++++----- 3 files changed, 74 insertions(+), 10 deletions(-) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index efefe1644f..c5b77a47f2 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -59,7 +59,7 @@ def test_delete_context_enter(self): ) ), ) - @patch("samcli.commands.deploy.guided_context.click.get_current_context") + @patch("samcli.commands.delete.delete_context.click.get_current_context") def test_delete_context_parse_config_file(self, patched_click_get_current_context): patched_click_get_current_context = MagicMock() with DeleteContext( @@ -76,6 +76,30 @@ def test_delete_context_parse_config_file(self, 
patched_click_get_current_contex self.assertEqual(delete_context.s3_bucket, "s3-bucket") self.assertEqual(delete_context.s3_prefix, "s3-prefix") + @patch("samcli.commands.delete.delete_context.prompt") + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) + def test_delete_no_user_input(self, patched_click_get_current_context, patched_prompt): + patched_click_get_current_context = MagicMock() + with DeleteContext( + stack_name=None, + region=None, + config_file=None, + config_env=None, + profile=None, + no_prompts=True, + ) as delete_context: + delete_context.run() + + patched_prompt.side_effect = ["sam-app"] + + expected_prompt_calls = [ + call(click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING), + ] + + self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) + self.assertEqual(delete_context.region, "us-east-1") + @patch.object( TomlProvider, "__call__", @@ -97,7 +121,7 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) - @patch("samcli.commands.deploy.guided_context.click.get_current_context") + @patch("samcli.commands.delete.delete_context.click.get_current_context") def test_delete_context_valid_execute_run(self, patched_click_get_current_context): patched_click_get_current_context = MagicMock() with DeleteContext( diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 4d6c9f4e46..7a0a2ca540 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -177,6 +177,31 @@ def test_upload_failure_while_streaming(self): with self.assertRaises(DockerPushFailedError): 
ecr_uploader.upload(image, resource_name="HelloWorldFunction") + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_artifact_successful(self, patched_click_echo): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image.return_value = { + "imageIds": [ + {"imageTag": self.tag}, + ], + "failures": [], + } + + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + expected_click_echo_calls = [ + call(f"\t- Deleting ECR image {self.tag} in repository {self.ecr_repo}"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + @patch("samcli.lib.package.ecr_uploader.click.echo") def test_delete_artifact_no_image_found(self, patched_click_echo): ecr_uploader = ECRUploader( diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 55b1bfbb2d..a78a7dcef4 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -172,10 +172,10 @@ def test_s3_upload_no_bucket(self): s3_uploader.upload(f.name, remote_path) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) - def test_s3_delete_artifact(self): + def test_s3_delete_artifact_successfull(self): s3_uploader = S3Uploader( s3_client=self.s3, - bucket_name=None, + bucket_name=self.bucket_name, prefix=self.prefix, kms_key_id=self.kms_key_id, force_upload=self.force_upload, @@ -183,14 +183,14 @@ def test_s3_delete_artifact(self): ) self.s3.delete_object = MagicMock() self.s3.head_object = MagicMock() - with self.assertRaises(BucketNotSpecifiedError) as ex: - with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: - self.assertTrue(s3_uploader.delete_artifact(f.name)) + + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + 
self.assertTrue(s3_uploader.delete_artifact(f.name)) def test_s3_delete_non_existant_artifact(self): s3_uploader = S3Uploader( s3_client=self.s3, - bucket_name=None, + bucket_name=self.bucket_name, prefix=self.prefix, kms_key_id=self.kms_key_id, force_upload=self.force_upload, @@ -198,9 +198,24 @@ def test_s3_delete_non_existant_artifact(self): ) self.s3.delete_object = MagicMock() self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object")) - with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertFalse(s3_uploader.delete_artifact(f.name)) + + def test_s3_delete_artifact_client_error(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + self.s3.delete_object = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "ClientError"}}, operation_name="delete_object") + ) + with self.assertRaises(ClientError): with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: - self.assertFalse(s3_uploader.delete_artifact(f.name)) + s3_uploader.delete_artifact(f.name) def test_s3_delete_artifact_no_bucket(self): s3_uploader = S3Uploader( From 0626f9463f565acebc5b733d16007fd4f666413f Mon Sep 17 00:00:00 2001 From: _sam <3804518+aahung@users.noreply.github.com> Date: Tue, 20 Jul 2021 14:02:07 -0700 Subject: [PATCH 109/121] feat: Add SAM Pipeline commands (#3085) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * sam pipeline bootstrap (#2811) * two-stages-pipeline plugin * typos * add docstring * make mypy happy * removing swap file * delete the two_stages_pipeline plugin as the pipeline-bootstrap command took over its responsibility * remove 'get_template_function_runtimes' function as the decision is made to not process the SAM template 
during pipeline init which was the only place we use the function * sam pipeline bootstrap command * move the pipelineconfig.toml file to .aws-sam * UX - rewriting Co-authored-by: Chris Rehn * UX improvements * make black happy * apply review comments * UX - rewriting Co-authored-by: Chris Rehn * refactor * Apply review comments * use python way of array elements assignments * Update samcli/lib/pipeline/bootstrap/stage.py Co-authored-by: _sam <3804518+aahung@users.noreply.github.com> * apply review comments * typo * read using utf-8 * create and use a safe version of the save_config method * apply review comments * rename _get_command_name to _get_command_names * don't save generated ARNs for now, will save during init * Revert "don't save generated ARNs for now, will save during init" This reverts commit d184e164022d9560131c62a826436edbc93da189. * Notify the user to periodically rotate the IAM credentials * typo * Use AES instead of KMS for S3 SSE * rename Ecr to ECR and Iam to IAM * Grant lambda service explicit permissions to the ECR instead of relying on giving these permissions ad-hoc while creating the container images Co-authored-by: Chris Rehn Co-authored-by: _sam <3804518+aahung@users.noreply.github.com> * sam pipeline init command (#2831) * sam pipeline init command * apply review comments * apply review comments * display a message that we have successfully created the pipeline configuration file(s). * doc typo * Let 'sam pipeline init' prefills pipeline's infrastructure resources… (#2894) * Let 'sam pipeline init' prefills pipeline's infrastructure resources' values from 'sam pipeline bootstrap' results. * save bootstrapped stage region * make black happy * exclude non-dict keys from samconfig.get_env_names method.
* Rename the pipeline 'Stage' concept to 'Environment' (#2908) * Rename the pipeline 'Stage' concept to 'Environment' * typo * Rename --environment-name argument to --environment * Sam pipelines ux rename ecr repo to image repository (#2910) * Rename ecr-repo to image-repository * UT Fixes * typo * typo * feat: Support creating pipeline files directly into . without hooks (#2911) * feat: Support creating pipeline files directly into . without hooks * Integration test for pipeline init and pipeline bootstrap (#2841) * Expose Environment._get_stack_name for integ test to predict stack name * Add integ test for pipeline bootstrap * Add init integ test * small UX improvements: (#2914) * small UX improvements: 1. show a message when the user cancels a bootstrapping command. 2. Don't prompt for CI/CD provider or provider templates if there is only one choice. 3. Make PipelineFileAlreadyExistsError a UserError. 4. use the Colored class instead of fg='color' when prompting a colored message. 5. Fix a bug where we were not allowing empty response for not required questions. * Fix Integration Test: We now don't ask the user to select a provider's pipeline template if there is only one * Add docs for PipelineFileAlreadyExistsError * make black happy * Sam pipelines s3 security (#2975) * Deny non https requests for the artifacts S3 bucket * enable bucket serverside logging * add integration tests for artifacts bucket SSL-only requests and access logging * typo * Ensure the ArtifactsLoggingBucket denies non ssl requests (#2976) * Sam pipelines ux round 3 (#2979) * rename customer facing message 'CI/CD provider' to 'CI/CD system' * add a note about what 'Environment Name' is during the pipeline bootstrap guided context * Apply suggestions from code review typo Co-authored-by: Chris Rehn Co-authored-by: Chris Rehn * let pipeline IAM user assume only IAM roles tagged with Role=pipeline-execution-role (#2982) * Adding AWS_ prefix to displayed out. 
(#2993) Co-authored-by: Tarun Mall * Add region to pipeline bootstrap interactive flow (#2997) * Ask AWS region in bootstrap interactive flow * Read default region from boto session first * Fix a unit test * Inform write to pipelineconfig.toml at the end of bootstrap (#3002) * Print info about pipelineconfig.toml after resources are bootstrapped * Update samcli/commands/pipeline/bootstrap/cli.py Co-authored-by: Chris Rehn Co-authored-by: Chris Rehn * List detected env names in pipeline init when prompt to input the env name (#3000) * Allow question.question can be resolved using key path * Pass the list of env names message (environment_names_message) into pipeline init interactive flow context * Update samcli/commands/pipeline/init/interactive_init_flow.py Co-authored-by: Chris Rehn * Fix unit test (trigger pr builds) * Fix integ test * Fix integ test Co-authored-by: Chris Rehn * Adding account id to bootstrap message. (#2998) * Adding account id to bootstrap message. * adding docstring * Addressing PR comments. * Adding unit tests. * Fixing unit tests. Co-authored-by: Tarun Mall * Cfn creds fix (#3014) * Removing pipeline user creds from cfn output. This maintains same user exp. 
Co-authored-by: Tarun Mall * Ux bootstrap revamp 20210706 (#3021) * Add intro paragraph to bootstrap * Add switch account prompt * Revamp stage definition prompt * Revamp existing resources prompt * Revamp security prompt * Allow answers to be changed later * Add exit message for bootstrap * Add exit message for bootstrap (1) * Add indentation to review values * Add "Below is the summary of the answers:" * Sweep pylint errors * Update unit tests * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/cli.py Co-authored-by: Chris Rehn * Update unit tests * Add bold to other literals Co-authored-by: Chris Rehn * Adding account condition for CFN execution role. (#3027) Co-authored-by: Tarun Mall * pipeline UX revamp 20210707 (#3031) * Allow running bootstrap inside pipeline init * Select account credential source within bootstrap * Add bootstrap decorations within pipeline init * Removing ip range option from bootstrap. (#3036) * Removing ip range option from bootstrap. * Fixing unit test from UX PR. Co-authored-by: Tarun Mall * Fix toml file incorrect read/write in init --bootstrap (#3037) * Temporarily removing account fix. 
(#3038) Co-authored-by: Tarun Mall * Rename environment to stage (#3040) * Improve account source selection (#3042) * Fixing various cosmetics UX issues with pipeline workflow. (#3046) * Fixing credential to credentials * Forcing text color to yellow. * Adding new line after stage diagram. * Adding extra line after checking bootstrap message. * Renaming config -> configuration * account source -> credential source * Removing old message. * Fixing indentation in list. * Fixing bunch of indentation. * fixing f string Co-authored-by: Tarun Mall * Auto skip questions if stage detected (#3045) * Autofill question if default value is presented * Allow to use index to select stage names (#3051) * Updating message when bootstrap stages are missing. (#3058) * Updating message when bootstrap stages are missing. * Fixing indentation Co-authored-by: Tarun Mall * Fixing bootstrap integ tests. (#3061) * Fixing bootstrap integ tests. * Cleaning up some integ tests. * Using environment variables when running integ test on CI. * Using expression instead of full loop. * Adding instruction to use default profile on local. Co-authored-by: Tarun Mall * Fix bootstrap test region (#3064) * Fix bootstrap region in integ test * Fix regions in non-interactive mode as well * Add more pipeline init integ test (#3065) * Fix existing pipeline init integ test * Add more pipeline init integ tests * Config file bug (#3066) * Validating config file after bootstrap stack creation. * Validating config file after bootstrap.
Co-authored-by: Tarun Mall * Fix pipeline init integ test because of pipelineconfig file exists (#3067) * Make stage name randomized to avoid race condition among multi canary runs (#3078) * Load number of stages from pipeline template (#3059) * Load number of stages from templates * Rename variable and add debug log * Add encoding to open() * Allow roles with Tag aws-sam-pipeline-codebuild-service-role to assume PipelineExecutionRole (#2950) * pipeline init UX: Ask to confirm when file exists (#3079) * Ask to confirm overriding if files already exist, or save to another directory * Add doc links (#3087) * Adding accidentally removed tests back. (#3088) Co-authored-by: Tarun Mall Co-authored-by: elbayaaa <72949274+elbayaaa@users.noreply.github.com> Co-authored-by: Chris Rehn Co-authored-by: Ahmed Elbayaa Co-authored-by: Tarun Co-authored-by: Tarun Mall --- mypy.ini | 2 +- samcli/cli/command.py | 1 + samcli/cli/context.py | 4 +- samcli/commands/_utils/template.py | 6 +- samcli/commands/deploy/guided_context.py | 4 +- samcli/commands/exceptions.py | 19 + samcli/commands/pipeline/__init__.py | 0 .../commands/pipeline/bootstrap/__init__.py | 0 samcli/commands/pipeline/bootstrap/cli.py | 238 ++++++++ .../pipeline/bootstrap/guided_context.py | 249 ++++++++ samcli/commands/pipeline/external_links.py | 8 + samcli/commands/pipeline/init/__init__.py | 0 samcli/commands/pipeline/init/cli.py | 51 ++ .../pipeline/init/interactive_init_flow.py | 482 +++++++++++++++ .../init/pipeline_templates_manifest.py | 61 ++ samcli/commands/pipeline/pipeline.py | 21 + samcli/lib/bootstrap/bootstrap.py | 33 +- samcli/lib/config/samconfig.py | 14 +- samcli/lib/cookiecutter/exceptions.py | 4 +- samcli/lib/cookiecutter/interactive_flow.py | 22 +- .../cookiecutter/interactive_flow_creator.py | 6 +- samcli/lib/cookiecutter/processor.py | 2 +- samcli/lib/cookiecutter/question.py | 104 +++- samcli/lib/cookiecutter/template.py | 28 +- samcli/lib/pipeline/__init__.py | 0 
samcli/lib/pipeline/bootstrap/__init__.py | 0 samcli/lib/pipeline/bootstrap/resource.py | 138 +++++ samcli/lib/pipeline/bootstrap/stage.py | 330 ++++++++++ .../pipeline/bootstrap/stage_resources.yaml | 358 +++++++++++ samcli/lib/utils/colors.py | 4 + samcli/lib/utils/defaults.py | 8 + samcli/lib/utils/git_repo.py | 2 +- .../lib/utils/managed_cloudformation_stack.py | 94 ++- samcli/lib/utils/profile.py | 10 + samcli/yamlhelper.py | 10 +- tests/integration/pipeline/__init__.py | 0 tests/integration/pipeline/base.py | 154 +++++ .../pipeline/test_bootstrap_command.py | 380 ++++++++++++ .../integration/pipeline/test_init_command.py | 299 +++++++++ .../custom_template/cookiecutter.json | 4 + .../pipeline/custom_template/metadata.json | 3 + .../pipeline/custom_template/questions.json | 7 + .../{{cookiecutter.outputDir}}/weather | 1 + .../testdata/pipeline/expected_jenkinsfile | 177 ++++++ tests/testing_utils.py | 5 + tests/unit/commands/_utils/test_template.py | 8 +- .../commands/deploy/test_guided_context.py | 6 +- tests/unit/commands/pipeline/__init__.py | 0 .../commands/pipeline/bootstrap/__init__.py | 0 .../commands/pipeline/bootstrap/test_cli.py | 276 +++++++++ .../pipeline/bootstrap/test_guided_context.py | 231 +++++++ tests/unit/commands/pipeline/init/__init__.py | 0 tests/unit/commands/pipeline/init/test_cli.py | 22 + .../init/test_initeractive_init_flow.py | 566 ++++++++++++++++++ .../init/test_pipeline_templates_manifest.py | 82 +++ tests/unit/lib/bootstrap/test_bootstrap.py | 34 +- tests/unit/lib/cookiecutter/test_question.py | 17 +- tests/unit/lib/cookiecutter/test_template.py | 19 +- tests/unit/lib/pipeline/__init__.py | 0 tests/unit/lib/pipeline/bootstrap/__init__.py | 0 .../pipeline/bootstrap/test_environment.py | 425 +++++++++++++ .../lib/pipeline/bootstrap/test_resource.py | 81 +++ tests/unit/lib/samconfig/test_samconfig.py | 34 +- .../test_managed_cloudformation_stack.py | 21 +- 64 files changed, 5052 insertions(+), 113 deletions(-) create mode 100644 
samcli/commands/pipeline/__init__.py create mode 100644 samcli/commands/pipeline/bootstrap/__init__.py create mode 100644 samcli/commands/pipeline/bootstrap/cli.py create mode 100644 samcli/commands/pipeline/bootstrap/guided_context.py create mode 100644 samcli/commands/pipeline/external_links.py create mode 100644 samcli/commands/pipeline/init/__init__.py create mode 100644 samcli/commands/pipeline/init/cli.py create mode 100644 samcli/commands/pipeline/init/interactive_init_flow.py create mode 100644 samcli/commands/pipeline/init/pipeline_templates_manifest.py create mode 100644 samcli/commands/pipeline/pipeline.py create mode 100644 samcli/lib/pipeline/__init__.py create mode 100644 samcli/lib/pipeline/bootstrap/__init__.py create mode 100644 samcli/lib/pipeline/bootstrap/resource.py create mode 100644 samcli/lib/pipeline/bootstrap/stage.py create mode 100644 samcli/lib/pipeline/bootstrap/stage_resources.yaml create mode 100644 samcli/lib/utils/defaults.py create mode 100644 samcli/lib/utils/profile.py create mode 100644 tests/integration/pipeline/__init__.py create mode 100644 tests/integration/pipeline/base.py create mode 100644 tests/integration/pipeline/test_bootstrap_command.py create mode 100644 tests/integration/pipeline/test_init_command.py create mode 100644 tests/integration/testdata/pipeline/custom_template/cookiecutter.json create mode 100644 tests/integration/testdata/pipeline/custom_template/metadata.json create mode 100644 tests/integration/testdata/pipeline/custom_template/questions.json create mode 100644 tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather create mode 100644 tests/integration/testdata/pipeline/expected_jenkinsfile create mode 100644 tests/unit/commands/pipeline/__init__.py create mode 100644 tests/unit/commands/pipeline/bootstrap/__init__.py create mode 100644 tests/unit/commands/pipeline/bootstrap/test_cli.py create mode 100644 tests/unit/commands/pipeline/bootstrap/test_guided_context.py 
create mode 100644 tests/unit/commands/pipeline/init/__init__.py create mode 100644 tests/unit/commands/pipeline/init/test_cli.py create mode 100644 tests/unit/commands/pipeline/init/test_initeractive_init_flow.py create mode 100644 tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py create mode 100644 tests/unit/lib/pipeline/__init__.py create mode 100644 tests/unit/lib/pipeline/bootstrap/__init__.py create mode 100644 tests/unit/lib/pipeline/bootstrap/test_environment.py create mode 100644 tests/unit/lib/pipeline/bootstrap/test_resource.py diff --git a/mypy.ini b/mypy.ini index 497c022c95..f6915caefe 100644 --- a/mypy.ini +++ b/mypy.ini @@ -59,6 +59,6 @@ ignore_missing_imports=True ignore_missing_imports=True # progressive add typechecks and these modules already complete the process, let's keep them clean -[mypy-samcli.commands.build,samcli.lib.build.*,samcli.commands.local.cli_common.invoke_context,samcli.commands.local.lib.local_lambda,samcli.lib.providers.*,samcli.lib.utils.git_repo.py] +[mypy-samcli.commands.build,samcli.lib.build.*,samcli.commands.local.cli_common.invoke_context,samcli.commands.local.lib.local_lambda,samcli.lib.providers.*,samcli.lib.utils.git_repo.py,samcli.lib.cookiecutter.*,samcli.lib.pipeline.*,samcli.commands.pipeline.*] disallow_untyped_defs=True disallow_incomplete_defs=True \ No newline at end of file diff --git a/samcli/cli/command.py b/samcli/cli/command.py index 384529f78b..c135400586 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -21,6 +21,7 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + "samcli.commands.pipeline.pipeline", # We intentionally do not expose the `bootstrap` command for now. 
We might open it up later # "samcli.commands.bootstrap", ] diff --git a/samcli/cli/context.py b/samcli/cli/context.py index a69ebb9ff2..74c35155a1 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -4,7 +4,7 @@ import logging import uuid -from typing import Optional, cast +from typing import Optional, cast, List import boto3 import botocore @@ -186,7 +186,7 @@ def _refresh_session(self): raise CredentialsError(str(ex)) from ex -def get_cmd_names(cmd_name, ctx): +def get_cmd_names(cmd_name, ctx) -> List[str]: """ Given the click core context, return a list representing all the subcommands passed to the CLI diff --git a/samcli/commands/_utils/template.py b/samcli/commands/_utils/template.py index bd9658b55b..08c02836da 100644 --- a/samcli/commands/_utils/template.py +++ b/samcli/commands/_utils/template.py @@ -9,9 +9,6 @@ import yaml from botocore.utils import set_value_from_jmespath -from samcli.commands.exceptions import UserException -from samcli.lib.utils.packagetype import ZIP -from samcli.yamlhelper import yaml_parse, yaml_dump from samcli.commands._utils.resources import ( METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS, @@ -19,6 +16,9 @@ AWS_LAMBDA_FUNCTION, get_packageable_resource_paths, ) +from samcli.commands.exceptions import UserException +from samcli.lib.utils.packagetype import ZIP +from samcli.yamlhelper import yaml_parse, yaml_dump class TemplateNotFoundException(UserException): diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index dafdf0a331..10fd3b6da8 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -6,7 +6,6 @@ from typing import Dict, Any, List import click -from botocore.session import get_session from click import confirm from click import prompt from click.types import FuncParamType @@ -36,6 +35,7 @@ from samcli.lib.providers.sam_function_provider import SamFunctionProvider from samcli.lib.providers.sam_stack_provider 
import SamLocalStackProvider from samcli.lib.utils.colors import Colored +from samcli.lib.utils.defaults import get_default_aws_region from samcli.lib.utils.packagetype import IMAGE LOG = logging.getLogger(__name__) @@ -110,7 +110,7 @@ def guided_prompts(self, parameter_override_keys): The keys of parameters to override, for each key, customers will be asked to provide a value """ default_stack_name = self.stack_name or "sam-app" - default_region = self.region or get_session().get_config_variable("region") or "us-east-1" + default_region = self.region or get_default_aws_region() default_capabilities = self.capabilities[0] or ("CAPABILITY_IAM",) default_config_env = self.config_env or DEFAULT_ENV default_config_file = self.config_file or DEFAULT_CONFIG_FILE_NAME diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py index 7b8f253609..a27f4872cf 100644 --- a/samcli/commands/exceptions.py +++ b/samcli/commands/exceptions.py @@ -59,3 +59,22 @@ class ContainersInitializationException(UserException): """ Exception class when SAM is not able to initialize any of the lambda functions containers """ + + +class PipelineTemplateCloneException(UserException): + """ + Exception class when unable to download pipeline templates from a Git repository during `sam pipeline init` + """ + + +class AppPipelineTemplateManifestException(UserException): + """ + Exception class when SAM is not able to parse the "manifest.yaml" file located in the SAM pipeline templates + Git repo: "github.com/aws/aws-sam-cli-pipeline-init-templates.git + """ + + +class AppPipelineTemplateMetadataException(UserException): + """ + Exception class when SAM is not able to parse the "metadata.json" file located in the SAM pipeline templates + """ diff --git a/samcli/commands/pipeline/__init__.py b/samcli/commands/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/pipeline/bootstrap/__init__.py b/samcli/commands/pipeline/bootstrap/__init__.py 
new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py new file mode 100644 index 0000000000..9e8b454992 --- /dev/null +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -0,0 +1,238 @@ +""" +CLI command for "pipeline bootstrap", which sets up the require pipeline infrastructure resources +""" +import os +from textwrap import dedent +from typing import Any, Dict, List, Optional + +import click + +from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.main import pass_context, common_options, aws_creds_options, print_cmdline_args +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.pipeline.bootstrap.stage import Stage +from samcli.lib.telemetry.metric import track_command +from samcli.lib.utils.colors import Colored +from samcli.lib.utils.version_checker import check_newer_version +from .guided_context import GuidedContext +from ..external_links import CONFIG_AWS_CRED_ON_CICD_URL + +SHORT_HELP = "Generates the necessary AWS resources to connect your CI/CD system." + +HELP_TEXT = """ +SAM Pipeline Bootstrap generates the necessary AWS resources to connect your +CI/CD system. This step must be completed for each pipeline stage prior to +running sam pipeline init +""" + +PIPELINE_CONFIG_DIR = os.path.join(".aws-sam", "pipeline") +PIPELINE_CONFIG_FILENAME = "pipelineconfig.toml" + + +@click.command("bootstrap", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) +@configuration_option(provider=TomlProvider(section="parameters")) +@click.option( + "--interactive/--no-interactive", + is_flag=True, + default=True, + help="Disable interactive prompting for bootstrap parameters, and fail if any required arguments are missing.", +) +@click.option( + "--stage", + help="The name of the corresponding stage. 
It is used as a suffix for the created resources.", + required=False, +) +@click.option( + "--pipeline-user", + help="An IAM user generated or referenced by sam pipeline bootstrap in order to " + "allow the connected CI/CD system to connect to the SAM CLI.", + required=False, +) +@click.option( + "--pipeline-execution-role", + help="Execution role that the CI/CD system assumes in order to make changes to resources on your behalf.", + required=False, +) +@click.option( + "--cloudformation-execution-role", + help="Execution role that CloudFormation assumes in order to make changes to resources on your behalf", + required=False, +) +@click.option( + "--bucket", + help="The name of the S3 bucket where this command uploads your CloudFormation template. This is required for" + "deployments of templates sized greater than 51,200 bytes.", + required=False, +) +@click.option( + "--create-image-repository/--no-create-image-repository", + is_flag=True, + default=False, + help="If set to true and no ECR image repository is provided, this command will create an ECR image repository " + "to hold the container images of Lambda functions having an Image package type.", +) +@click.option( + "--image-repository", + help="ECR repo uri where this command uploads the image artifacts that are referenced in your template.", + required=False, +) +@click.option( + "--confirm-changeset/--no-confirm-changeset", + default=True, + is_flag=True, + help="Prompt to confirm if the resources is to be deployed by SAM CLI.", +) +@common_options +@aws_creds_options +@pass_context +@track_command +@check_newer_version +@print_cmdline_args +def cli( + ctx: Any, + interactive: bool, + stage: Optional[str], + pipeline_user: Optional[str], + pipeline_execution_role: Optional[str], + cloudformation_execution_role: Optional[str], + bucket: Optional[str], + create_image_repository: bool, + image_repository: Optional[str], + confirm_changeset: bool, + config_file: Optional[str], + config_env: Optional[str], +) 
-> None: + """ + `sam pipeline bootstrap` command entry point + """ + do_cli( + region=ctx.region, + profile=ctx.profile, + interactive=interactive, + stage_name=stage, + pipeline_user_arn=pipeline_user, + pipeline_execution_role_arn=pipeline_execution_role, + cloudformation_execution_role_arn=cloudformation_execution_role, + artifacts_bucket_arn=bucket, + create_image_repository=create_image_repository, + image_repository_arn=image_repository, + confirm_changeset=confirm_changeset, + config_file=config_env, + config_env=config_file, + ) # pragma: no cover + + +def do_cli( + region: Optional[str], + profile: Optional[str], + interactive: bool, + stage_name: Optional[str], + pipeline_user_arn: Optional[str], + pipeline_execution_role_arn: Optional[str], + cloudformation_execution_role_arn: Optional[str], + artifacts_bucket_arn: Optional[str], + create_image_repository: bool, + image_repository_arn: Optional[str], + confirm_changeset: bool, + config_file: Optional[str], + config_env: Optional[str], + standalone: bool = True, +) -> None: + """ + implementation of `sam pipeline bootstrap` command + """ + if not pipeline_user_arn: + pipeline_user_arn = _load_saved_pipeline_user_arn() + + if interactive: + if standalone: + click.echo( + dedent( + """\ + + sam pipeline bootstrap generates the necessary AWS resources to connect a stage in + your CI/CD system. We will ask for [1] stage definition, [2] account details, and + [3] references to existing resources in order to bootstrap these pipeline + resources. 
+ """ + ), + ) + + guided_context = GuidedContext( + profile=profile, + stage_name=stage_name, + pipeline_user_arn=pipeline_user_arn, + pipeline_execution_role_arn=pipeline_execution_role_arn, + cloudformation_execution_role_arn=cloudformation_execution_role_arn, + artifacts_bucket_arn=artifacts_bucket_arn, + create_image_repository=create_image_repository, + image_repository_arn=image_repository_arn, + region=region, + ) + guided_context.run() + stage_name = guided_context.stage_name + pipeline_user_arn = guided_context.pipeline_user_arn + pipeline_execution_role_arn = guided_context.pipeline_execution_role_arn + cloudformation_execution_role_arn = guided_context.cloudformation_execution_role_arn + artifacts_bucket_arn = guided_context.artifacts_bucket_arn + create_image_repository = guided_context.create_image_repository + image_repository_arn = guided_context.image_repository_arn + region = guided_context.region + profile = guided_context.profile + + if not stage_name: + raise click.UsageError("Missing required parameter '--stage'") + + environment: Stage = Stage( + name=stage_name, + aws_profile=profile, + aws_region=region, + pipeline_user_arn=pipeline_user_arn, + pipeline_execution_role_arn=pipeline_execution_role_arn, + cloudformation_execution_role_arn=cloudformation_execution_role_arn, + artifacts_bucket_arn=artifacts_bucket_arn, + create_image_repository=create_image_repository, + image_repository_arn=image_repository_arn, + ) + + bootstrapped: bool = environment.bootstrap(confirm_changeset=confirm_changeset) + + if bootstrapped: + environment.print_resources_summary() + + environment.save_config_safe( + config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME, cmd_names=_get_bootstrap_command_names() + ) + + click.secho( + dedent( + f"""\ + View the definition in {os.path.join(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME)}, + run sam pipeline bootstrap to generate another set of resources, or proceed to + sam pipeline init to create your 
pipeline configuration file. + """ + ) + ) + + if not environment.pipeline_user.is_user_provided: + click.secho( + dedent( + f"""\ + Before running {Colored().bold("sam pipeline init")}, we recommend first setting up AWS credentials + in your CI/CD account. Read more about how to do so with your provider in + {CONFIG_AWS_CRED_ON_CICD_URL}. + """ + ) + ) + + +def _load_saved_pipeline_user_arn() -> Optional[str]: + samconfig: SamConfig = SamConfig(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) + if not samconfig.exists(): + return None + config: Dict[str, str] = samconfig.get_all(cmd_names=_get_bootstrap_command_names(), section="parameters") + return config.get("pipeline_user") + + +def _get_bootstrap_command_names() -> List[str]: + return ["pipeline", "bootstrap"] diff --git a/samcli/commands/pipeline/bootstrap/guided_context.py b/samcli/commands/pipeline/bootstrap/guided_context.py new file mode 100644 index 0000000000..a7f1f89b08 --- /dev/null +++ b/samcli/commands/pipeline/bootstrap/guided_context.py @@ -0,0 +1,249 @@ +""" +An interactive flow that prompt the user for required information to bootstrap the AWS account of an environment +with the required infrastructure +""" +import os +import sys +from textwrap import dedent +from typing import Optional, List, Tuple, Callable + +import click +from botocore.credentials import EnvProvider + +from samcli.commands.exceptions import CredentialsError +from samcli.commands.pipeline.external_links import CONFIG_AWS_CRED_DOC_URL +from samcli.lib.bootstrap.bootstrap import get_current_account_id +from samcli.lib.utils.colors import Colored + +from samcli.lib.utils.defaults import get_default_aws_region +from samcli.lib.utils.profile import list_available_profiles + + +class GuidedContext: + def __init__( + self, + profile: Optional[str] = None, + stage_name: Optional[str] = None, + pipeline_user_arn: Optional[str] = None, + pipeline_execution_role_arn: Optional[str] = None, + 
cloudformation_execution_role_arn: Optional[str] = None, + artifacts_bucket_arn: Optional[str] = None, + create_image_repository: bool = False, + image_repository_arn: Optional[str] = None, + region: Optional[str] = None, + ) -> None: + self.profile = profile + self.stage_name = stage_name + self.pipeline_user_arn = pipeline_user_arn + self.pipeline_execution_role_arn = pipeline_execution_role_arn + self.cloudformation_execution_role_arn = cloudformation_execution_role_arn + self.artifacts_bucket_arn = artifacts_bucket_arn + self.create_image_repository = create_image_repository + self.image_repository_arn = image_repository_arn + self.region = region + self.color = Colored() + + def _prompt_account_id(self) -> None: + profiles = list_available_profiles() + click.echo("The following AWS credential sources are available to use:") + click.echo( + dedent( + f"""\ + To know more about configuration AWS credentials, visit the link below: + {CONFIG_AWS_CRED_DOC_URL}\ + """ + ) + ) + has_env_creds = os.getenv(EnvProvider.ACCESS_KEY) and os.getenv(EnvProvider.SECRET_KEY) + click.echo(f"\t1 - Environment variables{' (not available)' if not has_env_creds else ''}") + for i, profile in enumerate(profiles): + click.echo(f"\t{i + 2} - {profile} (named profile)") + click.echo("\tq - Quit and configure AWS credentials") + answer = click.prompt( + "Select a credential source to associate with this stage", + show_choices=False, + show_default=False, + type=click.Choice((["1"] if has_env_creds else []) + [str(i + 2) for i in range(len(profiles))] + ["q"]), + ) + if answer == "q": + sys.exit(0) + elif answer == "1": + # by default, env variable has higher precedence + # https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html#envvars-list + self.profile = None + else: + self.profile = profiles[int(answer) - 2] + + try: + account_id = get_current_account_id(self.profile) + click.echo(self.color.green(f"Associated account {account_id} with stage {self.stage_name}.")) 
+ except CredentialsError as ex: + click.echo(f"{self.color.red(ex.message)}\n") + self._prompt_account_id() + + def _prompt_stage_name(self) -> None: + click.echo( + "Enter a name for this stage. This will be referenced later when you use the sam pipeline init command:" + ) + self.stage_name = click.prompt( + "Stage name", + default=self.stage_name, + type=click.STRING, + ) + + def _prompt_region_name(self) -> None: + self.region = click.prompt( + "Enter the region in which you want these resources to be created", + type=click.STRING, + default=get_default_aws_region(), + ) + + def _prompt_pipeline_user(self) -> None: + self.pipeline_user_arn = click.prompt( + "Enter the pipeline IAM user ARN if you have previously created one, or we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_pipeline_execution_role(self) -> None: + self.pipeline_execution_role_arn = click.prompt( + "Enter the pipeline execution role ARN if you have previously created one, " + "or we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_cloudformation_execution_role(self) -> None: + self.cloudformation_execution_role_arn = click.prompt( + "Enter the CloudFormation execution role ARN if you have previously created one, " + "or we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_artifacts_bucket(self) -> None: + self.artifacts_bucket_arn = click.prompt( + "Please enter the artifact bucket ARN for your Lambda function. " + "If you do not have a bucket, we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_image_repository(self) -> None: + if click.confirm("Does your application contain any IMAGE type Lambda functions?"): + self.image_repository_arn = click.prompt( + "Please enter the ECR image repository ARN(s) for your Image type function(s)." 
+ "If you do not yet have a repository, we will create one for you", + default="", + type=click.STRING, + ) + self.create_image_repository = not bool(self.image_repository_arn) + else: + self.create_image_repository = False + + def _get_user_inputs(self) -> List[Tuple[str, Callable[[], None]]]: + return [ + (f"Account: {get_current_account_id(self.profile)}", self._prompt_account_id), + (f"Stage name: {self.stage_name}", self._prompt_stage_name), + (f"Region: {self.region}", self._prompt_region_name), + ( + f"Pipeline user ARN: {self.pipeline_user_arn}" + if self.pipeline_user_arn + else "Pipeline user: [to be created]", + self._prompt_pipeline_user, + ), + ( + f"Pipeline execution role ARN: {self.pipeline_execution_role_arn}" + if self.pipeline_execution_role_arn + else "Pipeline execution role: [to be created]", + self._prompt_pipeline_execution_role, + ), + ( + f"CloudFormation execution role ARN: {self.cloudformation_execution_role_arn}" + if self.cloudformation_execution_role_arn + else "CloudFormation execution role: [to be created]", + self._prompt_cloudformation_execution_role, + ), + ( + f"Artifacts bucket ARN: {self.artifacts_bucket_arn}" + if self.artifacts_bucket_arn + else "Artifacts bucket: [to be created]", + self._prompt_artifacts_bucket, + ), + ( + f"ECR image repository ARN: {self.image_repository_arn}" + if self.image_repository_arn + else f"ECR image repository: [{'to be created' if self.create_image_repository else 'skipped'}]", + self._prompt_image_repository, + ), + ] + + def run(self) -> None: # pylint: disable=too-many-branches + """ + Runs an interactive questionnaire to prompt the user for the ARNs of the AWS resources(infrastructure) required + for the pipeline to work. 
Users can provide all, none or some resources' ARNs and leave the remaining empty + and it will be created by the bootstrap command + """ + click.secho(self.color.bold("[1] Stage definition")) + if self.stage_name: + click.echo(f"Stage name: {self.stage_name}") + else: + self._prompt_stage_name() + click.echo() + + click.secho(self.color.bold("[2] Account details")) + self._prompt_account_id() + click.echo() + + if not self.region: + self._prompt_region_name() + + if self.pipeline_user_arn: + click.echo(f"Pipeline IAM user ARN: {self.pipeline_user_arn}") + else: + self._prompt_pipeline_user() + click.echo() + + click.secho(self.color.bold("[3] Reference application build resources")) + + if self.pipeline_execution_role_arn: + click.echo(f"Pipeline execution role ARN: {self.pipeline_execution_role_arn}") + else: + self._prompt_pipeline_execution_role() + + if self.cloudformation_execution_role_arn: + click.echo(f"CloudFormation execution role ARN: {self.cloudformation_execution_role_arn}") + else: + self._prompt_cloudformation_execution_role() + + if self.artifacts_bucket_arn: + click.echo(f"Artifacts bucket ARN: {self.artifacts_bucket_arn}") + else: + self._prompt_artifacts_bucket() + + if self.image_repository_arn: + click.echo(f"ECR image repository ARN: {self.image_repository_arn}") + else: + self._prompt_image_repository() + click.echo() + + # Ask customers to confirm the inputs + click.secho(self.color.bold("[4] Summary")) + while True: + inputs = self._get_user_inputs() + click.secho("Below is the summary of the answers:") + for i, (text, _) in enumerate(inputs): + click.secho(f"\t{i + 1} - {text}") + edit_input = click.prompt( + text="Press enter to confirm the values above, or select an item to edit the value", + default="0", + show_choices=False, + show_default=False, + type=click.Choice(["0"] + [str(i + 1) for i in range(len(inputs))]), + ) + click.echo() + if int(edit_input): + inputs[int(edit_input) - 1][1]() + click.echo() + else: + break 
diff --git a/samcli/commands/pipeline/external_links.py b/samcli/commands/pipeline/external_links.py new file mode 100644 index 0000000000..77301ebb1b --- /dev/null +++ b/samcli/commands/pipeline/external_links.py @@ -0,0 +1,8 @@ +""" +The module to store external links. Put them in a centralized place so that we can verify their +validity automatically. +""" +CONFIG_AWS_CRED_DOC_URL = "https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html" + +_SAM_DOC_PREFIX = "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide" +CONFIG_AWS_CRED_ON_CICD_URL = _SAM_DOC_PREFIX + "/serverless-generating-example-ci-cd-others.html" diff --git a/samcli/commands/pipeline/init/__init__.py b/samcli/commands/pipeline/init/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/pipeline/init/cli.py b/samcli/commands/pipeline/init/cli.py new file mode 100644 index 0000000000..bcbe205c6a --- /dev/null +++ b/samcli/commands/pipeline/init/cli.py @@ -0,0 +1,51 @@ +""" +CLI command for "pipeline init" command +""" +from typing import Any, Optional + +import click + +from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.main import pass_context, common_options as cli_framework_options +from samcli.commands.pipeline.init.interactive_init_flow import InteractiveInitFlow +from samcli.lib.telemetry.metric import track_command + +SHORT_HELP = "Generates CI/CD pipeline configuration files." +HELP_TEXT = """ +sam pipeline init generates a pipeline configuration file that you can use to connect your +AWS account(s) to your CI/CD system. Before using sam pipeline init, you must +bootstrap the necessary resources for each stage in your pipeline. You can do this +by running sam pipeline init --bootstrap to be guided through the setup and configuration +file generation process, or refer to resources you have previously created with the +sam pipeline bootstrap command. 
+""" + + +@click.command("init", help=HELP_TEXT, short_help=SHORT_HELP) +@configuration_option(provider=TomlProvider(section="parameters")) +@click.option( + "--bootstrap", + is_flag=True, + default=False, + help="Allow bootstrapping resources.", +) +@cli_framework_options +@pass_context +@track_command # pylint: disable=R0914 +def cli(ctx: Any, config_env: Optional[str], config_file: Optional[str], bootstrap: bool) -> None: + """ + `sam pipeline init` command entry point + """ + + # Currently we support interactive mode only, i.e. the user doesn't provide the required arguments during the call + # so we call do_cli without any arguments. This will change after supporting the non interactive mode. + do_cli(bootstrap) + + +def do_cli(bootstrap: bool) -> None: + """ + implementation of `sam pipeline init` command + """ + # TODO non-interactive mode + init_flow = InteractiveInitFlow(bootstrap) + init_flow.do_interactive() diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py new file mode 100644 index 0000000000..7504f3a66b --- /dev/null +++ b/samcli/commands/pipeline/init/interactive_init_flow.py @@ -0,0 +1,482 @@ +""" +Interactive flow that prompts that users for pipeline template (cookiecutter template) and used it to generate +pipeline configuration file +""" +import json +import logging +import os +from json import JSONDecodeError +from pathlib import Path +from textwrap import dedent +from typing import Dict, List, Tuple + +import click + +from samcli.cli.main import global_cfg +from samcli.commands.exceptions import ( + AppPipelineTemplateMetadataException, + PipelineTemplateCloneException, +) +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.cookiecutter.interactive_flow import InteractiveFlow +from samcli.lib.cookiecutter.interactive_flow_creator import InteractiveFlowCreator +from samcli.lib.cookiecutter.question import Choice +from samcli.lib.cookiecutter.template 
import Template +from samcli.lib.utils import osutils +from samcli.lib.utils.colors import Colored +from samcli.lib.utils.git_repo import GitRepo, CloneRepoException +from .pipeline_templates_manifest import Provider, PipelineTemplateMetadata, PipelineTemplatesManifest +from ..bootstrap.cli import ( + do_cli as do_bootstrap, + PIPELINE_CONFIG_DIR, + PIPELINE_CONFIG_FILENAME, + _get_bootstrap_command_names, +) + +LOG = logging.getLogger(__name__) +shared_path: Path = global_cfg.config_dir +APP_PIPELINE_TEMPLATES_REPO_URL = "https://github.com/aws/aws-sam-cli-pipeline-init-templates.git" +APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME = "aws-sam-cli-app-pipeline-templates" +CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME = "custom-pipeline-template" +SAM_PIPELINE_TEMPLATE_SOURCE = "AWS Quick Start Pipeline Templates" +CUSTOM_PIPELINE_TEMPLATE_SOURCE = "Custom Pipeline Template Location" + + +class InteractiveInitFlow: + def __init__(self, allow_bootstrap: bool): + self.allow_bootstrap = allow_bootstrap + self.color = Colored() + + def do_interactive(self) -> None: + """ + An interactive flow that prompts the user for pipeline template (cookiecutter template) location, downloads it, + runs its specific questionnaire then generates the pipeline config file + based on the template and user's responses + """ + click.echo( + dedent( + """\ + + sam pipeline init generates a pipeline configuration file that you can use to connect your + AWS account(s) to your CI/CD system. We will guide you through the process to + bootstrap resources for each stage, then walk through the details necessary for + creating the pipeline config file. + + Please ensure you are in the root folder of your SAM application before you begin. 
+ """ + ) + ) + + click.echo("Select a pipeline structure template to get started:") + pipeline_template_source_question = Choice( + key="pipeline-template-source", + text="Select template", + options=[SAM_PIPELINE_TEMPLATE_SOURCE, CUSTOM_PIPELINE_TEMPLATE_SOURCE], + is_required=True, + ) + source = pipeline_template_source_question.ask() + if source == CUSTOM_PIPELINE_TEMPLATE_SOURCE: + generated_files = self._generate_from_custom_location() + else: + generated_files = self._generate_from_app_pipeline_templates() + click.secho(Colored().green("Successfully created the pipeline configuration file(s):")) + for file in generated_files: + click.secho(Colored().green(f"\t- {file}")) + + def _generate_from_app_pipeline_templates( + self, + ) -> List[str]: + """ + Prompts the user to choose a pipeline template from SAM predefined set of pipeline templates hosted in the git + repository: aws/aws-sam-cli-pipeline-init-templates.git + downloads locally, then generates the pipeline configuration file from the selected pipeline template. + Finally, return the list of generated files. 
+ """ + pipeline_templates_local_dir: Path = _clone_app_pipeline_templates() + pipeline_templates_manifest: PipelineTemplatesManifest = _read_app_pipeline_templates_manifest( + pipeline_templates_local_dir + ) + # The manifest contains multiple pipeline-templates so select one + selected_pipeline_template_metadata: PipelineTemplateMetadata = _prompt_pipeline_template( + pipeline_templates_manifest + ) + selected_pipeline_template_dir: Path = pipeline_templates_local_dir.joinpath( + selected_pipeline_template_metadata.location + ) + return self._generate_from_pipeline_template(selected_pipeline_template_dir) + + def _generate_from_custom_location( + self, + ) -> List[str]: + """ + Prompts the user for a custom pipeline template location, downloads locally, + then generates the pipeline config file and return the list of generated files + """ + pipeline_template_git_location: str = click.prompt("Template Git location") + if os.path.exists(pipeline_template_git_location): + return self._generate_from_pipeline_template(Path(pipeline_template_git_location)) + + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + tempdir_path = Path(tempdir) + pipeline_template_local_dir: Path = _clone_pipeline_templates( + pipeline_template_git_location, tempdir_path, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME + ) + return self._generate_from_pipeline_template(pipeline_template_local_dir) + + def _prompt_run_bootstrap_within_pipeline_init(self, stage_names: List[str], number_of_stages: int) -> bool: + """ + Prompt bootstrap if `--bootstrap` flag is provided. Return True if bootstrap process is executed. + """ + if not stage_names: + click.echo("[!] None detected in this account.") + else: + click.echo( + Colored().yellow( + f"Only {len(stage_names)} stage(s) were detected, " + f"fewer than what the template requires: {number_of_stages}." + ) + ) + click.echo() + + if self.allow_bootstrap: + if click.confirm( + "Do you want to go through stage setup process now? 
If you choose no, " + "you can still reference other bootstrapped resources." + ): + click.secho( + dedent( + """\ + + For each stage, we will ask for [1] stage definition, [2] account details, and [3] + reference application build resources in order to bootstrap these pipeline + resources. + + We recommend using an individual AWS account profiles for each stage in your + pipeline. You can set these profiles up using [little bit of info on how to do + this/docs]. + """ + ) + ) + + click.echo(Colored().bold(f"\nStage {len(stage_names) + 1} Setup\n")) + do_bootstrap( + region=None, + profile=None, + interactive=True, + stage_name=None, + pipeline_user_arn=None, + pipeline_execution_role_arn=None, + cloudformation_execution_role_arn=None, + artifacts_bucket_arn=None, + create_image_repository=False, + image_repository_arn=None, + confirm_changeset=True, + config_file=None, + config_env=None, + standalone=False, + ) + return True + else: + click.echo( + dedent( + """\ + To set up stage(s), please quit the process using Ctrl+C and use one of the following commands: + sam pipeline init --bootstrap To be guided through the stage and config file creation process. + sam pipeline bootstrap To specify details for an individual stage. + """ + ) + ) + click.prompt( + "To reference stage resources bootstrapped in a different account, press enter to proceed", default="" + ) + return False + + def _generate_from_pipeline_template(self, pipeline_template_dir: Path) -> List[str]: + """ + Generates a pipeline config file from a given pipeline template local location + and return the list of generated files. 
+ """ + pipeline_template: Template = _initialize_pipeline_template(pipeline_template_dir) + number_of_stages = (pipeline_template.metadata or {}).get("number_of_stages") + if not number_of_stages: + LOG.debug("Cannot find number_of_stages from template's metadata, set to default 2.") + number_of_stages = 2 + click.echo(f"You are using the {number_of_stages}-stage pipeline template.") + _draw_stage_diagram(number_of_stages) + while True: + click.echo("Checking for existing stages...\n") + stage_names, bootstrap_context = _load_pipeline_bootstrap_resources() + if len(stage_names) < number_of_stages and self._prompt_run_bootstrap_within_pipeline_init( + stage_names, number_of_stages + ): + # the customers just went through the bootstrap process, + # refresh the pipeline bootstrap resources and see whether bootstrap is still needed + continue + break + + context: Dict = pipeline_template.run_interactive_flows(bootstrap_context) + with osutils.mkdir_temp() as generate_dir: + LOG.debug("Generating pipeline files into %s", generate_dir) + context["outputDir"] = "." # prevent cookiecutter from generating a sub-folder + pipeline_template.generate_project(context, generate_dir) + return _copy_dir_contents_to_cwd(generate_dir) + + +def _load_pipeline_bootstrap_resources() -> Tuple[List[str], Dict[str, str]]: + section = "parameters" + context: Dict = {} + + config = SamConfig(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + if not config.exists(): + context[str(["stage_names_message"])] = "" + return [], context + + # config.get_stage_names() will return the list of + # bootstrapped stage names and "default" which is used to store shared values + # we don't want to include "default" here. 
+ stage_names = [stage_name for stage_name in config.get_stage_names() if stage_name != "default"] + for index, stage in enumerate(stage_names): + for key, value in config.get_all(_get_bootstrap_command_names(), section, stage).items(): + context[str([stage, key])] = value + # create an index alias for each stage name + # so that if customers type "1," it is equivalent to the first stage name + context[str([str(index + 1), key])] = value + + # pre-load the list of stage names detected from pipelineconfig.toml + stage_names_message = ( + "Here are the stage names detected " + + f"in {os.path.join(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME)}:\n" + + "\n".join([f"\t{index + 1} - {stage_name}" for index, stage_name in enumerate(stage_names)]) + ) + context[str(["stage_names_message"])] = stage_names_message + + return stage_names, context + + +def _copy_dir_contents_to_cwd(source_dir: str) -> List[str]: + """ + Copy the contents of source_dir into the current cwd. + If existing files are encountered, ask for confirmation. 
+ If not confirmed, all files will be written to + .aws-sam/pipeline/generated-files/ + """ + file_paths: List[str] = [] + existing_file_paths: List[str] = [] + for root, _, files in os.walk(source_dir): + for filename in files: + file_path = Path(root, filename) + target_file_path = Path(".").joinpath(file_path.relative_to(source_dir)) + LOG.debug("Verify %s does not exist", target_file_path) + if target_file_path.exists(): + existing_file_paths.append(str(target_file_path)) + file_paths.append(str(target_file_path)) + if existing_file_paths: + click.echo("\nThe following files already exist:") + for existing_file_path in existing_file_paths: + click.echo(f"\t- {existing_file_path}") + if not click.confirm("Do you want to override them?"): + target_dir = str(Path(PIPELINE_CONFIG_DIR, "generated-files")) + osutils.copytree(source_dir, target_dir) + click.echo(f"All files are saved to {target_dir}.") + return [str(Path(target_dir, path)) for path in file_paths] + LOG.debug("Copy contents of %s to cwd", source_dir) + osutils.copytree(source_dir, ".") + return file_paths + + +def _clone_app_pipeline_templates() -> Path: + """ + clone aws/aws-sam-cli-pipeline-init-templates.git Git repo to the local machine in SAM shared directory. + Returns: + the local directory path where the repo is cloned. 
+ """ + try: + return _clone_pipeline_templates( + repo_url=APP_PIPELINE_TEMPLATES_REPO_URL, + clone_dir=shared_path, + clone_name=APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, + ) + except PipelineTemplateCloneException: + # If can't clone app pipeline templates, try using an old clone from a previous run if already exist + expected_previous_clone_local_path: Path = shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME) + if expected_previous_clone_local_path.exists(): + click.echo("Unable to download updated app pipeline templates, using existing ones") + return expected_previous_clone_local_path + raise + + +def _clone_pipeline_templates(repo_url: str, clone_dir: Path, clone_name: str) -> Path: + """ + clone a given pipeline templates' Git repo to the user machine inside the given clone_dir directory + under the given clone name. For example, if clone_name is "custom-pipeline-template" then the location to clone + to is "/clone/dir/path/custom-pipeline-template/" + + Parameters: + repo_url: the URL of the Git repo to clone + clone_dir: the local parent directory to clone to + clone_name: The folder name to give to the created clone inside clone_dir + + Returns: + Path to the local clone + """ + try: + repo: GitRepo = GitRepo(repo_url) + clone_path: Path = repo.clone(clone_dir, clone_name, replace_existing=True) + return clone_path + except (OSError, CloneRepoException) as ex: + raise PipelineTemplateCloneException(str(ex)) from ex + + +def _read_app_pipeline_templates_manifest(pipeline_templates_dir: Path) -> PipelineTemplatesManifest: + """ + parse and return the manifest yaml file located in the root directory of the SAM pipeline templates folder: + + Parameters: + pipeline_templates_dir: local directory of SAM pipeline templates + + Raises: + AppPipelineTemplateManifestException if the manifest is not found, ill-formatted or missing required keys + + Returns: + The manifest of the pipeline templates + """ + manifest_path: Path = 
pipeline_templates_dir.joinpath("manifest.yaml") + return PipelineTemplatesManifest(manifest_path) + + +def _prompt_pipeline_template(pipeline_templates_manifest: PipelineTemplatesManifest) -> PipelineTemplateMetadata: + """ + Prompts the user a list of the available CI/CD systems along with associated app pipeline templates to choose + one of them + + Parameters: + pipeline_templates_manifest: A manifest file lists the available providers and the associated pipeline templates + + Returns: + The manifest (A section in the pipeline_templates_manifest) of the chosen pipeline template; + """ + provider = _prompt_cicd_provider(pipeline_templates_manifest.providers) + provider_pipeline_templates: List[PipelineTemplateMetadata] = [ + t for t in pipeline_templates_manifest.templates if t.provider == provider.id + ] + selected_template_manifest: PipelineTemplateMetadata = _prompt_provider_pipeline_template( + provider_pipeline_templates + ) + return selected_template_manifest + + +def _prompt_cicd_provider(available_providers: List[Provider]) -> Provider: + """ + Prompts the user a list of the available CI/CD systems to choose from + + Parameters: + available_providers: List of available CI/CD systems such as Jenkins, Gitlab and CircleCI + + Returns: + The chosen provider + """ + if len(available_providers) == 1: + return available_providers[0] + + question_to_choose_provider = Choice( + key="provider", text="CI/CD system", options=[p.display_name for p in available_providers], is_required=True + ) + chosen_provider_display_name = question_to_choose_provider.ask() + return next(p for p in available_providers if p.display_name == chosen_provider_display_name) + + +def _prompt_provider_pipeline_template( + provider_available_pipeline_templates_metadata: List[PipelineTemplateMetadata], +) -> PipelineTemplateMetadata: + """ + Prompts the user a list of the available pipeline templates to choose from + + Parameters: + provider_available_pipeline_templates_metadata: List of 
available pipeline templates manifests + + Returns: + The chosen pipeline template manifest + """ + if len(provider_available_pipeline_templates_metadata) == 1: + return provider_available_pipeline_templates_metadata[0] + question_to_choose_pipeline_template = Choice( + key="pipeline-template", + text="Which pipeline template would you like to use?", + options=[t.display_name for t in provider_available_pipeline_templates_metadata], + ) + chosen_pipeline_template_display_name = question_to_choose_pipeline_template.ask() + return next( + t + for t in provider_available_pipeline_templates_metadata + if t.display_name == chosen_pipeline_template_display_name + ) + + +def _initialize_pipeline_template(pipeline_template_dir: Path) -> Template: + """ + Initialize a pipeline template from a given pipeline template (cookiecutter template) location + + Parameters: + pipeline_template_dir: The local location of the pipeline cookiecutter template + + Returns: + The initialized pipeline's cookiecutter template + """ + interactive_flow = _get_pipeline_template_interactive_flow(pipeline_template_dir) + metadata = _get_pipeline_template_metadata(pipeline_template_dir) + return Template(location=str(pipeline_template_dir), interactive_flows=[interactive_flow], metadata=metadata) + + +def _get_pipeline_template_metadata(pipeline_template_dir: Path) -> Dict: + """ + Load the metadata from the file metadata.json located in the template directory, + raise an exception if anything wrong. 
+ """ + metadata_path = Path(pipeline_template_dir, "metadata.json") + if not metadata_path.exists(): + raise AppPipelineTemplateMetadataException(f"Cannot find metadata file {metadata_path}") + try: + with open(metadata_path, "r", encoding="utf-8") as file: + metadata = json.load(file) + if isinstance(metadata, dict): + return metadata + raise AppPipelineTemplateMetadataException(f"Invalid content found in {metadata_path}") + except JSONDecodeError as ex: + raise AppPipelineTemplateMetadataException(f"Invalid JSON found in {metadata_path}") from ex + + +def _get_pipeline_template_interactive_flow(pipeline_template_dir: Path) -> InteractiveFlow: + """ + A pipeline template defines its own interactive flow (questionnaire) in a JSON file named questions.json located + in the root directory of the template. This questionnaire defines a set of questions to prompt to the user and + use the responses as the cookiecutter context + + Parameters: + pipeline_template_dir: The local location of the pipeline cookiecutter template + + Raises: + QuestionsNotFoundException: if the pipeline template is missing questions.json file. + QuestionsFailedParsingException: if questions.json file is ill-formatted or missing required keys. 
+ + Returns: + The interactive flow + """ + flow_definition_path: Path = pipeline_template_dir.joinpath("questions.json") + return InteractiveFlowCreator.create_flow(str(flow_definition_path)) + + +def _lines_for_stage(stage_index: int) -> List[str]: + return [ + " _________ ", + "| |", + f"| Stage {stage_index} |", + "|_________|", + ] + + +def _draw_stage_diagram(number_of_stages: int) -> None: + delimiters = [" ", " ", "->", " "] + stage_lines = [_lines_for_stage(i + 1) for i in range(number_of_stages)] + for i, delimiter in enumerate(delimiters): + click.echo(delimiter.join([stage_lines[stage_i][i] for stage_i in range(number_of_stages)])) + click.echo("") diff --git a/samcli/commands/pipeline/init/pipeline_templates_manifest.py b/samcli/commands/pipeline/init/pipeline_templates_manifest.py new file mode 100644 index 0000000000..8249e14d85 --- /dev/null +++ b/samcli/commands/pipeline/init/pipeline_templates_manifest.py @@ -0,0 +1,61 @@ +""" +Represents a manifest that lists the available SAM pipeline templates. 
+Example: + providers: + - displayName:Jenkins + id: jenkins + - displayName:Gitlab CI/CD + id: gitlab + - displayName:Github Actions + id: github-actions + templates: + - displayName: jenkins-two-environments-pipeline + provider: Jenkins + location: templates/cookiecutter-jenkins-two-environments-pipeline + - displayName: gitlab-two-environments-pipeline + provider: Gitlab + location: templates/cookiecutter-gitlab-two-environments-pipeline + - displayName: Github-Actions-two-environments-pipeline + provider: Github Actions + location: templates/cookiecutter-github-actions-two-environments-pipeline +""" +from pathlib import Path +from typing import Dict, List + +import yaml + +from samcli.commands.exceptions import AppPipelineTemplateManifestException +from samcli.yamlhelper import parse_yaml_file + + +class Provider: + """ CI/CD system such as Jenkins, Gitlab and GitHub-Actions""" + + def __init__(self, manifest: Dict) -> None: + self.id: str = manifest["id"] + self.display_name: str = manifest["displayName"] + + +class PipelineTemplateMetadata: + """ The metadata of a Given pipeline template""" + + def __init__(self, manifest: Dict) -> None: + self.display_name: str = manifest["displayName"] + self.provider: str = manifest["provider"] + self.location: str = manifest["location"] + + +class PipelineTemplatesManifest: + """ The metadata of the available CI/CD systems and the pipeline templates""" + + def __init__(self, manifest_path: Path) -> None: + try: + manifest: Dict = parse_yaml_file(file_path=str(manifest_path)) + self.providers: List[Provider] = list(map(Provider, manifest["providers"])) + self.templates: List[PipelineTemplateMetadata] = list(map(PipelineTemplateMetadata, manifest["templates"])) + except (FileNotFoundError, KeyError, TypeError, yaml.YAMLError) as ex: + raise AppPipelineTemplateManifestException( + "SAM pipeline templates manifest file is not found or ill-formatted. This could happen if the file " + f"{manifest_path} got deleted or modified." 
+ "If you believe this is not the case, please file an issue at https://github.com/aws/aws-sam-cli/issues" + ) from ex diff --git a/samcli/commands/pipeline/pipeline.py b/samcli/commands/pipeline/pipeline.py new file mode 100644 index 0000000000..2d8df4463e --- /dev/null +++ b/samcli/commands/pipeline/pipeline.py @@ -0,0 +1,21 @@ +""" +Command group for "pipeline" suite commands. It provides common CLI arguments, template parsing capabilities, +setting up stdin/stdout etc +""" + +import click + +from .bootstrap.cli import cli as bootstrap_cli +from .init.cli import cli as init_cli + + +@click.group() +def cli() -> None: + """ + Manage the continuous delivery of the application + """ + + +# Add individual commands under this group +cli.add_command(bootstrap_cli) +cli.add_command(init_cli) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index eaed58d630..a9a590dc7f 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -4,32 +4,51 @@ import json import logging +from typing import Optional + +import boto3 +from botocore.exceptions import ClientError + from samcli import __version__ from samcli.cli.global_config import GlobalConfig -from samcli.commands.exceptions import UserException -from samcli.lib.utils.managed_cloudformation_stack import manage_stack as manage_cloudformation_stack +from samcli.commands.exceptions import UserException, CredentialsError +from samcli.lib.utils.managed_cloudformation_stack import StackOutput, manage_stack as manage_cloudformation_stack SAM_CLI_STACK_NAME = "aws-sam-cli-managed-default" LOG = logging.getLogger(__name__) def manage_stack(profile, region): - outputs = manage_cloudformation_stack( + outputs: StackOutput = manage_cloudformation_stack( profile=None, region=region, stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() ) - try: - bucket_name = next(o for o in outputs if o["OutputKey"] == "SourceBucket")["OutputValue"] - except StopIteration as 
ex: + bucket_name = outputs.get("SourceBucket") + if bucket_name is None: msg = ( "Stack " + SAM_CLI_STACK_NAME + " exists, but is missing the managed source bucket key. " "Failing as this stack was likely not created by the AWS SAM CLI." ) - raise UserException(msg) from ex + raise UserException(msg) # This bucket name is what we would write to a config file return bucket_name +def get_current_account_id(profile: Optional[str] = None): + """Returns account ID based on used AWS credentials.""" + session = boto3.Session(profile_name=profile) # type: ignore + sts_client = session.client("sts") + try: + caller_identity = sts_client.get_caller_identity() + except ClientError as ex: + if ex.response["Error"]["Code"] == "InvalidClientTokenId": + raise CredentialsError("Cannot identify account due to invalid configured credentials.") from ex + raise CredentialsError("Cannot identify account based on configured credentials.") from ex + if "Account" not in caller_identity: + raise CredentialsError("Cannot identify account based on configured credentials.") + return caller_identity["Account"] + + def _get_stack_template(): gc = GlobalConfig() info = {"version": __version__, "installationId": gc.installation_id if gc.installation_id else "unknown"} diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index 996ac5f648..5af1c0080a 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -41,6 +41,12 @@ def __init__(self, config_dir, filename=None): """ self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) + def get_stage_names(self): + self._read() + if isinstance(self.document, dict): + return [stage for stage, value in self.document.items() if isinstance(value, dict)] + return [] + def get_all(self, cmd_names, section, env=DEFAULT_ENV): """ Gets a value from the configuration file for the given environment, command and section @@ -153,6 +159,10 @@ def sanity_check(self): def exists(self): return 
self.filepath.exists() + def _ensure_exists(self): + self.filepath.parent.mkdir(parents=True, exist_ok=True) + self.filepath.touch() + def path(self): return str(self.filepath) @@ -183,8 +193,8 @@ def _read(self): def _write(self): if not self.document: return - if not self.exists(): - open(self.filepath, "a+").close() + + self._ensure_exists() current_version = self._version() if self._version() else SAM_CONFIG_VERSION try: diff --git a/samcli/lib/cookiecutter/exceptions.py b/samcli/lib/cookiecutter/exceptions.py index af19364811..5d379228d8 100644 --- a/samcli/lib/cookiecutter/exceptions.py +++ b/samcli/lib/cookiecutter/exceptions.py @@ -4,8 +4,8 @@ class CookiecutterErrorException(Exception): fmt = "An unspecified error occurred" - def __init__(self, **kwargs): - msg = self.fmt.format(**kwargs) + def __init__(self, **kwargs): # type: ignore + msg: str = self.fmt.format(**kwargs) Exception.__init__(self, msg) self.kwargs = kwargs diff --git a/samcli/lib/cookiecutter/interactive_flow.py b/samcli/lib/cookiecutter/interactive_flow.py index 996ac89ce3..95ce846dc0 100644 --- a/samcli/lib/cookiecutter/interactive_flow.py +++ b/samcli/lib/cookiecutter/interactive_flow.py @@ -1,7 +1,10 @@ """A flow of questions to be asked to the user in an interactive way.""" -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, List, Tuple + +import click from .question import Question +from ..utils.colors import Colored class InteractiveFlow: @@ -20,6 +23,7 @@ def __init__(self, questions: Dict[str, Question], first_question_key: str): self._questions: Dict[str, Question] = questions self._first_question_key: str = first_question_key self._current_question: Optional[Question] = None + self._color = Colored() def advance_to_next_question(self, current_answer: Optional[Any] = None) -> Optional[Question]: """ @@ -61,9 +65,25 @@ def run( associated to the key of the corresponding question """ context = context.copy() + answers: List[Tuple[str, Any]] = [] + 
question = self.advance_to_next_question() while question: answer = question.ask(context=context) context[question.key] = answer + answers.append((question.key, answer)) question = self.advance_to_next_question(answer) + + # print summary + click.echo(self._color.bold("SUMMARY")) + click.echo("We will generate a pipeline config file based on the following information:") + + for question_key, answer in answers: + if answer is None: + # ignore unanswered questions + continue + + question = self._questions[question_key] + click.echo(f"\t{question.text}: {self._color.underline(str(answer))}") + return context diff --git a/samcli/lib/cookiecutter/interactive_flow_creator.py b/samcli/lib/cookiecutter/interactive_flow_creator.py index d861174951..b3552d4065 100644 --- a/samcli/lib/cookiecutter/interactive_flow_creator.py +++ b/samcli/lib/cookiecutter/interactive_flow_creator.py @@ -17,7 +17,7 @@ class QuestionsFailedParsingException(UserException): class InteractiveFlowCreator: @staticmethod - def create_flow(flow_definition_path: str, extra_context: Optional[Dict] = None): + def create_flow(flow_definition_path: str, extra_context: Optional[Dict] = None) -> InteractiveFlow: """ This method parses the given json/yaml file to create an InteractiveFLow. It expects the file to define a list of questions. 
It parses the questions and add it to the flow in the same order they are defined @@ -77,7 +77,7 @@ def _load_questions( questions_definition = InteractiveFlowCreator._parse_questions_definition(flow_definition_path, extra_context) try: - for question in questions_definition.get("questions"): + for question in questions_definition.get("questions", []): q = QuestionFactory.create_question_from_json(question) if not first_question_key: first_question_key = q.key @@ -90,7 +90,7 @@ def _load_questions( raise QuestionsFailedParsingException(f"Failed to parse questions: {str(ex)}") from ex @staticmethod - def _parse_questions_definition(file_path, extra_context: Optional[Dict] = None): + def _parse_questions_definition(file_path: str, extra_context: Optional[Dict] = None) -> Dict: """ Read the questions definition file, do variable substitution, parse it as JSON/YAML diff --git a/samcli/lib/cookiecutter/processor.py b/samcli/lib/cookiecutter/processor.py index 5994c77949..4f34df06f8 100644 --- a/samcli/lib/cookiecutter/processor.py +++ b/samcli/lib/cookiecutter/processor.py @@ -9,7 +9,7 @@ class Processor(ABC): """ @abstractmethod - def run(self, context: Dict): + def run(self, context: Dict) -> Dict: """ the processing logic of this processor diff --git a/samcli/lib/cookiecutter/question.py b/samcli/lib/cookiecutter/question.py index 786836a400..4fad0ea020 100644 --- a/samcli/lib/cookiecutter/question.py +++ b/samcli/lib/cookiecutter/question.py @@ -1,4 +1,5 @@ """ This module represents the questions to ask to the user to fulfill the cookiecutter context. """ +from abc import ABC, abstractmethod from enum import Enum from typing import Any, Dict, List, Optional, Type, Union @@ -14,7 +15,18 @@ class QuestionKind(Enum): default = "default" -class Question: +class Promptable(ABC): + """ + Abstract class Question, Info, Choice, Confirm implement. + These classes need to implement their own prompt() method to prompt differently. 
+ """ + + @abstractmethod + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + pass + + +class Question(Promptable): """ A question to be prompt to the user in an interactive flow where the response is used to fulfill the cookiecutter context. @@ -53,12 +65,14 @@ def __init__( text: str, default: Optional[Union[str, Dict]] = None, is_required: Optional[bool] = None, + allow_autofill: Optional[bool] = None, next_question_map: Optional[Dict[str, str]] = None, default_next_question_key: Optional[str] = None, ): self._key = key self._text = text self._required = is_required + self._allow_autofill = allow_autofill self._default_answer = default # if it is an optional question, set an empty default answer to prevent click from keep asking for an answer if not self._required and self._default_answer is None: @@ -67,30 +81,30 @@ def __init__( self._default_next_question_key = default_next_question_key @property - def key(self): + def key(self) -> str: return self._key @property - def text(self): + def text(self) -> str: return self._text @property - def default_answer(self): - return self._default_answer + def default_answer(self) -> Optional[Any]: + return self._resolve_default_answer() @property - def required(self): + def required(self) -> Optional[bool]: return self._required @property - def next_question_map(self): + def next_question_map(self) -> Dict[str, str]: return self._next_question_map @property - def default_next_question_key(self): + def default_next_question_key(self) -> Optional[str]: return self._default_next_question_key - def ask(self, context: Dict) -> Any: + def ask(self, context: Optional[Dict] = None) -> Any: """ prompt the user this question @@ -104,7 +118,20 @@ def ask(self, context: Dict) -> Any: The user provided answer. 
""" resolved_default_answer = self._resolve_default_answer(context) - return click.prompt(text=self._text, default=resolved_default_answer) + + # skip the question and directly use the default value if autofill is allowed. + if resolved_default_answer is not None and self._allow_autofill: + return resolved_default_answer + + # if it is an optional question with no default answer, + # set an empty default answer to prevent click from keep asking for an answer + if not self._required and resolved_default_answer is None: + resolved_default_answer = "" + + return self.prompt(self._resolve_text(context), resolved_default_answer) + + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + return click.prompt(text=text, default=default_answer) def get_next_question_key(self, answer: Any) -> Optional[str]: # _next_question_map is a Dict[str(answer), str(next question key)] @@ -112,7 +139,7 @@ def get_next_question_key(self, answer: Any) -> Optional[str]: answer = str(answer) return self._next_question_map.get(answer, self._default_next_question_key) - def set_default_next_question_key(self, next_question_key): + def set_default_next_question_key(self, next_question_key: str) -> None: self._default_next_question_key = next_question_key def _resolve_key_path(self, key_path: List, context: Dict) -> List[str]: @@ -150,49 +177,59 @@ def _resolve_key_path(self, key_path: List, context: Dict) -> List[str]: raise ValueError(f'Invalid value "{unresolved_key}" in key path') return resolved_key_path - def _resolve_default_answer(self, context: Dict) -> Optional[Any]: + def _resolve_value_from_expression(self, expression: Any, context: Optional[Dict] = None) -> Optional[Any]: """ - a question may have a default answer provided directly through the "default_answer" value + a question may have a value provided directly as string or number value or indirectly from cookiecutter context using a key path Parameters ---------- context - Cookiecutter context used to resolve 
default values and answered questions' answers. + Cookiecutter context used to resolve values. Raises ------ KeyError - When default value depends on the answer to a non-existent question + When an expression depends on the answer to a non-existent question ValueError - The default value is malformed + The expression is malformed Returns ------- - Optional default answer, it might be resolved from cookiecutter context using specified key path. + Optional value, it might be resolved from cookiecutter context using specified key path. """ - if isinstance(self._default_answer, dict): + if isinstance(expression, dict): + context = context if context else {} + # load value using key path from cookiecutter - if "keyPath" not in self._default_answer: - raise KeyError(f'Missing key "keyPath" in question default "{self._default_answer}".') - unresolved_key_path = self._default_answer.get("keyPath", []) + if "keyPath" not in expression: + raise KeyError(f'Missing key "keyPath" in "{expression}".') + unresolved_key_path = expression.get("keyPath", []) if not isinstance(unresolved_key_path, list): - raise ValueError(f'Invalid default answer "{self._default_answer}" for question {self.key}') + raise ValueError(f'Invalid expression "{expression}" in question {self.key}') return context.get(str(self._resolve_key_path(unresolved_key_path, context))) + return expression + + def _resolve_text(self, context: Optional[Dict] = None) -> str: + resolved_text = self._resolve_value_from_expression(self._text, context) + if resolved_text is None: + raise ValueError(f"Cannot resolve value from expression: {self._text}") + return str(resolved_text) - return self._default_answer + def _resolve_default_answer(self, context: Optional[Dict] = None) -> Optional[Any]: + return self._resolve_value_from_expression(self._default_answer, context) class Info(Question): - def ask(self, context: Dict) -> None: - return click.echo(message=self._text) + def prompt(self, text: str, default_answer: 
Optional[Any]) -> Any: + return click.echo(message=text) class Confirm(Question): - def ask(self, context: Dict) -> bool: - return click.confirm(text=self._text) + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + return click.confirm(text=text) class Choice(Question): @@ -203,26 +240,27 @@ def __init__( options: List[str], default: Optional[str] = None, is_required: Optional[bool] = None, + allow_autofill: Optional[bool] = None, next_question_map: Optional[Dict[str, str]] = None, default_next_question_key: Optional[str] = None, ): if not options: raise ValueError("No defined options") self._options = options - super().__init__(key, text, default, is_required, next_question_map, default_next_question_key) + super().__init__(key, text, default, is_required, allow_autofill, next_question_map, default_next_question_key) - def ask(self, context: Dict) -> str: - resolved_default_answer = self._resolve_default_answer(context) - click.echo(self._text) + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + click.echo(text) for index, option in enumerate(self._options): click.echo(f"\t{index + 1} - {option}") options_indexes = self._get_options_indexes(base=1) choices = list(map(str, options_indexes)) choice = click.prompt( text="Choice", - default=resolved_default_answer, + default=default_answer, show_choices=False, type=click.Choice(choices), + show_default=default_answer is not None, ) return self._options[int(choice) - 1] @@ -245,6 +283,7 @@ def create_question_from_json(question_json: Dict) -> Question: options = question_json.get("options") default = question_json.get("default") is_required = question_json.get("isRequired") + allow_autofill = question_json.get("allowAutofill") next_question_map = question_json.get("nextQuestion") default_next_question = question_json.get("defaultNextQuestion") kind_str = question_json.get("kind") @@ -256,6 +295,7 @@ def create_question_from_json(question_json: Dict) -> Question: "text": text, 
"default": default, "is_required": is_required, + "allow_autofill": allow_autofill, "next_question_map": next_question_map, "default_next_question_key": default_next_question, } diff --git a/samcli/lib/cookiecutter/template.py b/samcli/lib/cookiecutter/template.py index c7d643bb43..46b851985e 100644 --- a/samcli/lib/cookiecutter/template.py +++ b/samcli/lib/cookiecutter/template.py @@ -3,15 +3,17 @@ values of the context and how to generate a project from the given template and provided context """ import logging -from typing import Any, Dict, List, Optional +from typing import Dict, List, Optional + from cookiecutter.exceptions import RepositoryNotFound, UnknownRepoType from cookiecutter.main import cookiecutter + from samcli.commands.exceptions import UserException from samcli.lib.init.arbitrary_project import generate_non_cookiecutter_project +from .exceptions import GenerateProjectFailedError, InvalidLocationError, PreprocessingError, PostprocessingError from .interactive_flow import InteractiveFlow from .plugin import Plugin from .processor import Processor -from .exceptions import GenerateProjectFailedError, InvalidLocationError, PreprocessingError, PostprocessingError LOG = logging.getLogger(__name__) @@ -41,6 +43,8 @@ class Template: An optional series of plugins to be plugged in. A plugin defines its own interactive_flow, preprocessor and postprocessor. A plugin is a sub-set of the template, if there is a common behavior among multiple templates, it is better to be extracted to a plugin that can then be plugged in to each of these templates. 
+ metadata: Optional[Dict] + An optional dictionary with extra information about the template Methods ------- @@ -61,6 +65,7 @@ def __init__( preprocessors: Optional[List[Processor]] = None, postprocessors: Optional[List[Processor]] = None, plugins: Optional[List[Plugin]] = None, + metadata: Optional[Dict] = None, ): """ Initialize the class @@ -84,6 +89,8 @@ def __init__( An optional series of plugins to be plugged in. A plugin defines its own interactive_flow, preprocessor and postprocessor. A plugin is a sub-set of the template, if there is a common behavior among multiple templates, it is better to be extracted to a plugin that can then be plugged in to each of these templates. + metadata: Optional[Dict] + An optional dictionary with extra information about the template """ self._location = location self._interactive_flows = interactive_flows or [] @@ -97,8 +104,9 @@ def __init__( self._preprocessors.append(plugin.preprocessor) if plugin.postprocessor: self._postprocessors.append(plugin.postprocessor) + self.metadata = metadata - def run_interactive_flows(self) -> Dict: + def run_interactive_flows(self, context: Optional[Dict] = None) -> Dict: """ prompt the user a series of questions' flows and gather the answers to create the cookiecutter context. The questions are identified by keys. If multiple questions, whether within the same flow or across @@ -112,14 +120,14 @@ def run_interactive_flows(self) -> Dict: A Dictionary in the form of {question.key: answer} representing user's answers to the flows' questions """ try: - context: Dict[str, Any] = {} + context = context if context else {} for flow in self._interactive_flows: context = flow.run(context) return context except Exception as e: raise UserException(str(e), wrapped_from=e.__class__.__name__) from e - def generate_project(self, context: Dict): + def generate_project(self, context: Dict, output_dir: str) -> None: """ Generates a project based on this cookiecutter template and the given context. 
The context is first processed and manipulated by series of preprocessors(if any) then the project is generated and finally @@ -129,6 +137,8 @@ def generate_project(self, context: Dict): ---------- context: Dict the cookiecutter context to fulfill the values of cookiecutter.json keys + output_dir: str + the directory where project will be generated in Raise: ------ @@ -144,7 +154,13 @@ def generate_project(self, context: Dict): try: LOG.debug("Baking a new template with cookiecutter with all parameters") - cookiecutter(template=self._location, output_dir=".", no_input=True, extra_context=context) + cookiecutter( + template=self._location, + output_dir=output_dir, + no_input=True, + extra_context=context, + overwrite_if_exists=True, + ) except RepositoryNotFound as e: # cookiecutter.json is not found in the template. Let's just clone it directly without # using cookiecutter and call it done. diff --git a/samcli/lib/pipeline/__init__.py b/samcli/lib/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/pipeline/bootstrap/__init__.py b/samcli/lib/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/pipeline/bootstrap/resource.py b/samcli/lib/pipeline/bootstrap/resource.py new file mode 100644 index 0000000000..a7b39dd965 --- /dev/null +++ b/samcli/lib/pipeline/bootstrap/resource.py @@ -0,0 +1,138 @@ +""" Represents AWS resource""" +from typing import Optional + + +class ARNParts: + """ + Decompose a given ARN into its parts https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html + + Attributes + ---------- + partition: str + the partition part(AWS, aws-cn or aws-us-gov) of the ARN + service: str + the service part(S3, IAM, ECR, ...etc) of the ARN + region: str + the AWS region part(us-east-1, eu-west-1, ...etc) of the ARN + account-id: str + the account-id part of the ARN + resource-id: str + the resource-id part of the ARN + resource-type: str + the 
resource-type part of the ARN + """ + + partition: str + service: str + region: str + account_id: str + resource_id: str + + def __init__(self, arn: str) -> None: + parts = arn.split(":") + try: + [_, self.partition, self.service, self.region, self.account_id, self.resource_id] = parts + except ValueError as ex: + raise ValueError(f"Invalid ARN ({arn})") from ex + + +class Resource: + """ + Represents an AWS resource + + Attributes + ---------- + arn: str + the ARN of the resource + comment: str + the comment of the resource + is_user_provided: bool + True if the user provided the ARN of the resource during the initialization. It indicates whether this pipeline- + resource is provided by the user or created by SAM during `sam pipeline bootstrap` + + Methods + ------- + name(self) -> Optional[str]: + extracts and returns the resource name from its ARN + """ + + def __init__(self, arn: Optional[str], comment: Optional[str]) -> None: + self.arn: Optional[str] = arn + self.comment: Optional[str] = comment + self.is_user_provided: bool = bool(arn) + + def name(self) -> Optional[str]: + """ + extracts and returns the resource name from its ARN + Raises + ------ + ValueError if the ARN is invalid + """ + if not self.arn: + return None + arn_parts: ARNParts = ARNParts(arn=self.arn) + return arn_parts.resource_id + + +class IAMUser(Resource): + """ + Represents an AWS IAM User resource + Attributes + ---------- + access_key_id: Optional[str] + holds the AccessKeyId of the credential of this IAM user, if any. + secret_access_key: Optional[str] + holds the SecretAccessKey of the credential of this IAM user, if any. 
+ """ + + def __init__( + self, + arn: Optional[str], + comment: Optional[str], + access_key_id: Optional[str] = None, + secret_access_key: Optional[str] = None, + ) -> None: + self.access_key_id: Optional[str] = access_key_id + self.secret_access_key: Optional[str] = secret_access_key + super().__init__(arn=arn, comment=comment) + + +class S3Bucket(Resource): + """ + Represents an AWS S3Bucket resource + Attributes + ---------- + kms_key_arn: Optional[str] + The ARN of the KMS key used in encrypting this S3Bucket, if any. + """ + + def __init__(self, arn: Optional[str], comment: Optional[str], kms_key_arn: Optional[str] = None) -> None: + self.kms_key_arn: Optional[str] = kms_key_arn + super().__init__(arn=arn, comment=comment) + + +class ECRImageRepository(Resource): + """ Represents an AWS ECR image repository resource """ + + def __init__(self, arn: Optional[str], comment: Optional[str]) -> None: + super().__init__(arn=arn, comment=comment) + + def get_uri(self) -> Optional[str]: + """ + extracts and returns the URI of the given ECR image repository from its ARN + see https://docs.aws.amazon.com/AmazonECR/latest/userguide/Registries.html + Raises + ------ + ValueError if the ARN is invalid + """ + if not self.arn: + return None + arn_parts: ARNParts = ARNParts(self.arn) + # ECR's resource_id contains the resource-type("resource") which is excluded from the URL + # from docs: https://docs.aws.amazon.com/AmazonECR/latest/userguide/security_iam_service-with-iam.html + # ECR's ARN: arn:${Partition}:ecr:${Region}:${Account}:repository/${Repository-name} + if not arn_parts.resource_id.startswith("repository/"): + raise ValueError(f"Invalid ECR ARN ({self.arn}), can't extract the URL from it.") + i = len("repository/") + repo_name = arn_parts.resource_id[i:] + return f"{arn_parts.account_id}.dkr.ecr.{arn_parts.region}.amazonaws.com/{repo_name}" diff --git a/samcli/lib/pipeline/bootstrap/stage.py b/samcli/lib/pipeline/bootstrap/stage.py new file mode 100644 index 
0000000000..d98081237b --- /dev/null +++ b/samcli/lib/pipeline/bootstrap/stage.py @@ -0,0 +1,330 @@ +""" Application Environment """ +import json +import os +import pathlib +import re +from itertools import chain +from typing import Dict, List, Optional, Tuple + +import boto3 +import click + +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.utils.colors import Colored +from samcli.lib.utils.managed_cloudformation_stack import manage_stack, StackOutput +from samcli.lib.pipeline.bootstrap.resource import Resource, IAMUser, ECRImageRepository + +CFN_TEMPLATE_PATH = str(pathlib.Path(os.path.dirname(__file__))) +STACK_NAME_PREFIX = "aws-sam-cli-managed" +STAGE_RESOURCES_STACK_NAME_SUFFIX = "pipeline-resources" +STAGE_RESOURCES_CFN_TEMPLATE = "stage_resources.yaml" +PIPELINE_USER = "pipeline_user" +PIPELINE_EXECUTION_ROLE = "pipeline_execution_role" +CLOUDFORMATION_EXECUTION_ROLE = "cloudformation_execution_role" +ARTIFACTS_BUCKET = "artifacts_bucket" +ECR_IMAGE_REPOSITORY = "image_repository" +REGION = "region" + + +class Stage: + """ + Represents an application stage: Beta, Gamma, Prod ...etc + + Attributes + ---------- + name: str + The name of the environment + aws_profile: Optional[str] + The named AWS profile (in user's machine) of the AWS account to deploy this environment to. + aws_region: Optional[str] + The AWS region to deploy this environment to. + pipeline_user: IAMUser + The IAM User having its AccessKeyId and SecretAccessKey credentials shared with the CI/CD system + pipeline_execution_role: Resource + The IAM role assumed by the pipeline-user to get access to the AWS account and execute the + CloudFormation stack. + cloudformation_execution_role: Resource + The IAM role assumed by the CloudFormation service to execute the CloudFormation stack. + artifacts_bucket: Resource + The S3 bucket to hold the SAM build artifacts of the application's CFN template.
+ create_image_repository: bool + A boolean flag that determines whether the user wants to create an ECR image repository or not + image_repository: ECRImageRepository + The ECR image repository to hold the image container of lambda functions with Image package-type + + Methods: + -------- + did_user_provide_all_required_resources(self) -> bool: + checks if all of the environment's required resources (pipeline_user, pipeline_execution_role, + cloudformation_execution_role, artifacts_bucket and image_repository) are provided by the user. + bootstrap(self, confirm_changeset: bool = True) -> None: + deploys the CFN template ./stage_resources.yaml to the AWS account identified by aws_profile and + aws_region member fields. If aws_profile is not provided, it will fall back to default boto3 credentials' + resolving. Note that the ./stage_resources.yaml template accepts the ARNs of already existing resources (if + any) as parameters and it will skip the creation of those resources but will use the ARNs to set the proper + permissions of other missing resources (resources created by the template) + save_config(self, config_dir: str, filename: str, cmd_names: List[str]): + save the Artifacts bucket name, ECR image repository URI and ARNs of pipeline_user, pipeline_execution_role and + cloudformation_execution_role to the "pipelineconfig.toml" file so that it can be auto-filled during + the `sam pipeline init` command. + print_resources_summary(self) -> None: + prints to the screen (console) the ARNs of the created and provided resources.
+ """ + + def __init__( + self, + name: str, + aws_profile: Optional[str] = None, + aws_region: Optional[str] = None, + pipeline_user_arn: Optional[str] = None, + pipeline_execution_role_arn: Optional[str] = None, + cloudformation_execution_role_arn: Optional[str] = None, + artifacts_bucket_arn: Optional[str] = None, + create_image_repository: bool = False, + image_repository_arn: Optional[str] = None, + ) -> None: + self.name: str = name + self.aws_profile: Optional[str] = aws_profile + self.aws_region: Optional[str] = aws_region + self.pipeline_user: IAMUser = IAMUser(arn=pipeline_user_arn, comment="Pipeline IAM user") + self.pipeline_execution_role: Resource = Resource( + arn=pipeline_execution_role_arn, comment="Pipeline execution role" + ) + self.cloudformation_execution_role: Resource = Resource( + arn=cloudformation_execution_role_arn, comment="CloudFormation execution role" + ) + self.artifacts_bucket: Resource = Resource(arn=artifacts_bucket_arn, comment="Artifact bucket") + self.create_image_repository: bool = create_image_repository + self.image_repository: ECRImageRepository = ECRImageRepository( + arn=image_repository_arn, comment="ECR image repository" + ) + self.color = Colored() + + def did_user_provide_all_required_resources(self) -> bool: + """Check if the user provided all of the environment resources or not""" + return all(resource.is_user_provided for resource in self._get_resources()) + + def _get_non_user_provided_resources_msg(self) -> str: + resource_comments = chain.from_iterable( + [ + [] if self.pipeline_user.is_user_provided else [self.pipeline_user.comment], + [] if self.pipeline_execution_role.is_user_provided else [self.pipeline_execution_role.comment], + [] + if self.cloudformation_execution_role.is_user_provided + else [self.cloudformation_execution_role.comment], + [] if self.artifacts_bucket.is_user_provided else [self.artifacts_bucket.comment], + [] + if self.image_repository.is_user_provided or not self.create_image_repository 
+ else [self.image_repository.comment], + ] + ) + return "\n".join([f"\t- {comment}" for comment in resource_comments]) + + def bootstrap(self, confirm_changeset: bool = True) -> bool: + """ + Deploys the CFN template(./stage_resources.yaml) which deploys: + * Pipeline IAM User + * Pipeline execution IAM role + * CloudFormation execution IAM role + * Artifacts' S3 Bucket + * ECR image repository + to the AWS account associated with the given environment. It will not redeploy the stack if already exists. + This CFN template accepts the ARNs of the resources as parameters and will not create a resource if already + provided, this way we can conditionally create a resource only if the user didn't provide it + + THIS METHOD UPDATES THE STATE OF THE CALLING INSTANCE(self) IT WILL SET THE VALUES OF THE RESOURCES ATTRIBUTES + + Parameters + ---------- + confirm_changeset: bool + if set to false, the stage_resources.yaml CFN template will directly be deployed, otherwise, + the user will be prompted for confirmation + + Returns True if bootstrapped, otherwise False + """ + + if self.did_user_provide_all_required_resources(): + click.secho( + self.color.yellow(f"\nAll required resources for the {self.name} environment exist, skipping creation.") + ) + return True + + missing_resources_msg: str = self._get_non_user_provided_resources_msg() + click.echo( + f"This will create the following required resources for the '{self.name}' environment: \n" + f"{missing_resources_msg}" + ) + if confirm_changeset: + confirmed: bool = click.confirm("Should we proceed with the creation?") + if not confirmed: + click.secho(self.color.red("Canceling pipeline bootstrap creation.")) + return False + + environment_resources_template_body = Stage._read_template(STAGE_RESOURCES_CFN_TEMPLATE) + output: StackOutput = manage_stack( + stack_name=self._get_stack_name(), + region=self.aws_region, + profile=self.aws_profile, + template_body=environment_resources_template_body, + parameter_overrides={ + 
"PipelineUserArn": self.pipeline_user.arn or "", + "PipelineExecutionRoleArn": self.pipeline_execution_role.arn or "", + "CloudFormationExecutionRoleArn": self.cloudformation_execution_role.arn or "", + "ArtifactsBucketArn": self.artifacts_bucket.arn or "", + "CreateImageRepository": "true" if self.create_image_repository else "false", + "ImageRepositoryArn": self.image_repository.arn or "", + }, + ) + + pipeline_user_secret_sm_id = output.get("PipelineUserSecretKey") + + self.pipeline_user.arn = output.get("PipelineUser") + if pipeline_user_secret_sm_id: + ( + self.pipeline_user.access_key_id, + self.pipeline_user.secret_access_key, + ) = Stage._get_pipeline_user_secret_pair(pipeline_user_secret_sm_id, self.aws_profile, self.aws_region) + self.pipeline_execution_role.arn = output.get("PipelineExecutionRole") + self.cloudformation_execution_role.arn = output.get("CloudFormationExecutionRole") + self.artifacts_bucket.arn = output.get("ArtifactsBucket") + self.image_repository.arn = output.get("ImageRepository") + return True + + @staticmethod + def _get_pipeline_user_secret_pair( + secret_manager_arn: str, profile: Optional[str], region: Optional[str] + ) -> Tuple[str, str]: + """ + Helper method to fetch pipeline user's AWS Credentials from secrets manager. + SecretString need to be in following JSON format: + { + "aws_access_key_id": "AWSSECRETACCESSKEY123", + "aws_secret_access_key": "mYSuperSecretDummyKey" + } + Parameters + ---------- + secret_manager_arn: + ARN of secret manager entry which holds pipeline user key. + profile: + The named AWS profile (in user's machine) of the AWS account to deploy this environment to. + region: + The AWS region to deploy this environment to. + + Returns tuple of aws_access_key_id and aws_secret_access_key. 
+ + """ + session = boto3.Session(profile_name=profile, region_name=region if region else None) # type: ignore + secrets_manager_client = session.client("secretsmanager") + response = secrets_manager_client.get_secret_value(SecretId=secret_manager_arn) + secret_string = response["SecretString"] + secret_json = json.loads(secret_string) + return secret_json["aws_access_key_id"], secret_json["aws_secret_access_key"] + + @staticmethod + def _read_template(template_file_name: str) -> str: + template_path: str = os.path.join(CFN_TEMPLATE_PATH, template_file_name) + with open(template_path, "r", encoding="utf-8") as fp: + template_body = fp.read() + return template_body + + def save_config(self, config_dir: str, filename: str, cmd_names: List[str]) -> None: + """ + save the Artifacts bucket name, ECR image repository URI and ARNs of pipeline_user, pipeline_execution_role and + cloudformation_execution_role to the given filename and directory. + + Parameters + ---------- + config_dir: str + the directory of the toml file to save to + filename: str + the name of the toml file to save to + cmd_names: List[str] + nested command name to scope the saved configs to inside the toml file + + Raises + ------ + ValueError: if the artifacts_bucket or ImageRepository ARNs are invalid + """ + + samconfig: SamConfig = SamConfig(config_dir=config_dir, filename=filename) + + if self.pipeline_user.arn: + samconfig.put(cmd_names=cmd_names, section="parameters", key=PIPELINE_USER, value=self.pipeline_user.arn) + + # Computing Artifacts bucket name and ECR image repository URL may through an exception if the ARNs are wrong + # Let's swallow such an exception to be able to save the remaining resources + try: + artifacts_bucket_name: Optional[str] = self.artifacts_bucket.name() + except ValueError: + artifacts_bucket_name = "" + try: + image_repository_uri: Optional[str] = self.image_repository.get_uri() or "" + except ValueError: + image_repository_uri = "" + + environment_specific_configs: 
Dict[str, Optional[str]] = { + PIPELINE_EXECUTION_ROLE: self.pipeline_execution_role.arn, + CLOUDFORMATION_EXECUTION_ROLE: self.cloudformation_execution_role.arn, + ARTIFACTS_BUCKET: artifacts_bucket_name, + # even image repository can be None, we want to save it as empty string + # so that pipeline init command can pick it up + ECR_IMAGE_REPOSITORY: image_repository_uri, + REGION: self.aws_region, + } + + for key, value in environment_specific_configs.items(): + if value is not None: + samconfig.put( + cmd_names=cmd_names, + section="parameters", + key=key, + value=value, + env=self.name, + ) + + samconfig.flush() + + def save_config_safe(self, config_dir: str, filename: str, cmd_names: List[str]) -> None: + """ + A safe version of save_config method that doesn't raise any exception + """ + try: + self.save_config(config_dir, filename, cmd_names) + except Exception: + pass + + def _get_resources(self) -> List[Resource]: + resources = [ + self.pipeline_user, + self.pipeline_execution_role, + self.cloudformation_execution_role, + self.artifacts_bucket, + ] + if self.create_image_repository or self.image_repository.arn: # Image Repository is optional + resources.append(self.image_repository) + return resources + + def print_resources_summary(self) -> None: + """prints to the screen(console) the ARNs of the created and provided resources.""" + + provided_resources = [] + created_resources = [] + for resource in self._get_resources(): + if resource.is_user_provided: + provided_resources.append(resource) + else: + created_resources.append(resource) + + if created_resources: + click.secho(self.color.green("The following resources were created in your account:")) + for resource in created_resources: + click.secho(self.color.green(f"\t- {resource.comment}")) + + if not self.pipeline_user.is_user_provided: + click.secho(self.color.green("Pipeline IAM user credential:")) + click.secho(self.color.green(f"\tAWS_ACCESS_KEY_ID: {self.pipeline_user.access_key_id}")) + 
click.secho(self.color.green(f"\tAWS_SECRET_ACCESS_KEY: {self.pipeline_user.secret_access_key}")) + + def _get_stack_name(self) -> str: + sanitized_stage_name: str = re.sub("[^0-9a-zA-Z]+", "-", self.name) + return f"{STACK_NAME_PREFIX}-{sanitized_stage_name}-{STAGE_RESOURCES_STACK_NAME_SUFFIX}" diff --git a/samcli/lib/pipeline/bootstrap/stage_resources.yaml b/samcli/lib/pipeline/bootstrap/stage_resources.yaml new file mode 100644 index 0000000000..bcc5e94423 --- /dev/null +++ b/samcli/lib/pipeline/bootstrap/stage_resources.yaml @@ -0,0 +1,358 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Parameters: + PipelineUserArn: + Type: String + PipelineExecutionRoleArn: + Type: String + CloudFormationExecutionRoleArn: + Type: String + ArtifactsBucketArn: + Type: String + CreateImageRepository: + Type: String + Default: false + AllowedValues: [true, false] + ImageRepositoryArn: + Type: String + +Conditions: + MissingPipelineUser: !Equals [!Ref PipelineUserArn, ""] + MissingPipelineExecutionRole: !Equals [!Ref PipelineExecutionRoleArn, ""] + MissingCloudFormationExecutionRole: !Equals [!Ref CloudFormationExecutionRoleArn, ""] + MissingArtifactsBucket: !Equals [!Ref ArtifactsBucketArn, ""] + ShouldHaveImageRepository: !Or [!Equals [!Ref CreateImageRepository, "true"], !Not [!Equals [!Ref ImageRepositoryArn, ""]]] + MissingImageRepository: !And [!Condition ShouldHaveImageRepository, !Equals [!Ref ImageRepositoryArn, ""]] + +Resources: + PipelineUser: + Type: AWS::IAM::User + Condition: MissingPipelineUser + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + Policies: + - PolicyName: AssumeRoles + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "sts:AssumeRole" + Resource: "*" + Condition: + StringEquals: + aws:ResourceTag/Role: pipeline-execution-role + + PipelineUserAccessKey: + Type: AWS::IAM::AccessKey + Condition: MissingPipelineUser + Properties: + Serial: 1 + Status: Active + 
UserName: !Ref PipelineUser + + PipelineUserSecretKey: + Type: AWS::SecretsManager::Secret + Condition: MissingPipelineUser + Properties: + SecretString: !Sub '{"aws_access_key_id": "${PipelineUserAccessKey}", "aws_secret_access_key": "${PipelineUserAccessKey.SecretAccessKey}"}' + + CloudFormationExecutionRole: + Type: AWS::IAM::Role + Condition: MissingCloudFormationExecutionRole + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: cloudformation.amazonaws.com + Action: + - 'sts:AssumeRole' + Policies: + - PolicyName: GrantCloudFormationFullAccess + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: '*' + Resource: '*' + + PipelineExecutionRole: + Type: AWS::IAM::Role + Condition: MissingPipelineExecutionRole + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + - Key: Role + Value: pipeline-execution-role + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + AWS: + - Fn::If: + - MissingPipelineUser + - !GetAtt PipelineUser.Arn + - !Ref PipelineUserArn + Action: + - 'sts:AssumeRole' + - Effect: Allow + Principal: + # Allow roles with tag Role=aws-sam-pipeline-codebuild-service-role to assume this role. + # This is required when CodePipeline is the CI/CD system of choice. 
+ AWS: + - !If + - MissingPipelineUser + - !Ref AWS::AccountId + - !Select [4, !Split [':', !Ref PipelineUserArn]] + Action: + - 'sts:AssumeRole' + Condition: + StringEquals: + aws:PrincipalTag/Role: aws-sam-pipeline-codebuild-service-role + + ArtifactsBucket: + Type: AWS::S3::Bucket + Condition: MissingArtifactsBucket + DeletionPolicy: "Retain" + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + LoggingConfiguration: + DestinationBucketName: + !Ref ArtifactsLoggingBucket + LogFilePrefix: "artifacts-logs" + VersioningConfiguration: + Status: Enabled + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + + ArtifactsBucketPolicy: + Type: AWS::S3::BucketPolicy + Condition: MissingArtifactsBucket + Properties: + Bucket: !Ref ArtifactsBucket + PolicyDocument: + Statement: + - Effect: "Deny" + Action: "s3:*" + Principal: "*" + Resource: + - !Join [ '',[ !GetAtt ArtifactsBucket.Arn, '/*' ] ] + - !GetAtt ArtifactsBucket.Arn + Condition: + Bool: + aws:SecureTransport: false + - Effect: "Allow" + Action: + - 's3:GetObject*' + - 's3:PutObject*' + - 's3:GetBucket*' + - 's3:List*' + Resource: + - !Join ['',[!GetAtt ArtifactsBucket.Arn, '/*']] + - !GetAtt ArtifactsBucket.Arn + Principal: + AWS: + - Fn::If: + - MissingPipelineExecutionRole + - !GetAtt PipelineExecutionRole.Arn + - !Ref PipelineExecutionRoleArn + - Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + + ArtifactsLoggingBucket: + Type: AWS::S3::Bucket + Condition: MissingArtifactsBucket + DeletionPolicy: "Retain" + Properties: + AccessControl: "LogDeliveryWrite" + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + VersioningConfiguration: + Status: Enabled + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + + ArtifactsLoggingBucketPolicy: + Type: AWS::S3::BucketPolicy + Condition: 
MissingArtifactsBucket + Properties: + Bucket: !Ref ArtifactsLoggingBucket + PolicyDocument: + Statement: + - Effect: "Deny" + Action: "s3:*" + Principal: "*" + Resource: + - !Join [ '',[ !GetAtt ArtifactsLoggingBucket.Arn, '/*' ] ] + - !GetAtt ArtifactsLoggingBucket.Arn + Condition: + Bool: + aws:SecureTransport: false + + PipelineExecutionRolePermissionPolicy: + Type: AWS::IAM::Policy + Condition: MissingPipelineExecutionRole + Properties: + PolicyName: PipelineExecutionRolePermissions + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: 'iam:PassRole' + Resource: + Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + - Effect: Allow + Action: + - "cloudformation:CreateChangeSet" + - "cloudformation:DescribeChangeSet" + - "cloudformation:ExecuteChangeSet" + - "cloudformation:DescribeStackEvents" + - "cloudformation:DescribeStacks" + - "cloudformation:GetTemplateSummary" + - "cloudformation:DescribeStackResource" + Resource: '*' + - Effect: Allow + Action: + - 's3:GetObject*' + - 's3:PutObject*' + - 's3:GetBucket*' + - 's3:List*' + Resource: + Fn::If: + - MissingArtifactsBucket + - - !Join [ '',[ !GetAtt ArtifactsBucket.Arn, '/*' ] ] + - !GetAtt ArtifactsBucket.Arn + - - !Join [ '',[ !Ref ArtifactsBucketArn, '/*' ] ] + - !Ref ArtifactsBucketArn + - Fn::If: + - ShouldHaveImageRepository + - Effect: "Allow" + Action: "ecr:GetAuthorizationToken" + Resource: "*" + - !Ref AWS::NoValue + - Fn::If: + - ShouldHaveImageRepository + - Effect: "Allow" + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + - "ecr:BatchCheckLayerAvailability" + - "ecr:PutImage" + - "ecr:InitiateLayerUpload" + - "ecr:UploadLayerPart" + - "ecr:CompleteLayerUpload" + Resource: + Fn::If: + - MissingImageRepository + - !GetAtt ImageRepository.Arn + - !Ref ImageRepositoryArn + - !Ref AWS::NoValue + Roles: + - !Ref PipelineExecutionRole + + ImageRepository: + Type: 
AWS::ECR::Repository + Condition: MissingImageRepository + Properties: + RepositoryPolicyText: + Version: "2012-10-17" + Statement: + - Sid: LambdaECRImageRetrievalPolicy + Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + - "ecr:GetRepositoryPolicy" + - "ecr:SetRepositoryPolicy" + - "ecr:DeleteRepositoryPolicy" + - Sid: AllowPushPull + Effect: Allow + Principal: + AWS: + - Fn::If: + - MissingPipelineExecutionRole + - !GetAtt PipelineExecutionRole.Arn + - !Ref PipelineExecutionRoleArn + - Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + - "ecr:BatchCheckLayerAvailability" + - "ecr:PutImage" + - "ecr:InitiateLayerUpload" + - "ecr:UploadLayerPart" + - "ecr:CompleteLayerUpload" + +Outputs: + PipelineUser: + Description: ARN of the Pipeline IAM User + Value: + Fn::If: + - MissingPipelineUser + - !GetAtt PipelineUser.Arn + - !Ref PipelineUserArn + + PipelineUserSecretKey: + Description: AWS Access Key and Secret Key of pipeline user. 
+ Condition: MissingPipelineUser + Value: !Ref PipelineUserSecretKey + + CloudFormationExecutionRole: + Description: ARN of the IAM Role(CloudFormationExecutionRole) + Value: + Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + + PipelineExecutionRole: + Description: ARN of the IAM Role(PipelineExecutionRole) + Value: + Fn::If: + - MissingPipelineExecutionRole + - !GetAtt PipelineExecutionRole.Arn + - !Ref PipelineExecutionRoleArn + + ArtifactsBucket: + Description: ARN of the Artifacts bucket + Value: + Fn::If: + - MissingArtifactsBucket + - !GetAtt ArtifactsBucket.Arn + - !Ref ArtifactsBucketArn + + ImageRepository: + Description: ARN of the ECR image repository + Condition: ShouldHaveImageRepository + Value: + Fn::If: + - MissingImageRepository + - !GetAtt ImageRepository.Arn + - !Ref ImageRepositoryArn diff --git a/samcli/lib/utils/colors.py b/samcli/lib/utils/colors.py index 84e3cbdbd7..84767f0fec 100644 --- a/samcli/lib/utils/colors.py +++ b/samcli/lib/utils/colors.py @@ -58,6 +58,10 @@ def underline(self, msg): """Underline the input""" return click.style(msg, underline=True) if self.colorize else msg + def bold(self, msg): + """Bold the input""" + return click.style(msg, bold=True) if self.colorize else msg + def _color(self, msg, color): """Internal helper method to add colors to input""" kwargs = {"fg": color} diff --git a/samcli/lib/utils/defaults.py b/samcli/lib/utils/defaults.py new file mode 100644 index 0000000000..4a07b113ac --- /dev/null +++ b/samcli/lib/utils/defaults.py @@ -0,0 +1,8 @@ +""" +Contains helpers for providing default values +""" +from botocore.session import get_session + + +def get_default_aws_region() -> str: + return get_session().get_config_variable("region") or "us-east-1" diff --git a/samcli/lib/utils/git_repo.py b/samcli/lib/utils/git_repo.py index 33e4597726..ddc7fba52f 100644 --- a/samcli/lib/utils/git_repo.py +++ b/samcli/lib/utils/git_repo.py 
@@ -132,7 +132,7 @@ def clone(self, clone_dir: Path, clone_name: str, replace_existing: bool = False output = clone_error.output.decode("utf-8") if "not found" in output.lower(): LOG.warning("WARN: Could not clone repo %s", self.url, exc_info=clone_error) - raise CloneRepoException from clone_error + raise CloneRepoException(output) from clone_error finally: self.clone_attempted = True diff --git a/samcli/lib/utils/managed_cloudformation_stack.py b/samcli/lib/utils/managed_cloudformation_stack.py index 25973fbc8b..29d148a7d9 100644 --- a/samcli/lib/utils/managed_cloudformation_stack.py +++ b/samcli/lib/utils/managed_cloudformation_stack.py @@ -1,20 +1,17 @@ """ Bootstrap's user's development environment by creating cloud resources required by SAM CLI """ - import logging +from collections.abc import Collection +from typing import cast, Dict, List, Optional, Union import boto3 - import click - from botocore.config import Config from botocore.exceptions import ClientError, BotoCoreError, NoRegionError, NoCredentialsError, ProfileNotFound from samcli.commands.exceptions import UserException, CredentialsError, RegionError - -SAM_CLI_STACK_PREFIX = "aws-sam-cli-managed-" LOG = logging.getLogger(__name__) @@ -25,10 +22,45 @@ def __init__(self, ex): super().__init__(message=message_fmt.format(ex=self.ex)) -def manage_stack(profile, region, stack_name, template_body): +class StackOutput: + def __init__(self, stack_output: List[Dict[str, str]]): + self._stack_output: List[Dict[str, str]] = stack_output + + def get(self, key) -> Optional[str]: + try: + return next(o for o in self._stack_output if o.get("OutputKey") == key).get("OutputValue") + except StopIteration: + return None + + +def manage_stack( + region: Optional[str], + stack_name: str, + template_body: str, + profile: Optional[str] = None, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> StackOutput: + """ + get or create a CloudFormation stack + + Parameters + ---------- + region: str 
+ AWS region for the CloudFormation stack + stack_name: str + CloudFormation stack name + template_body: str + CloudFormation template's content + profile: Optional[str] + AWS named profile for the AWS account + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] + Values of template parameters, if any. + + Returns: Stack output section(list of OutputKey, OutputValue pairs) + """ try: if profile: - session = boto3.Session(profile_name=profile, region_name=region if region else None) + session = boto3.Session(profile_name=profile, region_name=region if region else None) # type: ignore cloudformation_client = session.client("cloudformation") else: cloudformation_client = boto3.client( @@ -51,32 +83,41 @@ def manage_stack(profile, region, stack_name, template_body): "Error Setting Up Managed Stack Client: Unable to resolve a region. " "Please provide a region via the --region parameter or by the AWS_REGION environment variable." ) from ex - return _create_or_get_stack(cloudformation_client, stack_name, template_body) + return _create_or_get_stack(cloudformation_client, stack_name, template_body, parameter_overrides) -def _create_or_get_stack(cloudformation_client, stack_name, template_body): +# Todo Add _update_stack to handle the case when the values of the stack parameter got changed +def _create_or_get_stack( + cloudformation_client, + stack_name: str, + template_body: str, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> StackOutput: try: ds_resp = cloudformation_client.describe_stacks(StackName=stack_name) stacks = ds_resp["Stacks"] stack = stacks[0] click.echo("\n\tLooking for resources needed for deployment: Found!") - _check_sanity_of_stack(stack, stack_name) - return stack["Outputs"] + _check_sanity_of_stack(stack) + stack_outputs = cast(List[Dict[str, str]], stack["Outputs"]) + return StackOutput(stack_outputs) except ClientError: click.echo("\n\tLooking for resources needed for deployment: Not found.") try: stack = 
_create_stack( - cloudformation_client, stack_name, template_body + cloudformation_client, stack_name, template_body, parameter_overrides ) # exceptions are not captured from subcommands - _check_sanity_of_stack(stack, stack_name) - return stack["Outputs"] + _check_sanity_of_stack(stack) + stack_outputs = cast(List[Dict[str, str]], stack["Outputs"]) + return StackOutput(stack_outputs) except (ClientError, BotoCoreError) as ex: LOG.debug("Failed to create managed resources", exc_info=ex) raise ManagedStackError(str(ex)) from ex -def _check_sanity_of_stack(stack, stack_name): +def _check_sanity_of_stack(stack): + stack_name = stack.get("StackName") tags = stack.get("Tags", None) outputs = stack.get("Outputs", None) @@ -112,15 +153,23 @@ def _check_sanity_of_stack(stack, stack_name): raise UserException(msg) from ex -def _create_stack(cloudformation_client, stack_name, template_body): +def _create_stack( + cloudformation_client, + stack_name: str, + template_body: str, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +): click.echo("\tCreating the required resources...") change_set_name = "InitialCreation" + parameters = _generate_stack_parameters(parameter_overrides) change_set_resp = cloudformation_client.create_change_set( StackName=stack_name, TemplateBody=template_body, Tags=[{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], ChangeSetType="CREATE", ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine + Capabilities=["CAPABILITY_IAM"], + Parameters=parameters, ) stack_id = change_set_resp["StackId"] change_waiter = cloudformation_client.get_waiter("change_set_create_complete") @@ -134,3 +183,16 @@ def _create_stack(cloudformation_client, stack_name, template_body): stacks = ds_resp["Stacks"] click.echo("\tSuccessfully created!") return stacks[0] + + +def _generate_stack_parameters( + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None +) -> List[Dict[str, str]]: + 
parameters = [] + if parameter_overrides: + for key, value in parameter_overrides.items(): + if isinstance(value, Collection) and not isinstance(value, str): + # Assumption: values don't include commas or spaces. Need to refactor to handle such a case if needed. + value = ",".join(value) + parameters.append({"ParameterKey": key, "ParameterValue": value}) + return parameters diff --git a/samcli/lib/utils/profile.py b/samcli/lib/utils/profile.py new file mode 100644 index 0000000000..47d0242eee --- /dev/null +++ b/samcli/lib/utils/profile.py @@ -0,0 +1,10 @@ +""" +Module for aws profile related helpers +""" +from typing import List, cast + +from botocore.session import Session + + +def list_available_profiles() -> List[str]: + return cast(List[str], Session().available_profiles) diff --git a/samcli/yamlhelper.py b/samcli/yamlhelper.py index ca091e61cb..222c7b717e 100644 --- a/samcli/yamlhelper.py +++ b/samcli/yamlhelper.py @@ -18,7 +18,7 @@ # pylint: disable=too-many-ancestors import json -from typing import Dict, Optional +from typing import cast, Dict, Optional from botocore.compat import OrderedDict import yaml @@ -109,20 +109,20 @@ def _dict_constructor(loader, node): return OrderedDict(loader.construct_pairs(node)) -def yaml_parse(yamlstr): +def yaml_parse(yamlstr) -> Dict: """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. 
- return json.loads(yamlstr, object_pairs_hook=OrderedDict) + return cast(Dict, json.loads(yamlstr, object_pairs_hook=OrderedDict)) except ValueError: yaml.SafeLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _dict_constructor) yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) - return yaml.safe_load(yamlstr) + return cast(Dict, yaml.safe_load(yamlstr)) -def parse_yaml_file(file_path, extra_context: Optional[Dict] = None): +def parse_yaml_file(file_path, extra_context: Optional[Dict] = None) -> Dict: """ Read the file, do variable substitution, parse it as JSON/YAML diff --git a/tests/integration/pipeline/__init__.py b/tests/integration/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py new file mode 100644 index 0000000000..f82d27e357 --- /dev/null +++ b/tests/integration/pipeline/base.py @@ -0,0 +1,154 @@ +import os +import shutil +import logging +import uuid +from pathlib import Path +from typing import List, Optional, Set, Tuple, Any +from unittest import TestCase +from unittest.mock import Mock + +import boto3 +import botocore.exceptions +from botocore.exceptions import ClientError + +from samcli.lib.pipeline.bootstrap.stage import Stage + + +class PipelineBase(TestCase): + def base_command(self): + command = "sam" + if os.getenv("SAM_CLI_DEV"): + command = "samdev" + + return command + + +class InitIntegBase(PipelineBase): + generated_files: List[Path] = [] + + @classmethod + def setUpClass(cls) -> None: + # we need to compare the whole generated template, which is + # larger than normal diff size limit + cls.maxDiff = None + + def setUp(self) -> None: + super().setUp() + self.generated_files = [] + + def tearDown(self) -> None: + for generated_file in self.generated_files: + if generated_file.is_dir(): + shutil.rmtree(generated_file, ignore_errors=True) + elif generated_file.exists(): + generated_file.unlink() 
+ super().tearDown() + + def get_init_command_list(self, with_bootstrap=False): + command_list = [self.base_command(), "pipeline", "init"] + if with_bootstrap: + command_list.append("--bootstrap") + return command_list + + +class BootstrapIntegBase(PipelineBase): + region = "us-east-1" + stack_names: List[str] + cf_client: Any + randomized_stage_suffix: str + + @classmethod + def setUpClass(cls): + cls.cf_client = boto3.client("cloudformation", region_name=cls.region) + cls.randomized_stage_suffix = uuid.uuid4().hex[-6:] + + def setUp(self): + self.stack_names = [] + super().setUp() + shutil.rmtree(os.path.join(os.getcwd(), ".aws-sam", "pipeline"), ignore_errors=True) + + def tearDown(self): + for stack_name in self.stack_names: + self._cleanup_s3_buckets(stack_name) + self.cf_client.delete_stack(StackName=stack_name) + shutil.rmtree(os.path.join(os.getcwd(), ".aws-sam", "pipeline"), ignore_errors=True) + super().tearDown() + + def _cleanup_s3_buckets(self, stack_name): + try: + stack_resources = self.cf_client.describe_stack_resources(StackName=stack_name) + buckets = [ + resource + for resource in stack_resources["StackResources"] + if resource["ResourceType"] == "AWS::S3::Bucket" + ] + s3_client = boto3.client("s3") + for bucket in buckets: + s3_client.delete_bucket(Bucket=bucket.get("PhysicalResourceId")) + except botocore.exceptions.ClientError: + """No need to fail in cleanup""" + + def get_bootstrap_command_list( + self, + no_interactive: bool = False, + stage_name: Optional[str] = None, + profile_name: Optional[str] = None, + region: Optional[str] = None, + pipeline_user: Optional[str] = None, + pipeline_execution_role: Optional[str] = None, + cloudformation_execution_role: Optional[str] = None, + bucket: Optional[str] = None, + create_image_repository: bool = False, + image_repository: Optional[str] = None, + no_confirm_changeset: bool = False, + ): + command_list = [self.base_command(), "pipeline", "bootstrap"] + + if no_interactive: + command_list += 
["--no-interactive"] + if stage_name: + command_list += ["--stage", stage_name] + if profile_name: + command_list += ["--profile", profile_name] + if region: + command_list += ["--region", region] + if pipeline_user: + command_list += ["--pipeline-user", pipeline_user] + if pipeline_execution_role: + command_list += ["--pipeline-execution-role", pipeline_execution_role] + if cloudformation_execution_role: + command_list += ["--cloudformation-execution-role", cloudformation_execution_role] + if bucket: + command_list += ["--bucket", bucket] + if create_image_repository: + command_list += ["--create-image-repository"] + if image_repository: + command_list += ["--image-repository", image_repository] + if no_confirm_changeset: + command_list += ["--no-confirm-changeset"] + + return command_list + + def _extract_created_resource_logical_ids(self, stack_name: str) -> List[str]: + response = self.cf_client.describe_stack_resources(StackName=stack_name) + return [resource["LogicalResourceId"] for resource in response["StackResources"]] + + def _stack_exists(self, stack_name) -> bool: + try: + self.cf_client.describe_stacks(StackName=stack_name) + return True + except ClientError as ex: + if "does not exist" in ex.response.get("Error", {}).get("Message", ""): + return False + raise ex + + def _get_stage_and_stack_name(self, suffix: str = "") -> Tuple[str, str]: + # Method expects method name which can be a full path. 
Eg: test.integration.test_bootstrap_command.method_name + method_name = self.id().split(".")[-1] + stage_name = method_name.replace("_", "-") + suffix + "-" + self.randomized_stage_suffix + + mock_env = Mock() + mock_env.name = stage_name + stack_name = Stage._get_stack_name(mock_env) + + return stage_name, stack_name diff --git a/tests/integration/pipeline/test_bootstrap_command.py b/tests/integration/pipeline/test_bootstrap_command.py new file mode 100644 index 0000000000..0cf7741c5c --- /dev/null +++ b/tests/integration/pipeline/test_bootstrap_command.py @@ -0,0 +1,380 @@ +from unittest import skipIf + +from parameterized import parameterized + +from samcli.commands.pipeline.bootstrap.cli import PIPELINE_CONFIG_FILENAME, PIPELINE_CONFIG_DIR +from samcli.lib.config.samconfig import SamConfig +from tests.integration.pipeline.base import BootstrapIntegBase +from tests.testing_utils import ( + run_command_with_input, + RUNNING_ON_CI, + RUNNING_TEST_FOR_MASTER_ON_CI, + RUN_BY_CANARY, + run_command, + run_command_with_inputs, +) +import boto3 +from botocore.exceptions import ClientError + +# bootstrap tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict tests to run outside of CI/CD, when the branch is not master or tests are not run by Canary +SKIP_BOOTSTRAP_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY + +# In order to run bootstrap integration test locally make sure your test account is configured as `default` account. 
+CREDENTIAL_PROFILE = "2" if not RUN_BY_CANARY else "1" + +CFN_OUTPUT_TO_CONFIG_KEY = { + "ArtifactsBucket": "artifacts_bucket", + "CloudFormationExecutionRole": "cloudformation_execution_role", + "PipelineExecutionRole": "pipeline_execution_role", + "PipelineUser": "pipeline_user", +} + + +@skipIf(SKIP_BOOTSTRAP_TESTS, "Skip bootstrap tests in CI/CD only") +class TestBootstrap(BootstrapIntegBase): + @parameterized.expand([("create_image_repository",), (False,)]) + def test_interactive_with_no_resources_provided(self, create_image_repository): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "y" if create_image_repository else "N", # Should we create ECR repo + ] + + if create_image_repository: + inputs.append("") # Create image repository + + inputs.append("") # Confirm summary + inputs.append("y") # Create resources + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + # make sure pipeline user's credential is printed + self.assertIn("ACCESS_KEY_ID", stdout) + self.assertIn("SECRET_ACCESS_KEY", stdout) + + common_resources = { + "PipelineUser", + "PipelineUserAccessKey", + "PipelineUserSecretKey", + "CloudFormationExecutionRole", + "PipelineExecutionRole", + "ArtifactsBucket", + "ArtifactsLoggingBucket", + "ArtifactsLoggingBucketPolicy", + "ArtifactsBucketPolicy", + "PipelineExecutionRolePermissionPolicy", + } + if create_image_repository: + self.assertSetEqual( + { + *common_resources, + "ImageRepository", + }, + set(self._extract_created_resource_logical_ids(stack_name)), + ) + 
CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] = "image_repository" + self.validate_pipeline_config(stack_name, stage_name, list(CFN_OUTPUT_TO_CONFIG_KEY.keys())) + del CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] + else: + self.assertSetEqual(common_resources, set(self._extract_created_resource_logical_ids(stack_name))) + self.validate_pipeline_config(stack_name, stage_name) + + @parameterized.expand([("create_image_repository",), (False,)]) + def test_non_interactive_with_no_resources_provided(self, create_image_repository): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + no_interactive=True, + create_image_repository=create_image_repository, + no_confirm_changeset=True, + region=self.region, + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + self.assertEqual(bootstrap_process_execute.process.returncode, 2) + stderr = bootstrap_process_execute.stderr.decode() + self.assertIn("Missing required parameter", stderr) + + def test_interactive_with_all_required_resources_provided(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "arn:aws:iam::123:role/role-name", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Should we create ECR repo, 3 - specify one + "", + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertIn("skipping creation", stdout) + + def test_no_interactive_with_all_required_resources_provided(self): + stage_name, 
stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + no_interactive=True, + stage_name=stage_name, + pipeline_user="arn:aws:iam::123:user/user-name", # pipeline user + pipeline_execution_role="arn:aws:iam::123:role/role-name", # Pipeline execution role + cloudformation_execution_role="arn:aws:iam::123:role/role-name", # CloudFormation execution role + bucket="arn:aws:s3:::bucket-name", # Artifacts bucket + image_repository="arn:aws:ecr:::repository/repo-name", # ecr repo + region=self.region, + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertIn("skipping creation", stdout) + + def validate_pipeline_config(self, stack_name, stage_name, cfn_keys_to_check=None): + # Get output values from cloudformation + if cfn_keys_to_check is None: + cfn_keys_to_check = list(CFN_OUTPUT_TO_CONFIG_KEY.keys()) + response = self.cf_client.describe_stacks(StackName=stack_name) + stacks = response["Stacks"] + self.assertTrue(len(stacks) > 0) # in case stack name is invalid + stack_outputs = stacks[0]["Outputs"] + output_values = {} + for value in stack_outputs: + output_values[value["OutputKey"]] = value["OutputValue"] + + # Get values saved in config file + config = SamConfig(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + config_values = config.get_all(["pipeline", "bootstrap"], "parameters", stage_name) + config_values = {**config_values, **config.get_all(["pipeline", "bootstrap"], "parameters")} + + for key in CFN_OUTPUT_TO_CONFIG_KEY: + if key not in cfn_keys_to_check: + continue + value = CFN_OUTPUT_TO_CONFIG_KEY[key] + cfn_value = output_values[key] + config_value = config_values[value] + if key == "ImageRepository": + self.assertEqual(cfn_value.split("/")[-1], config_value.split("/")[-1]) + else: + 
self.assertTrue(cfn_value.endswith(config_value) or cfn_value == config_value) + + @parameterized.expand([("confirm_changeset",), (False,)]) + def test_no_interactive_with_some_required_resources_provided(self, confirm_changeset: bool): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + no_interactive=True, + stage_name=stage_name, + pipeline_user="arn:aws:iam::123:user/user-name", # pipeline user + pipeline_execution_role="arn:aws:iam::123:role/role-name", # Pipeline execution role + # CloudFormation execution role missing + bucket="arn:aws:s3:::bucket-name", # Artifacts bucket + image_repository="arn:aws:ecr:::repository/repo-name", # ecr repo + no_confirm_changeset=not confirm_changeset, + region=self.region, + ) + + inputs = [ + "y", # proceed + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs if confirm_changeset else []) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertIn("Successfully created!", stdout) + self.assertIn("CloudFormationExecutionRole", self._extract_created_resource_logical_ids(stack_name)) + + def test_interactive_cancelled_by_user(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Do you have Lambda with package type Image + "", + "", # Create resources confirmation + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + 
self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertTrue(stdout.strip().endswith("Canceling pipeline bootstrap creation.")) + self.assertFalse(self._stack_exists(stack_name)) + + def test_interactive_with_some_required_resources_provided(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Do you have Lambda with package type Image + "", + "y", # Create resources confirmation + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertIn("Successfully created!", stdout) + # make sure the not provided resource is the only resource created. + self.assertIn("CloudFormationExecutionRole", self._extract_created_resource_logical_ids(stack_name)) + self.validate_pipeline_config(stack_name, stage_name) + + def test_interactive_pipeline_user_only_created_once(self): + """ + Create 3 stages, only the first stage resource stack creates + a pipeline user, and the remaining two share the same pipeline user. 
+ """ + stage_names = [] + for suffix in ["1", "2", "3"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + bootstrap_command_list = self.get_bootstrap_command_list() + + for i, stage_name in enumerate(stage_names): + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + *([""] if i == 0 else []), # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "arn:aws:iam::123:role/role-name", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Should we create ECR repo, 3 - specify one + "", + "y", # Create resources confirmation + ] + + bootstrap_process_execute = run_command_with_input( + bootstrap_command_list, ("\n".join(inputs) + "\n").encode() + ) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + + # Only first environment creates pipeline user + if i == 0: + self.assertIn("The following resources were created in your account:", stdout) + resources = self._extract_created_resource_logical_ids(self.stack_names[i]) + self.assertTrue("PipelineUser" in resources) + self.assertTrue("PipelineUserAccessKey" in resources) + self.assertTrue("PipelineUserSecretKey" in resources) + self.validate_pipeline_config(self.stack_names[i], stage_name) + else: + self.assertIn("skipping creation", stdout) + + @parameterized.expand([("ArtifactsBucket",), ("ArtifactsLoggingBucket",)]) + def test_bootstrapped_buckets_accept_ssl_requests_only(self, bucket_logical_id): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + stage_name=stage_name, no_interactive=True, no_confirm_changeset=True, region=self.region + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + 
self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + stack_resources = self.cf_client.describe_stack_resources(StackName=stack_name) + bucket = next( + resource + for resource in stack_resources["StackResources"] + if resource["LogicalResourceId"] == bucket_logical_id + ) + bucket_name = bucket["PhysicalResourceId"] + bucket_key = "any/testing/key.txt" + testing_data = b"any testing binary data" + + s3_ssl_client = boto3.client("s3", region_name=self.region) + s3_non_ssl_client = boto3.client("s3", use_ssl=False, region_name=self.region) + + # Assert SSL requests are accepted + s3_ssl_client.put_object(Body=testing_data, Bucket=bucket_name, Key=bucket_key) + res = s3_ssl_client.get_object(Bucket=bucket_name, Key=bucket_key) + retrieved_data = res["Body"].read() + self.assertEqual(retrieved_data, testing_data) + + # Assert non SSl requests are denied + with self.assertRaises(ClientError) as error: + s3_non_ssl_client.get_object(Bucket=bucket_name, Key=bucket_key) + self.assertEqual( + str(error.exception), "An error occurred (AccessDenied) when calling the GetObject operation: Access Denied" + ) + + def test_bootstrapped_artifacts_bucket_has_server_access_log_enabled(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + stage_name=stage_name, no_interactive=True, no_confirm_changeset=True, region=self.region + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + stack_resources = self.cf_client.describe_stack_resources(StackName=stack_name) + artifacts_bucket = next( + resource + for resource in stack_resources["StackResources"] + if resource["LogicalResourceId"] == "ArtifactsBucket" + ) + artifacts_bucket_name = artifacts_bucket["PhysicalResourceId"] + artifacts_logging_bucket = next( + resource + for resource in stack_resources["StackResources"] + 
if resource["LogicalResourceId"] == "ArtifactsLoggingBucket" + ) + artifacts_logging_bucket_name = artifacts_logging_bucket["PhysicalResourceId"] + + s3_client = boto3.client("s3", region_name=self.region) + res = s3_client.get_bucket_logging(Bucket=artifacts_bucket_name) + self.assertEqual(artifacts_logging_bucket_name, res["LoggingEnabled"]["TargetBucket"]) diff --git a/tests/integration/pipeline/test_init_command.py b/tests/integration/pipeline/test_init_command.py new file mode 100644 index 0000000000..182184a999 --- /dev/null +++ b/tests/integration/pipeline/test_init_command.py @@ -0,0 +1,299 @@ +import os.path +import shutil +from pathlib import Path +from textwrap import dedent +from typing import List +from unittest import skipIf + +from parameterized import parameterized + +from samcli.commands.pipeline.bootstrap.cli import PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME +from tests.integration.pipeline.base import InitIntegBase, BootstrapIntegBase +from tests.integration.pipeline.test_bootstrap_command import SKIP_BOOTSTRAP_TESTS, CREDENTIAL_PROFILE +from tests.testing_utils import run_command_with_inputs + +QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. 
+ "", + "credential-id", + "main", + "template.yaml", + "test", + "test-stack", + "test-pipeline-execution-role", + "test-cfn-execution-role", + "test-bucket", + "test-ecr", + "us-east-2", + "prod", + "prod-stack", + "prod-pipeline-execution-role", + "prod-cfn-execution-role", + "prod-bucket", + "prod-ecr", + "us-west-2", +] + + +class TestInit(InitIntegBase): + """ + Here we use Jenkins template for testing + """ + + def setUp(self) -> None: + # make sure there is no pipelineconfig.toml, otherwise the autofill could affect the question flow + pipelineconfig_file = Path(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + if pipelineconfig_file.exists(): + pipelineconfig_file.unlink() + + def tearDown(self) -> None: + super().tearDown() + shutil.rmtree(PIPELINE_CONFIG_DIR, ignore_errors=True) + + def test_quick_start(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + def test_failed_when_generated_file_already_exist_override(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + generated_jenkinsfile_path.touch() # the file now pre-exists + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs( + init_command_list, [*QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL, "y"] + ) + + self.assertEqual(init_process_execute.process.returncode, 0) + 
self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + def test_failed_when_generated_file_already_exist_not_override(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + generated_jenkinsfile_path.touch() # the file now pre-exists + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs( + init_command_list, [*QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL, ""] + ) + + self.assertEqual(init_process_execute.process.returncode, 0) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open( + os.path.join(".aws-sam", "pipeline", "generated-files", "Jenkinsfile"), "r" + ) as output: + self.assertEqual(expected.read(), output.read()) + + # also check the Jenkinsfile is not overridden + self.assertEqual("", open("Jenkinsfile", "r").read()) + + def test_custom_template(self): + generated_file = Path("weather") + self.generated_files.append(generated_file) + + custom_template_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "custom_template")) + inputs = ["2", str(custom_template_path), "", "Rainy"] # custom template + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + + self.assertTrue(generated_file.exists()) + + with open(generated_file, "r") as f: + self.assertEqual("Rainy\n", f.read()) + + @parameterized.expand([("with_bootstrap",), (False,)]) + def test_with_pipelineconfig_has_all_stage_values(self, with_bootstrap): + 
generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + Path(PIPELINE_CONFIG_DIR).mkdir(parents=True, exist_ok=True) + pipelineconfig_path = Path(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + with open(pipelineconfig_path, "w") as f: + f.write( + dedent( + """\ + version = 0.1 + [default] + [default.pipeline_bootstrap] + [default.pipeline_bootstrap.parameters] + pipeline_user = "arn:aws:iam::123:user/aws-sam-cli-managed-test-pipeline-res-PipelineUser-123" + + [test] + [test.pipeline_bootstrap] + [test.pipeline_bootstrap.parameters] + pipeline_execution_role = "test-pipeline-execution-role" + cloudformation_execution_role = "test-cfn-execution-role" + artifacts_bucket = "test-bucket" + image_repository = "test-ecr" + region = "us-east-2" + + [prod] + [prod.pipeline_bootstrap] + [prod.pipeline_bootstrap.parameters] + pipeline_execution_role = "prod-pipeline-execution-role" + cloudformation_execution_role = "prod-cfn-execution-role" + artifacts_bucket = "prod-bucket" + image_repository = "prod-ecr" + region = "us-west-2" + """ + ) + ) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. 
+ "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + + init_command_list = self.get_init_command_list(with_bootstrap) + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + +@skipIf(SKIP_BOOTSTRAP_TESTS, "Skip bootstrap tests in CI/CD only") +class TestInitWithBootstrap(BootstrapIntegBase): + generated_files: List[Path] = [] + + def setUp(self): + super().setUp() + self.command_list = [self.base_command(), "pipeline", "init", "--bootstrap"] + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + def tearDown(self) -> None: + for generated_file in self.generated_files: + if generated_file.is_dir(): + shutil.rmtree(generated_file, ignore_errors=True) + elif generated_file.exists(): + generated_file.unlink() + super().tearDown() + + def test_without_stages_in_pipeline_config(self): + stage_names = [] + for suffix in ["1", "2"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "y", # Do you want to go through stage setup process now? + stage_names[0], + CREDENTIAL_PROFILE, + self.region, + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "y", # Do you want to go through stage setup process now? 
+ stage_names[1], + CREDENTIAL_PROFILE, + self.region, + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + init_process_execute = run_command_with_inputs(self.command_list, inputs) + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertIn("Here are the stage names detected", init_process_execute.stdout.decode()) + self.assertIn(stage_names[0], init_process_execute.stdout.decode()) + self.assertIn(stage_names[1], init_process_execute.stdout.decode()) + + def test_with_one_stages_in_pipeline_config(self): + stage_names = [] + for suffix in ["1", "2"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_names[0], + CREDENTIAL_PROFILE, + self.region, # region + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no + "", # Confirm summary + "y", # Create resources + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "y", # Do you want to go through stage setup process now? 
+ stage_names[1], + CREDENTIAL_PROFILE, + self.region, + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + init_process_execute = run_command_with_inputs(self.command_list, inputs) + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertIn("Here are the stage names detected", init_process_execute.stdout.decode()) + self.assertIn(stage_names[0], init_process_execute.stdout.decode()) + self.assertIn(stage_names[1], init_process_execute.stdout.decode()) diff --git a/tests/integration/testdata/pipeline/custom_template/cookiecutter.json b/tests/integration/testdata/pipeline/custom_template/cookiecutter.json new file mode 100644 index 0000000000..c02b7caed1 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/cookiecutter.json @@ -0,0 +1,4 @@ +{ + "outputDir": "aws-sam-pipeline", + "weather": "" +} \ No newline at end of file diff --git a/tests/integration/testdata/pipeline/custom_template/metadata.json b/tests/integration/testdata/pipeline/custom_template/metadata.json new file mode 100644 index 0000000000..689fe297f8 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/metadata.json @@ -0,0 +1,3 @@ +{ + "number_of_stages": 0 +} diff --git a/tests/integration/testdata/pipeline/custom_template/questions.json b/tests/integration/testdata/pipeline/custom_template/questions.json new file mode 100644 index 0000000000..a0fe2167bf --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/questions.json @@ -0,0 +1,7 @@ +{ + "questions": [{ + "key": "weather", + "question": "How is the weather today?", + "default": "Sunny" + }] +} \ No newline at end of file diff --git a/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather 
b/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather new file mode 100644 index 0000000000..3501ffd0ae --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather @@ -0,0 +1 @@ +{{cookiecutter.weather}} diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile new file mode 100644 index 0000000000..7a213a30f9 --- /dev/null +++ b/tests/integration/testdata/pipeline/expected_jenkinsfile @@ -0,0 +1,177 @@ +pipeline { + agent any + environment { + PIPELINE_USER_CREDENTIAL_ID = 'credential-id' + SAM_TEMPLATE = 'template.yaml' + MAIN_BRANCH = 'main' + TESTING_STACK_NAME = 'test-stack' + TESTING_PIPELINE_EXECUTION_ROLE = 'test-pipeline-execution-role' + TESTING_CLOUDFORMATION_EXECUTION_ROLE = 'test-cfn-execution-role' + TESTING_ARTIFACTS_BUCKET = 'test-bucket' + TESTING_IMAGE_REPOSITORY = 'test-ecr' + TESTING_REGION = 'us-east-2' + PROD_STACK_NAME = 'prod-stack' + PROD_PIPELINE_EXECUTION_ROLE = 'prod-pipeline-execution-role' + PROD_CLOUDFORMATION_EXECUTION_ROLE = 'prod-cfn-execution-role' + PROD_ARTIFACTS_BUCKET = 'prod-bucket' + PROD_IMAGE_REPOSITORY = 'prod-ecr' + PROD_REGION = 'us-west-2' + } + stages { + // uncomment and modify the following step for running the unit-tests + // stage('test') { + // steps { + // sh ''' + // # trigger the tests here + // ''' + // } + // } + + stage('build-and-deploy-feature') { + // this stage is triggered only for feature branches (feature*), + // which will build the stack and deploy to a stack named with branch name. 
+ when { + branch 'feature*' + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + args '--user 0:0 -v /var/run/docker.sock:/var/run/docker.sock' + } + } + steps { + sh 'sam build --template ${SAM_TEMPLATE} --use-container' + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.TESTING_REGION, + role: env.TESTING_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'deploying-feature') { + sh ''' + sam deploy --stack-name $(echo ${BRANCH_NAME} | tr -cd '[a-zA-Z0-9-]') \ + --capabilities CAPABILITY_IAM \ + --region ${TESTING_REGION} \ + --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \ + --image-repository ${TESTING_IMAGE_REPOSITORY} \ + --no-fail-on-empty-changeset \ + --role-arn ${TESTING_CLOUDFORMATION_EXECUTION_ROLE} + ''' + } + } + } + + stage('build-and-package') { + when { + branch env.MAIN_BRANCH + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + args '--user 0:0 -v /var/run/docker.sock:/var/run/docker.sock' + } + } + steps { + sh 'sam build --template ${SAM_TEMPLATE} --use-container' + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.TESTING_REGION, + role: env.TESTING_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'testing-packaging') { + sh ''' + sam package \ + --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \ + --image-repository ${TESTING_IMAGE_REPOSITORY} \ + --region ${TESTING_REGION} \ + --output-template-file packaged-testing.yaml + ''' + } + + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.PROD_REGION, + role: env.PROD_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'prod-packaging') { + sh ''' + sam package \ + --s3-bucket ${PROD_ARTIFACTS_BUCKET} \ + --image-repository ${PROD_IMAGE_REPOSITORY} \ + --region ${PROD_REGION} \ + --output-template-file packaged-prod.yaml + ''' + } + + archiveArtifacts artifacts: 'packaged-testing.yaml' + archiveArtifacts artifacts: 'packaged-prod.yaml' + } + } + + stage('deploy-testing') { + when { + branch env.MAIN_BRANCH + } + agent { + docker { + 
image 'public.ecr.aws/sam/build-provided' + } + } + steps { + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.TESTING_REGION, + role: env.TESTING_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'testing-deployment') { + sh ''' + sam deploy --stack-name ${TESTING_STACK_NAME} \ + --template packaged-testing.yaml \ + --capabilities CAPABILITY_IAM \ + --region ${TESTING_REGION} \ + --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \ + --image-repository ${TESTING_IMAGE_REPOSITORY} \ + --no-fail-on-empty-changeset \ + --role-arn ${TESTING_CLOUDFORMATION_EXECUTION_ROLE} + ''' + } + } + } + + // uncomment and modify the following step for running the integration-tests + // stage('integration-test') { + // when { + // branch env.MAIN_BRANCH + // } + // steps { + // sh ''' + // # trigger the integration tests here + // ''' + // } + // } + + stage('deploy-prod') { + when { + branch env.MAIN_BRANCH + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + } + } + steps { + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.PROD_REGION, + role: env.PROD_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'prod-deployment') { + sh ''' + sam deploy --stack-name ${PROD_STACK_NAME} \ + --template packaged-prod.yaml \ + --capabilities CAPABILITY_IAM \ + --region ${PROD_REGION} \ + --s3-bucket ${PROD_ARTIFACTS_BUCKET} \ + --image-repository ${PROD_IMAGE_REPOSITORY} \ + --no-fail-on-empty-changeset \ + --role-arn ${PROD_CLOUDFORMATION_EXECUTION_ROLE} + ''' + } + } + } + } +} diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 0cc7aa3067..78da67ab0c 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -5,6 +5,7 @@ import shutil from collections import namedtuple from subprocess import Popen, PIPE, TimeoutExpired +from typing import List IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) @@ -50,6 +51,10 @@ def run_command_with_input(command_list, stdin_input, 
timeout=TIMEOUT) -> Comman raise +def run_command_with_inputs(command_list: List[str], inputs: List[str], timeout=TIMEOUT) -> CommandResult: + return run_command_with_input(command_list, ("\n".join(inputs) + "\n").encode(), timeout) + + class FileCreator(object): def __init__(self): self.rootdir = tempfile.mkdtemp() diff --git a/tests/unit/commands/_utils/test_template.py b/tests/unit/commands/_utils/test_template.py index be4001be68..1de707ec38 100644 --- a/tests/unit/commands/_utils/test_template.py +++ b/tests/unit/commands/_utils/test_template.py @@ -1,12 +1,10 @@ -import os import copy +import os +from unittest import TestCase +from unittest.mock import patch, mock_open, MagicMock -import jmespath import yaml from botocore.utils import set_value_from_jmespath - -from unittest import TestCase -from unittest.mock import patch, mock_open, MagicMock from parameterized import parameterized, param from samcli.commands._utils.resources import AWS_SERVERLESS_FUNCTION, AWS_SERVERLESS_API diff --git a/tests/unit/commands/deploy/test_guided_context.py b/tests/unit/commands/deploy/test_guided_context.py index 6e49b73a60..7b31ff60eb 100644 --- a/tests/unit/commands/deploy/test_guided_context.py +++ b/tests/unit/commands/deploy/test_guided_context.py @@ -666,7 +666,7 @@ def test_guided_prompts_with_code_signing( expected_code_sign_calls = expected_code_sign_calls * (number_of_functions + number_of_layers) self.assertEqual(expected_code_sign_calls, patched_code_signer_prompt.call_args_list) - @patch("samcli.commands.deploy.guided_context.get_session") + @patch("samcli.commands.deploy.guided_context.get_default_aws_region") @patch("samcli.commands.deploy.guided_context.prompt") @patch("samcli.commands.deploy.guided_context.confirm") @patch("samcli.commands.deploy.guided_context.manage_stack") @@ -685,7 +685,7 @@ def test_guided_prompts_check_default_config_region( patched_manage_stack, patched_confirm, patched_prompt, - patched_get_session, + patched_get_default_aws_region, 
): patched_sam_function_provider.return_value = {} patched_get_template_artifacts_format.return_value = [ZIP] @@ -695,7 +695,7 @@ def test_guided_prompts_check_default_config_region( patched_confirm.side_effect = [True, False, True, True, ""] patched_signer_config_per_function.return_value = ({}, {}) patched_manage_stack.return_value = "managed_s3_stack" - patched_get_session.return_value.get_config_variable.return_value = "default_config_region" + patched_get_default_aws_region.return_value = "default_config_region" # setting the default region to None self.gc.region = None self.gc.guided_prompts(parameter_override_keys=None) diff --git a/tests/unit/commands/pipeline/__init__.py b/tests/unit/commands/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/pipeline/bootstrap/__init__.py b/tests/unit/commands/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/pipeline/bootstrap/test_cli.py b/tests/unit/commands/pipeline/bootstrap/test_cli.py new file mode 100644 index 0000000000..649fbbdf32 --- /dev/null +++ b/tests/unit/commands/pipeline/bootstrap/test_cli.py @@ -0,0 +1,276 @@ +from unittest import TestCase +from unittest.mock import patch, Mock + +import click +from click.testing import CliRunner + +from samcli.commands.pipeline.bootstrap.cli import ( + _load_saved_pipeline_user_arn, + _get_bootstrap_command_names, + PIPELINE_CONFIG_FILENAME, + PIPELINE_CONFIG_DIR, +) +from samcli.commands.pipeline.bootstrap.cli import cli as bootstrap_cmd +from samcli.commands.pipeline.bootstrap.cli import do_cli as bootstrap_cli + +ANY_REGION = "ANY_REGION" +ANY_PROFILE = "ANY_PROFILE" +ANY_STAGE_NAME = "ANY_STAGE_NAME" +ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN" +ANY_PIPELINE_EXECUTION_ROLE_ARN = "ANY_PIPELINE_EXECUTION_ROLE_ARN" +ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN = "ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN" +ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN" 
+ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" +ANY_ARN = "ANY_ARN" +ANY_CONFIG_FILE = "ANY_CONFIG_FILE" +ANY_CONFIG_ENV = "ANY_CONFIG_ENV" +PIPELINE_BOOTSTRAP_COMMAND_NAMES = ["pipeline", "bootstrap"] + + +class TestCli(TestCase): + def setUp(self) -> None: + self.cli_context = { + "region": ANY_REGION, + "profile": ANY_PROFILE, + "interactive": True, + "stage_name": ANY_STAGE_NAME, + "pipeline_user_arn": ANY_PIPELINE_USER_ARN, + "pipeline_execution_role_arn": ANY_PIPELINE_EXECUTION_ROLE_ARN, + "cloudformation_execution_role_arn": ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + "artifacts_bucket_arn": ANY_ARTIFACTS_BUCKET_ARN, + "create_image_repository": True, + "image_repository_arn": ANY_IMAGE_REPOSITORY_ARN, + "confirm_changeset": True, + "config_file": ANY_CONFIG_FILE, + "config_env": ANY_CONFIG_ENV, + } + + @patch("samcli.commands.pipeline.bootstrap.cli.do_cli") + def test_bootstrap_command_default_argument_values(self, do_cli_mock): + runner: CliRunner = CliRunner() + runner.invoke(bootstrap_cmd) + # Test the defaults are as following: + # interactive -> True + # create_image_repository -> False + # confirm_changeset -> True + # region, profile, stage_name and all ARNs are None + do_cli_mock.assert_called_once_with( + region=None, + profile=None, + interactive=True, + stage_name=None, + pipeline_user_arn=None, + pipeline_execution_role_arn=None, + cloudformation_execution_role_arn=None, + artifacts_bucket_arn=None, + create_image_repository=False, + image_repository_arn=None, + confirm_changeset=True, + config_file="default", + config_env="samconfig.toml", + ) + + @patch("samcli.commands.pipeline.bootstrap.cli.do_cli") + def test_bootstrap_command_flag_arguments(self, do_cli_mock): + runner: CliRunner = CliRunner() + runner.invoke(bootstrap_cmd, args=["--interactive", "--no-create-image-repository", "--confirm-changeset"]) + args, kwargs = do_cli_mock.call_args + self.assertTrue(kwargs["interactive"]) + self.assertFalse(kwargs["create_image_repository"]) + 
self.assertTrue(kwargs["confirm_changeset"]) + + runner.invoke(bootstrap_cmd, args=["--no-interactive", "--create-image-repository", "--no-confirm-changeset"]) + args, kwargs = do_cli_mock.call_args + self.assertFalse(kwargs["interactive"]) + self.assertTrue(kwargs["create_image_repository"]) + self.assertFalse(kwargs["confirm_changeset"]) + + @patch("samcli.commands.pipeline.bootstrap.cli.do_cli") + def test_bootstrap_command_with_different_arguments_combination(self, do_cli_mock): + runner: CliRunner = CliRunner() + runner.invoke( + bootstrap_cmd, + args=["--no-interactive", "--stage", "environment1", "--bucket", "bucketARN"], + ) + args, kwargs = do_cli_mock.call_args + self.assertFalse(kwargs["interactive"]) + self.assertEqual(kwargs["stage_name"], "environment1") + self.assertEqual(kwargs["artifacts_bucket_arn"], "bucketARN") + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrapping_normal_interactive_flow( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + # setup + gc_instance = Mock() + guided_context_mock.return_value = gc_instance + environment_instance = Mock() + environment_mock.return_value = environment_instance + load_saved_pipeline_user_arn_mock.return_value = ANY_PIPELINE_USER_ARN + self.cli_context["interactive"] = True + self.cli_context["pipeline_user_arn"] = None + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + + # trigger + bootstrap_cli(**self.cli_context) + + # verify + load_saved_pipeline_user_arn_mock.assert_called_once() + gc_instance.run.assert_called_once() + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + environment_instance.print_resources_summary.assert_called_once() 
+ environment_instance.save_config_safe.assert_called_once_with( + config_dir=PIPELINE_CONFIG_DIR, + filename=PIPELINE_CONFIG_FILENAME, + cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, + ) + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrap_will_not_try_loading_pipeline_user_if_already_provided( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + bootstrap_cli(**self.cli_context) + load_saved_pipeline_user_arn_mock.assert_not_called() + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrap_will_try_loading_pipeline_user_if_not_provided( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + self.cli_context["pipeline_user_arn"] = None + bootstrap_cli(**self.cli_context) + load_saved_pipeline_user_arn_mock.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_stage_name_is_required_to_be_provided_in_case_of_non_interactive_mode( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + self.cli_context["interactive"] = False + self.cli_context["stage_name"] = None + with self.assertRaises(click.UsageError): + bootstrap_cli(**self.cli_context) + + 
@patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_stage_name_is_not_required_to_be_provided_in_case_of_interactive_mode( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + self.cli_context["interactive"] = True + self.cli_context["stage_name"] = None + bootstrap_cli(**self.cli_context) # No exception is thrown + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_guided_context_will_be_enabled_or_disabled_based_on_the_interactive_mode( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + gc_instance = Mock() + guided_context_mock.return_value = gc_instance + self.cli_context["interactive"] = False + bootstrap_cli(**self.cli_context) + gc_instance.run.assert_not_called() + self.cli_context["interactive"] = True + bootstrap_cli(**self.cli_context) + gc_instance.run.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrapping_will_confirm_before_creating_the_resources_unless_the_user_choose_not_to( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + environment_instance = Mock() + environment_mock.return_value = environment_instance + 
self.cli_context["confirm_changeset"] = False + bootstrap_cli(**self.cli_context) + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=False) + environment_instance.bootstrap.reset_mock() + self.cli_context["confirm_changeset"] = True + bootstrap_cli(**self.cli_context) + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_will_read_from_the_correct_file( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = False + + # trigger + _load_saved_pipeline_user_arn() + + # verify + sam_config_mock.assert_called_once_with(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_is_not_found( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = False + + # trigger + pipeline_user_arn = _load_saved_pipeline_user_arn() + + # verify + self.assertIsNone(pipeline_user_arn) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_does_not_contain_pipeline_user( + self, get_command_names_mock, sam_config_mock + ): 
+ # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = True + sam_config_instance_mock.get_all.return_value = {"non-pipeline_user-key": "any_value"} + + # trigger + pipeline_user_arn = _load_saved_pipeline_user_arn() + + # verify + self.assertIsNone(pipeline_user_arn) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_returns_the_pipeline_user_arn_from_the_pipeline_toml_file( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = True + sam_config_instance_mock.get_all.return_value = {"pipeline_user": ANY_PIPELINE_USER_ARN} + + # trigger + pipeline_user_arn = _load_saved_pipeline_user_arn() + + # verify + self.assertEqual(pipeline_user_arn, ANY_PIPELINE_USER_ARN) diff --git a/tests/unit/commands/pipeline/bootstrap/test_guided_context.py b/tests/unit/commands/pipeline/bootstrap/test_guided_context.py new file mode 100644 index 0000000000..c4c11e9792 --- /dev/null +++ b/tests/unit/commands/pipeline/bootstrap/test_guided_context.py @@ -0,0 +1,231 @@ +from unittest import TestCase +from unittest.mock import patch, Mock, ANY + +from parameterized import parameterized + +from samcli.commands.pipeline.bootstrap.guided_context import GuidedContext + +ANY_STAGE_NAME = "ANY_STAGE_NAME" +ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN" +ANY_PIPELINE_EXECUTION_ROLE_ARN = "ANY_PIPELINE_EXECUTION_ROLE_ARN" +ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN = "ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN" +ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN" 
+ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" +ANY_ARN = "ANY_ARN" +ANY_REGION = "us-east-2" + + +class TestGuidedContext(TestCase): + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_not_prompt_for_fields_that_are_already_provided( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + gc: GuidedContext = GuidedContext( + stage_name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + region=ANY_REGION, + ) + gc.run() + # there should only two prompt to ask + # 1. which account to use (mocked in _prompt_account_id(), not contributing to count) + # 2. 
what values customers want to change + prompt_account_id_mock.assert_called_once() + click_mock.prompt.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_prompt_for_fields_that_are_not_provided( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + gc: GuidedContext = GuidedContext( + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN # Exclude ECR repo, it has its own detailed test below + ) + gc.run() + prompt_account_id_mock.assert_called_once() + self.assertTrue(self.did_prompt_text_like("Stage Name", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline IAM user", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("CloudFormation execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Artifact bucket", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("region", click_mock.prompt)) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_not_prompt_for_not_provided_image_repository_if_no_image_repository_is_required( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + # ECR Image Repository choices: + # 1 - No, My SAM Template won't include lambda functions of Image package-type + # 2 - Yes, I need a help creating one + # 3 - I already 
have an ECR image repository + gc_without_ecr_info: GuidedContext = GuidedContext( + stage_name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + ) + + self.assertIsNone(gc_without_ecr_info.image_repository_arn) + + click_mock.confirm.return_value = False # the user chose to not CREATE an ECR Image repository + click_mock.prompt.side_effect = [None, "0"] + gc_without_ecr_info.run() + self.assertIsNone(gc_without_ecr_info.image_repository_arn) + self.assertFalse(gc_without_ecr_info.create_image_repository) + self.assertFalse(self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt)) + + click_mock.confirm.return_value = True # the user chose to CREATE an ECR Image repository + click_mock.prompt.side_effect = [None, None, "0"] + gc_without_ecr_info.run() + self.assertIsNone(gc_without_ecr_info.image_repository_arn) + self.assertTrue(gc_without_ecr_info.create_image_repository) + self.assertTrue(self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt)) + + click_mock.confirm.return_value = True # the user already has a repo + click_mock.prompt.side_effect = [None, ANY_IMAGE_REPOSITORY_ARN, "0"] + gc_without_ecr_info.run() + self.assertFalse(gc_without_ecr_info.create_image_repository) + self.assertTrue( + self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt) + ) # we've asked about it + self.assertEqual(gc_without_ecr_info.image_repository_arn, ANY_IMAGE_REPOSITORY_ARN) + + @staticmethod + def did_prompt_text_like(txt, click_prompt_mock): + txt = txt.lower() + for kall in click_prompt_mock.call_args_list: + args, kwargs = kall + if args: + text = args[0].lower() + else: + text = kwargs.get("text", "").lower() + if txt in text: + return True + return False + + +class 
TestGuidedContext_prompt_account_id(TestCase): + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_can_display_profiles_and_environment( + self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock + ): + getenv_mock.return_value = "not None" + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = "1" # select environment variable + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + click_mock.prompt.assert_called_once_with( + ANY, show_choices=False, show_default=False, type=click_mock.Choice(["1", "2", "3", "q"]) + ) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_wont_show_environment_option_when_it_doesnt_exist( + self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock + ): + getenv_mock.return_value = None + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = "1" # select environment variable + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + click_mock.prompt.assert_called_once_with( + ANY, show_choices=False, show_default=False, type=click_mock.Choice(["2", "3", "q"]) + ) + + 
@patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_select_environment_unset_self_profile( + self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock + ): + getenv_mock.return_value = "not None" + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = "1" # select environment variable + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + self.assertEquals(None, guided_context_mock.profile) + + @parameterized.expand( + [ + ( + "2", + "profile1", + ), + ( + "3", + "profile2", + ), + ] + ) + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_select_profile_set_profile_to_its_name( + self, + profile_selection, + expected_profile, + list_available_profiles_mock, + getenv_mock, + click_mock, + get_current_account_id_mock, + ): + getenv_mock.return_value = "not None" + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = profile_selection + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + self.assertEquals(expected_profile, guided_context_mock.profile) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.sys.exit") + 
@patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_select_quit( + self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock, exit_mock + ): + getenv_mock.return_value = "not None" + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = "q" # quit + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + exit_mock.assert_called_once_with(0) diff --git a/tests/unit/commands/pipeline/init/__init__.py b/tests/unit/commands/pipeline/init/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/pipeline/init/test_cli.py b/tests/unit/commands/pipeline/init/test_cli.py new file mode 100644 index 0000000000..2e7cd0699b --- /dev/null +++ b/tests/unit/commands/pipeline/init/test_cli.py @@ -0,0 +1,22 @@ +from unittest import TestCase +from unittest.mock import patch + +from click.testing import CliRunner + +from samcli.commands.pipeline.init.cli import cli as init_cmd +from samcli.commands.pipeline.init.cli import do_cli as init_cli + + +class TestCli(TestCase): + @patch("samcli.commands.pipeline.init.cli.do_cli") + def test_cli_default_flow(self, do_cli_mock): + runner: CliRunner = CliRunner() + runner.invoke(init_cmd) + # Currently we support the interactive mode only, i.e. 
we don't accept any command arguments, + # instead we ask the user about the required arguments in an interactive way + do_cli_mock.assert_called_once_with(False) # Called without arguments + + @patch("samcli.commands.pipeline.init.cli.InteractiveInitFlow.do_interactive") + def test_do_cli(self, do_interactive_mock): + init_cli(False) + do_interactive_mock.assert_called_once_with() # Called without arguments diff --git a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py new file mode 100644 index 0000000000..2cdaacc91e --- /dev/null +++ b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py @@ -0,0 +1,566 @@ +import json +import shutil +import tempfile +from unittest import TestCase +from unittest.mock import patch, Mock, call +import os +from pathlib import Path + +from parameterized import parameterized + +from samcli.commands.exceptions import AppPipelineTemplateMetadataException +from samcli.commands.pipeline.init.interactive_init_flow import ( + InteractiveInitFlow, + PipelineTemplateCloneException, + APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, + shared_path, + CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME, + _prompt_cicd_provider, + _prompt_provider_pipeline_template, + _get_pipeline_template_metadata, + _copy_dir_contents_to_cwd, +) +from samcli.commands.pipeline.init.pipeline_templates_manifest import AppPipelineTemplateManifestException +from samcli.lib.utils.git_repo import CloneRepoException +from samcli.lib.cookiecutter.interactive_flow_creator import QuestionsNotFoundException + + +class TestInteractiveInitFlow(TestCase): + @patch("samcli.commands.pipeline.init.interactive_init_flow._read_app_pipeline_templates_manifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow._prompt_pipeline_template") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._generate_from_pipeline_template") + 
@patch("samcli.commands.pipeline.init.interactive_init_flow.shared_path") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.lib.cookiecutter.question.click") + def test_app_pipeline_templates_clone_fail_when_an_old_clone_exists( + self, + click_mock, + clone_mock, + shared_path_mock, + generate_from_pipeline_template_mock, + select_pipeline_template_mock, + read_app_pipeline_templates_manifest_mock, + ): + # setup + clone_mock.side_effect = CloneRepoException # clone fail + app_pipeline_templates_path_mock = Mock() + selected_pipeline_template_path_mock = Mock() + pipeline_templates_manifest_mock = Mock() + shared_path_mock.joinpath.return_value = app_pipeline_templates_path_mock + app_pipeline_templates_path_mock.exists.return_value = True # An old clone exists + app_pipeline_templates_path_mock.joinpath.return_value = selected_pipeline_template_path_mock + read_app_pipeline_templates_manifest_mock.return_value = pipeline_templates_manifest_mock + click_mock.prompt.return_value = "1" # App pipeline templates + + # trigger + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + # verify + clone_mock.assert_called_once_with( + shared_path_mock, APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, replace_existing=True + ) + app_pipeline_templates_path_mock.exists.assert_called_once() + read_app_pipeline_templates_manifest_mock.assert_called_once_with(app_pipeline_templates_path_mock) + select_pipeline_template_mock.assert_called_once_with(pipeline_templates_manifest_mock) + generate_from_pipeline_template_mock.assert_called_once_with(selected_pipeline_template_path_mock) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.shared_path") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.lib.cookiecutter.question.click") + def test_app_pipeline_templates_clone_fail_when_no_old_clone_exist(self, click_mock, clone_mock, shared_path_mock): + # setup + clone_mock.side_effect 
= CloneRepoException # clone fail + app_pipeline_templates_path_mock = Mock() + shared_path_mock.joinpath.return_value = app_pipeline_templates_path_mock + app_pipeline_templates_path_mock.exists.return_value = False # No old clone exists + click_mock.prompt.return_value = "1" # App pipeline templates + + # trigger + with self.assertRaises(PipelineTemplateCloneException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.lib.cookiecutter.question.click") + def test_custom_pipeline_template_clone_fail(self, question_click_mock, init_click_mock, clone_mock): + # setup + clone_mock.side_effect = CloneRepoException # clone fail + question_click_mock.prompt.return_value = "2" # Custom pipeline templates + init_click_mock.prompt.return_value = ( + "https://github.com/any-custom-pipeline-template-repo.git" # Custom pipeline template repo URL + ) + + # trigger + with self.assertRaises(PipelineTemplateCloneException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow._read_app_pipeline_templates_manifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.lib.cookiecutter.question.click") + def test_app_pipeline_templates_with_invalid_manifest( + self, click_mock, clone_mock, read_app_pipeline_templates_manifest_mock + ): + # setup + app_pipeline_templates_path_mock = Mock() + clone_mock.return_value = app_pipeline_templates_path_mock + read_app_pipeline_templates_manifest_mock.side_effect = AppPipelineTemplateManifestException("") + click_mock.prompt.return_value = "1" # App pipeline templates + + # trigger + with self.assertRaises(AppPipelineTemplateManifestException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + 
@patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch("samcli.commands.pipeline.init.interactive_init_flow.PipelineTemplatesManifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_app_pipeline_template_happy_case( + self, + click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + clone_mock, + PipelineTemplatesManifest_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + samconfig_mock, + ): + # setup + any_app_pipeline_templates_path = Path( + os.path.normpath(shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME)) + ) + clone_mock.return_value = any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + PipelineTemplatesManifest_mock.return_value = pipeline_templates_manifest_mock + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=cookiecutter_output_dir_mock) + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + 
interactive_flow_mock.run.return_value = cookiecutter_context_mock + config_file = Mock() + samconfig_mock.return_value = config_file + config_file.exists.return_value = True + config_file.get_stage_names.return_value = ["testing", "prod"] + config_file.get_stage_names.return_value = ["testing", "prod"] + config_file.get_all.return_value = {"pipeline_execution_role": "arn:aws:iam::123456789012:role/execution-role"} + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", # choose "Jenkins" when prompt for CI/CD system. (See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2} + + # trigger + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + # verify + osutils_mock.mkdir_temp.assert_called() # cookiecutter project is generated to temp + expected_cookicutter_template_location = any_app_pipeline_templates_path.joinpath(jenkins_template_location) + clone_mock.assert_called_once_with(shared_path, APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, replace_existing=True) + PipelineTemplatesManifest_mock.assert_called_once() + create_interactive_flow_mock.assert_called_once_with( + str(expected_cookicutter_template_location.joinpath("questions.json")) + ) + interactive_flow_mock.run.assert_called_once_with( + { + str(["testing", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", + str(["1", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", + str(["prod", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", + str(["2", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role", + str(["stage_names_message"]): "Here are the stage names detected " + f'in {os.path.join(".aws-sam", "pipeline", "pipelineconfig.toml")}:\n\t1 - testing\n\t2 - prod', + } + ) + 
cookiecutter_mock.assert_called_once_with( + template=str(expected_cookicutter_template_location), + output_dir=cookiecutter_output_dir_mock, + no_input=True, + extra_context=cookiecutter_context_mock, + overwrite_if_exists=True, + ) + + @patch("samcli.commands.pipeline.init.interactive_init_flow._read_app_pipeline_templates_manifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_when_pipeline_template_missing_questions_file( + self, click_mock, clone_mock, read_app_pipeline_templates_manifest_mock + ): + # setup + any_app_pipeline_templates_path = shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME) + clone_mock.return_value = any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + read_app_pipeline_templates_manifest_mock.return_value = pipeline_templates_manifest_mock + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", # choose "Jenkins" when prompt for CI/CD system. 
(See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + + # trigger + with self.assertRaises(QuestionsNotFoundException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow.os") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._generate_from_pipeline_template") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_custom_local_existing_path_will_not_do_git_clone( + self, + questions_click_mock, + init_click_mock, + clone_mock, + generate_from_pipeline_template_mock, + osutils_mock, + os_mock, + ): + # setup + local_pipeline_templates_path = "/any/existing/local/path" + os_mock.path.exists.return_value = True + questions_click_mock.prompt.return_value = "2" # Custom pipeline templates + init_click_mock.prompt.return_value = local_pipeline_templates_path # git repo path + # trigger + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + # verify + osutils_mock.mkdir_temp.assert_not_called() + clone_mock.assert_not_called() + generate_from_pipeline_template_mock.assert_called_once_with(Path(local_pipeline_templates_path)) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + 
@patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_custom_remote_pipeline_template_happy_case( + self, + questions_click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + init_click_mock, + clone_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + ): + # setup + any_temp_dir = "/tmp/any/dir" + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(side_effect=[any_temp_dir, cookiecutter_output_dir_mock]) + osutils_mock.mkdir_temp.return_value.__exit__ = Mock() + any_custom_pipeline_templates_path = Path(os.path.join(any_temp_dir, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME)) + clone_mock.return_value = any_custom_pipeline_templates_path + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + interactive_flow_mock.run.return_value = cookiecutter_context_mock + _copy_dir_contents_to_cwd_mock.return_value = ["file1"] + + questions_click_mock.prompt.return_value = "2" # Custom pipeline templates + init_click_mock.prompt.return_value = "https://github.com/any-custom-pipeline-template-repo.git" + _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2} + + # trigger + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + # verify + # Custom templates are cloned to temp; cookiecutter project is generated to temp + osutils_mock.mkdir_temp.assert_called() + clone_mock.assert_called_once_with( + Path(any_temp_dir), CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME, replace_existing=True + ) + create_interactive_flow_mock.assert_called_once_with( + str(any_custom_pipeline_templates_path.joinpath("questions.json")) + ) + interactive_flow_mock.run.assert_called_once() + cookiecutter_mock.assert_called_once_with( + 
template=str(any_custom_pipeline_templates_path), + output_dir=cookiecutter_output_dir_mock, + no_input=True, + extra_context=cookiecutter_context_mock, + overwrite_if_exists=True, + ) + + @patch("samcli.lib.cookiecutter.question.click") + def test_prompt_cicd_provider_will_not_prompt_if_the_list_of_providers_has_only_one_provider(self, click_mock): + gitlab_provider = Mock(id="gitlab", display_name="Gitlab CI/CD") + providers = [gitlab_provider] + + chosen_provider = _prompt_cicd_provider(providers) + click_mock.prompt.assert_not_called() + self.assertEqual(chosen_provider, gitlab_provider) + + jenkins_provider = Mock(id="jenkins", display_name="Jenkins") + providers.append(jenkins_provider) + click_mock.prompt.return_value = "2" + chosen_provider = _prompt_cicd_provider(providers) + click_mock.prompt.assert_called_once() + self.assertEqual(chosen_provider, jenkins_provider) + + @patch("samcli.lib.cookiecutter.question.click") + def test_prompt_provider_pipeline_template_will_not_prompt_if_the_list_of_templatess_has_only_one_provider( + self, click_mock + ): + template1 = Mock(display_name="anyName1", location="anyLocation1", provider="a provider") + template2 = Mock(display_name="anyName2", location="anyLocation2", provider="a provider") + templates = [template1] + + chosen_template = _prompt_provider_pipeline_template(templates) + click_mock.prompt.assert_not_called() + self.assertEqual(chosen_template, template1) + + templates.append(template2) + click_mock.prompt.return_value = "2" + chosen_template = _prompt_provider_pipeline_template(templates) + click_mock.prompt.assert_called_once() + self.assertEqual(chosen_template, template2) + + def test_get_pipeline_template_metadata_can_load(self): + with tempfile.TemporaryDirectory() as dir: + metadata = {"number_of_stages": 2} + with open(Path(dir, "metadata.json"), "w") as f: + json.dump(metadata, f) + self.assertEquals(metadata, _get_pipeline_template_metadata(dir)) + + def 
test_get_pipeline_template_metadata_not_exist(self): + with tempfile.TemporaryDirectory() as dir: + with self.assertRaises(AppPipelineTemplateMetadataException): + _get_pipeline_template_metadata(dir) + + @parameterized.expand( + [ + ('["not_a_dict"]',), + ("not a json"), + ] + ) + def test_get_pipeline_template_metadata_not_valid(self, metadata_str): + with tempfile.TemporaryDirectory() as dir: + with open(Path(dir, "metadata.json"), "w") as f: + f.write(metadata_str) + with self.assertRaises(AppPipelineTemplateMetadataException): + _get_pipeline_template_metadata(dir) + + +class TestInteractiveInitFlowWithBootstrap(TestCase): + @patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch( + "samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._prompt_run_bootstrap_within_pipeline_init" + ) + @patch("samcli.commands.pipeline.init.interactive_init_flow.PipelineTemplatesManifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_with_bootstrap_but_answer_no( + self, + click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + clone_mock, + PipelineTemplatesManifest_mock, + _prompt_run_bootstrap_within_pipeline_init_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + samconfig_mock, + ): + # setup + any_app_pipeline_templates_path = Path( + os.path.normpath(shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME)) + ) + clone_mock.return_value = 
any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + PipelineTemplatesManifest_mock.return_value = pipeline_templates_manifest_mock + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=cookiecutter_output_dir_mock) + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + interactive_flow_mock.run.return_value = cookiecutter_context_mock + config_file = Mock() + samconfig_mock.return_value = config_file + config_file.exists.return_value = True + config_file.get_stage_names.return_value = ["testing"] + config_file.get_all.return_value = {"pipeline_execution_role": "arn:aws:iam::123456789012:role/execution-role"} + _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2} + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", + # choose "Jenkins" when prompt for CI/CD system. 
(See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + + _prompt_run_bootstrap_within_pipeline_init_mock.return_value = False # not to bootstrap + + # trigger + InteractiveInitFlow(allow_bootstrap=True).do_interactive() + + # verify + _prompt_run_bootstrap_within_pipeline_init_mock.assert_called_once_with(["testing"], 2) + + @parameterized.expand( + [ + ([["testing"], ["testing", "prod"]], [call(["testing"], 2)]), + ([[], ["testing"], ["testing", "prod"]], [call([], 2), call(["testing"], 2)]), + ] + ) + @patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch( + "samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._prompt_run_bootstrap_within_pipeline_init" + ) + @patch("samcli.commands.pipeline.init.interactive_init_flow.PipelineTemplatesManifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_with_bootstrap_answer_yes( + self, + get_stage_name_side_effects, + _prompt_run_bootstrap_expected_calls, + click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + clone_mock, + PipelineTemplatesManifest_mock, + _prompt_run_bootstrap_within_pipeline_init_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + samconfig_mock, + ): + # setup + any_app_pipeline_templates_path = Path( + os.path.normpath(shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME)) + ) + 
clone_mock.return_value = any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + PipelineTemplatesManifest_mock.return_value = pipeline_templates_manifest_mock + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=cookiecutter_output_dir_mock) + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + interactive_flow_mock.run.return_value = cookiecutter_context_mock + config_file = Mock() + samconfig_mock.return_value = config_file + config_file.exists.return_value = True + config_file.get_stage_names.side_effect = get_stage_name_side_effects + config_file.get_all.return_value = {"pipeline_execution_role": "arn:aws:iam::123456789012:role/execution-role"} + _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2} + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", + # choose "Jenkins" when prompt for CI/CD system. 
(See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + + _prompt_run_bootstrap_within_pipeline_init_mock.return_value = True # to bootstrap + + # trigger + InteractiveInitFlow(allow_bootstrap=True).do_interactive() + + # verify + _prompt_run_bootstrap_within_pipeline_init_mock.assert_has_calls(_prompt_run_bootstrap_expected_calls) + + +class TestInteractiveInitFlow_copy_dir_contents_to_cwd(TestCase): + def tearDown(self) -> None: + if Path("file").exists(): + Path("file").unlink() + shutil.rmtree(os.path.join(".aws-sam", "pipeline"), ignore_errors=True) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.click.confirm") + def test_copy_dir_contents_to_cwd_no_need_override(self, confirm_mock): + with tempfile.TemporaryDirectory() as source: + confirm_mock.return_value = True + Path(source, "file").touch() + Path(source, "file").write_text("hi") + file_paths = _copy_dir_contents_to_cwd(source) + confirm_mock.assert_not_called() + self.assertEqual("hi", Path("file").read_text(encoding="utf-8")) + self.assertEqual([str(Path(".", "file"))], file_paths) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.click.confirm") + def test_copy_dir_contents_to_cwd_override(self, confirm_mock): + with tempfile.TemporaryDirectory() as source: + confirm_mock.return_value = True + Path(source, "file").touch() + Path(source, "file").write_text("hi") + Path("file").touch() + file_paths = _copy_dir_contents_to_cwd(source) + confirm_mock.assert_called_once() + self.assertEqual("hi", Path("file").read_text(encoding="utf-8")) + self.assertEqual([str(Path(".", "file"))], file_paths) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.click.confirm") + def test_copy_dir_contents_to_cwd_not_override(self, confirm_mock): + with tempfile.TemporaryDirectory() as source: + confirm_mock.return_value = False + Path(source, "file").touch() + Path(source, 
"file").write_text("hi") + Path("file").touch() + file_paths = _copy_dir_contents_to_cwd(source) + confirm_mock.assert_called_once() + self.assertEqual("", Path("file").read_text(encoding="utf-8")) + self.assertEqual([str(Path(".aws-sam", "pipeline", "generated-files", "file"))], file_paths) diff --git a/tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py b/tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py new file mode 100644 index 0000000000..d35541c3f6 --- /dev/null +++ b/tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py @@ -0,0 +1,82 @@ +from unittest import TestCase +import os +from pathlib import Path +from samcli.commands.pipeline.init.pipeline_templates_manifest import ( + Provider, + PipelineTemplatesManifest, + PipelineTemplateMetadata, + AppPipelineTemplateManifestException, +) +from samcli.lib.utils import osutils + +INVALID_YAML_MANIFEST = """ +providers: +- Jenkins with wrong identation +""" + +MISSING_KEYS_MANIFEST = """ +NotProviders: + - Jenkins +Templates: + - NotName: jenkins-two-environments-pipeline + provider: Jenkins + location: templates/cookiecutter-jenkins-two-environments-pipeline +""" + +VALID_MANIFEST = """ +providers: + - displayName: Jenkins + id: jenkins + - displayName: Gitlab CI/CD + id: gitlab + - displayName: Github Actions + id: github-actions +templates: + - displayName: jenkins-two-environments-pipeline + provider: jenkins + location: templates/cookiecutter-jenkins-two-environments-pipeline + - displayName: gitlab-two-environments-pipeline + provider: gitlab + location: templates/cookiecutter-gitlab-two-environments-pipeline + - displayName: Github-Actions-two-environments-pipeline + provider: github-actions + location: templates/cookiecutter-github-actions-two-environments-pipeline +""" + + +class TestCli(TestCase): + def test_manifest_file_not_found(self): + non_existing_path = Path(os.path.normpath("/any/non/existing/manifest.yaml")) + with 
self.assertRaises(AppPipelineTemplateManifestException): + PipelineTemplatesManifest(manifest_path=non_existing_path) + + def test_invalid_yaml_manifest_file(self): + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + manifest_path = os.path.normpath(os.path.join(tempdir, "manifest.yaml")) + with open(manifest_path, "w", encoding="utf-8") as fp: + fp.write(INVALID_YAML_MANIFEST) + with self.assertRaises(AppPipelineTemplateManifestException): + PipelineTemplatesManifest(manifest_path=Path(manifest_path)) + + def test_manifest_missing_required_keys(self): + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + manifest_path = os.path.normpath(os.path.join(tempdir, "manifest.yaml")) + with open(manifest_path, "w", encoding="utf-8") as fp: + fp.write(MISSING_KEYS_MANIFEST) + with self.assertRaises(AppPipelineTemplateManifestException): + PipelineTemplatesManifest(manifest_path=Path(manifest_path)) + + def test_manifest_happy_case(self): + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + manifest_path = os.path.normpath(os.path.join(tempdir, "manifest.yaml")) + with open(manifest_path, "w", encoding="utf-8") as fp: + fp.write(VALID_MANIFEST) + manifest = PipelineTemplatesManifest(manifest_path=Path(manifest_path)) + self.assertEquals(len(manifest.providers), 3) + gitlab_provider: Provider = next(p for p in manifest.providers if p.id == "gitlab") + self.assertEquals(gitlab_provider.display_name, "Gitlab CI/CD") + self.assertEquals(len(manifest.templates), 3) + gitlab_template: PipelineTemplateMetadata = next(t for t in manifest.templates if t.provider == "gitlab") + self.assertEquals(gitlab_template.display_name, "gitlab-two-environments-pipeline") + self.assertEquals(gitlab_template.provider, "gitlab") + self.assertEquals(gitlab_template.location, "templates/cookiecutter-gitlab-two-environments-pipeline") diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py index 8094a404c0..e62ad26a5c 100644 --- 
a/tests/unit/lib/bootstrap/test_bootstrap.py +++ b/tests/unit/lib/bootstrap/test_bootstrap.py @@ -1,23 +1,45 @@ from unittest import TestCase -from unittest.mock import patch +from unittest.mock import patch, MagicMock -from samcli.commands.exceptions import UserException -from samcli.lib.bootstrap.bootstrap import manage_stack +from samcli.commands.exceptions import UserException, CredentialsError +from samcli.lib.bootstrap.bootstrap import manage_stack, StackOutput, get_current_account_id class TestBootstrapManagedStack(TestCase): @patch("samcli.lib.bootstrap.bootstrap.manage_cloudformation_stack") def test_stack_missing_bucket(self, manage_cfn_stack_mock): - manage_cfn_stack_mock.return_value = [] + manage_cfn_stack_mock.return_value = StackOutput(stack_output=[]) with self.assertRaises(UserException): manage_stack("testProfile", "fakeRegion") - manage_cfn_stack_mock.return_value = [{"OutputKey": "NotSourceBucket", "OutputValue": "AnyValue"}] + manage_cfn_stack_mock.return_value = StackOutput( + stack_output=[{"OutputKey": "NotSourceBucket", "OutputValue": "AnyValue"}] + ) with self.assertRaises(UserException): manage_stack("testProfile", "fakeRegion") @patch("samcli.lib.bootstrap.bootstrap.manage_cloudformation_stack") def test_manage_stack_happy_case(self, manage_cfn_stack_mock): expected_bucket_name = "BucketName" - manage_cfn_stack_mock.return_value = [{"OutputKey": "SourceBucket", "OutputValue": expected_bucket_name}] + manage_cfn_stack_mock.return_value = StackOutput( + stack_output=[{"OutputKey": "SourceBucket", "OutputValue": expected_bucket_name}] + ) actual_bucket_name = manage_stack("testProfile", "fakeRegion") self.assertEqual(actual_bucket_name, expected_bucket_name) + + @patch("samcli.lib.bootstrap.bootstrap.boto3") + def test_get_current_account_id(self, boto3_mock): + session_mock = boto3_mock.Session.return_value = MagicMock() + sts_mock = MagicMock() + sts_mock.get_caller_identity.return_value = {"Account": 1234567890} + 
session_mock.client.return_value = sts_mock + account_id = get_current_account_id() + self.assertEqual(account_id, 1234567890) + + @patch("samcli.lib.bootstrap.bootstrap.boto3") + def test_get_current_account_id_missing_id(self, boto3_mock): + session_mock = boto3_mock.Session.return_value = MagicMock() + sts_mock = MagicMock() + sts_mock.get_caller_identity.return_value = {} + session_mock.client.return_value = sts_mock + with self.assertRaises(CredentialsError): + get_current_account_id() diff --git a/tests/unit/lib/cookiecutter/test_question.py b/tests/unit/lib/cookiecutter/test_question.py index c46a37fa43..2db7055357 100644 --- a/tests/unit/lib/cookiecutter/test_question.py +++ b/tests/unit/lib/cookiecutter/test_question.py @@ -27,6 +27,7 @@ def setUp(self): key=self._ANY_KEY, default=self._ANY_ANSWER, is_required=True, + allow_autofill=False, next_question_map=self._ANY_NEXT_QUESTION_MAP, default_next_question_key=self._ANY_DEFAULT_NEXT_QUESTION_KEY, ) @@ -151,6 +152,16 @@ def test_ask_resolves_from_cookiecutter_context_with_default_object_missing_keys with self.assertRaises(KeyError): question.ask(context=context) + def test_question_allow_autofill_with_default_value(self): + q = Question(text=self._ANY_TEXT, key=self._ANY_KEY, is_required=True, allow_autofill=True, default="123") + self.assertEquals("123", q.ask()) + + @patch("samcli.lib.cookiecutter.question.click") + def test_question_allow_autofill_without_default_value(self, click_mock): + answer_mock = click_mock.prompt.return_value = Mock() + q = Question(text=self._ANY_TEXT, key=self._ANY_KEY, is_required=True, allow_autofill=True) + self.assertEquals(answer_mock, q.ask()) + class TestChoice(TestCase): def setUp(self): @@ -188,7 +199,11 @@ def test_ask(self, mock_click, mock_choice): answer = self.question.ask({}) self.assertEqual(answer, TestQuestion._ANY_OPTIONS[1]) # we deduct one from user's choice (base 1 vs base 0) mock_click.prompt.assert_called_once_with( - text="Choice", 
default=self.question.default_answer, show_choices=False, type=ANY + text="Choice", + default=self.question.default_answer, + show_choices=False, + type=ANY, + show_default=self.question.default_answer is not None, ) mock_choice.assert_called_once_with(["1", "2", "3"]) diff --git a/tests/unit/lib/cookiecutter/test_template.py b/tests/unit/lib/cookiecutter/test_template.py index edb7412f59..318939f46b 100644 --- a/tests/unit/lib/cookiecutter/test_template.py +++ b/tests/unit/lib/cookiecutter/test_template.py @@ -114,11 +114,16 @@ def test_generate_project(self, mock_preprocessor, mock_postprocessor, mock_inte postprocessors=[mock_postprocessor], ) mock_preprocessor.run.return_value = self._ANY_PROCESSOR_CONTEXT - t.generate_project(context=self._ANY_INTERACTIVE_FLOW_CONTEXT) + output_dir = Mock() + t.generate_project(context=self._ANY_INTERACTIVE_FLOW_CONTEXT, output_dir=output_dir) mock_interactive_flow.run.assert_not_called() mock_preprocessor.run.assert_called_once_with(self._ANY_INTERACTIVE_FLOW_CONTEXT) mock_cookiecutter.assert_called_with( - template=self._ANY_LOCATION, output_dir=".", no_input=True, extra_context=self._ANY_PROCESSOR_CONTEXT + template=self._ANY_LOCATION, + output_dir=output_dir, + no_input=True, + extra_context=self._ANY_PROCESSOR_CONTEXT, + overwrite_if_exists=True, ) mock_postprocessor.run.assert_called_once_with(self._ANY_PROCESSOR_CONTEXT) @@ -127,7 +132,7 @@ def test_generate_project_preprocessors_exceptions(self, mock_preprocessor): t = Template(location=self._ANY_LOCATION, preprocessors=[mock_preprocessor]) with self.assertRaises(PreprocessingError): mock_preprocessor.run.side_effect = Exception("something went wrong") - t.generate_project({}) + t.generate_project({}, Mock()) @patch("samcli.lib.cookiecutter.template.cookiecutter") @patch("samcli.lib.cookiecutter.processor") @@ -135,7 +140,7 @@ def test_generate_project_postprocessors_exceptions(self, mock_postprocessor, mo t = Template(location=self._ANY_LOCATION, 
postprocessors=[mock_postprocessor]) with self.assertRaises(PostprocessingError): mock_postprocessor.run.side_effect = Exception("something went wrong") - t.generate_project({}) + t.generate_project({}, Mock()) @patch("samcli.lib.cookiecutter.template.generate_non_cookiecutter_project") @patch("samcli.lib.cookiecutter.template.cookiecutter") @@ -143,13 +148,13 @@ def test_generate_project_cookiecutter_exceptions(self, mock_cookiecutter, mock_ t = Template(location=self._ANY_LOCATION) with self.assertRaises(InvalidLocationError): mock_cookiecutter.side_effect = UnknownRepoType() - t.generate_project({}) + t.generate_project({}, Mock()) mock_cookiecutter.reset_mock() with self.assertRaises(GenerateProjectFailedError): mock_cookiecutter.side_effect = Exception("something went wrong") - t.generate_project({}) + t.generate_project({}, Mock()) mock_cookiecutter.reset_mock() # if the provided template is not a cookiecutter template, we generate a non cookiecutter template mock_cookiecutter.side_effect = RepositoryNotFound() - t.generate_project({}) + t.generate_project({}, Mock()) mock_generate_non_cookiecutter_project.assert_called_once() diff --git a/tests/unit/lib/pipeline/__init__.py b/tests/unit/lib/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/pipeline/bootstrap/__init__.py b/tests/unit/lib/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/pipeline/bootstrap/test_environment.py b/tests/unit/lib/pipeline/bootstrap/test_environment.py new file mode 100644 index 0000000000..9a12f2be15 --- /dev/null +++ b/tests/unit/lib/pipeline/bootstrap/test_environment.py @@ -0,0 +1,425 @@ +from unittest import TestCase +from unittest.mock import Mock, patch, call, MagicMock + +from samcli.lib.pipeline.bootstrap.stage import Stage + +ANY_STAGE_NAME = "ANY_STAGE_NAME" +ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN" +ANY_PIPELINE_EXECUTION_ROLE_ARN = 
"ANY_PIPELINE_EXECUTION_ROLE_ARN" +ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN = "ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN" +ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN" +ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" +ANY_ARN = "ANY_ARN" + + +class TestStage(TestCase): + def test_stage_name_is_the_only_required_field_to_initialize_an_stage(self): + stage: Stage = Stage(name=ANY_STAGE_NAME) + self.assertEqual(stage.name, ANY_STAGE_NAME) + self.assertIsNone(stage.aws_profile) + self.assertIsNone(stage.aws_region) + self.assertIsNotNone(stage.pipeline_user) + self.assertIsNotNone(stage.pipeline_execution_role) + self.assertIsNotNone(stage.cloudformation_execution_role) + self.assertIsNotNone(stage.artifacts_bucket) + self.assertIsNotNone(stage.image_repository) + + with self.assertRaises(TypeError): + Stage() + + def test_did_user_provide_all_required_resources_when_not_all_resources_are_provided(self): + stage: Stage = Stage(name=ANY_STAGE_NAME) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage(name=ANY_STAGE_NAME, pipeline_user_arn=ANY_PIPELINE_USER_ARN) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + 
create_image_repository=True, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + + def test_did_user_provide_all_required_resources_ignore_image_repository_if_it_is_not_required(self): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=False, + ) + self.assertTrue(stage.did_user_provide_all_required_resources()) + + def test_did_user_provide_all_required_resources_when_image_repository_is_required(self): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + self.assertTrue(stage.did_user_provide_all_required_resources()) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_did_user_provide_all_required_resources_returns_false_if_the_stage_was_initialized_without_any_of_the_resources_even_if_fulfilled_after_bootstrap( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + # setup + stack_output = Mock() + 
pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stack_output.get.return_value = ANY_ARN + manage_stack_mock.return_value = stack_output + stage: Stage = Stage(name=ANY_STAGE_NAME) + + self.assertFalse(stage.did_user_provide_all_required_resources()) + + stage.bootstrap(confirm_changeset=False) + # After bootstrapping, all the resources should be fulfilled + self.assertEqual(ANY_ARN, stage.pipeline_user.arn) + self.assertEqual(ANY_ARN, stage.pipeline_execution_role.arn) + self.assertEqual(ANY_ARN, stage.cloudformation_execution_role.arn) + self.assertEqual(ANY_ARN, stage.artifacts_bucket.arn) + self.assertEqual(ANY_ARN, stage.image_repository.arn) + + # although all of the resources got fulfilled, `did_user_provide_all_required_resources` should return false + # as these resources are not provided by the user + self.assertFalse(stage.did_user_provide_all_required_resources()) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch.object(Stage, "did_user_provide_all_required_resources") + def test_bootstrap_will_not_deploy_the_cfn_template_if_all_resources_are_already_provided( + self, did_user_provide_all_required_resources_mock, manage_stack_mock, click_mock + ): + did_user_provide_all_required_resources_mock.return_value = True + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=False) + manage_stack_mock.assert_not_called() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_confirm_before_deploying_unless_confirm_changeset_is_disabled( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + click_mock.confirm.return_value = False + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stage: Stage = Stage(name=ANY_STAGE_NAME) + 
stage.bootstrap(confirm_changeset=False) + click_mock.confirm.assert_not_called() + manage_stack_mock.assert_called_once() + manage_stack_mock.reset_mock() + stage.bootstrap(confirm_changeset=True) + click_mock.confirm.assert_called_once() + manage_stack_mock.assert_not_called() # As the user choose to not confirm + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_not_deploy_the_cfn_template_if_the_user_did_not_confirm( + self, manage_stack_mock, click_mock + ): + click_mock.confirm.return_value = False + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=True) + manage_stack_mock.assert_not_called() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_deploy_the_cfn_template_if_the_user_did_confirm( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + click_mock.confirm.return_value = True + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=True) + manage_stack_mock.assert_called_once() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_pass_arns_of_all_user_provided_resources_any_empty_strings_for_other_resources_to_the_cfn_stack( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + click_mock.confirm.return_value = True + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + 
image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + stage.bootstrap() + manage_stack_mock.assert_called_once() + args, kwargs = manage_stack_mock.call_args_list[0] + expected_parameter_overrides = { + "PipelineUserArn": ANY_PIPELINE_USER_ARN, + "PipelineExecutionRoleArn": "", + "CloudFormationExecutionRoleArn": "", + "ArtifactsBucketArn": ANY_ARTIFACTS_BUCKET_ARN, + "CreateImageRepository": "true", + "ImageRepositoryArn": ANY_IMAGE_REPOSITORY_ARN, + } + self.assertEqual(expected_parameter_overrides, kwargs["parameter_overrides"]) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_fullfill_all_resource_arns( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + # setup + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stack_output = Mock() + stack_output.get.return_value = ANY_ARN + manage_stack_mock.return_value = stack_output + stage: Stage = Stage(name=ANY_STAGE_NAME) + click_mock.confirm.return_value = True + + # verify resources' ARNS are empty + self.assertIsNone(stage.pipeline_user.arn) + self.assertIsNone(stage.pipeline_execution_role.arn) + self.assertIsNone(stage.cloudformation_execution_role.arn) + self.assertIsNone(stage.artifacts_bucket.arn) + + # trigger + stage.bootstrap() + + # verify + manage_stack_mock.assert_called_once() + self.assertEqual(ANY_ARN, stage.pipeline_user.arn) + self.assertEqual(ANY_ARN, stage.pipeline_execution_role.arn) + self.assertEqual(ANY_ARN, stage.cloudformation_execution_role.arn) + self.assertEqual(ANY_ARN, stage.artifacts_bucket.arn) + + @patch("samcli.lib.pipeline.bootstrap.stage.SamConfig") + def test_save_config_escapes_none_resources(self, samconfig_mock): + cmd_names = ["any", "commands"] + samconfig_instance_mock = Mock() + samconfig_mock.return_value = samconfig_instance_mock + stage: Stage = 
Stage(name=ANY_STAGE_NAME) + + empty_ecr_call = call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="image_repository", + value="", + ) + + expected_calls = [] + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.pipeline_user.arn = ANY_PIPELINE_USER_ARN + expected_calls.append( + call(cmd_names=cmd_names, section="parameters", key="pipeline_user", value=ANY_PIPELINE_USER_ARN) + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.pipeline_execution_role.arn = ANY_PIPELINE_EXECUTION_ROLE_ARN + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="pipeline_execution_role", + value=ANY_PIPELINE_EXECUTION_ROLE_ARN, + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.cloudformation_execution_role.arn = ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="cloudformation_execution_role", + value=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.artifacts_bucket.arn = "arn:aws:s3:::artifact_bucket_name" + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="artifacts_bucket", + value="artifact_bucket_name", + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.image_repository.arn = "arn:aws:ecr:us-east-2:111111111111:repository/image_repository_name" + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + 
env=ANY_STAGE_NAME, + key="image_repository", + value="111111111111.dkr.ecr.us-east-2.amazonaws.com/image_repository_name", + ) + ) + self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put) + + def trigger_and_assert_save_config_calls(self, stage, cmd_names, expected_calls, samconfig_put_mock): + stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=cmd_names) + self.assertEqual(len(expected_calls), samconfig_put_mock.call_count) + samconfig_put_mock.assert_has_calls(expected_calls, any_order=True) + samconfig_put_mock.reset_mock() + + @patch("samcli.lib.pipeline.bootstrap.stage.boto3") + def test_getting_pipeline_user_credentials(self, boto3_mock): + sm_client_mock = MagicMock() + sm_client_mock.get_secret_value.return_value = { + "SecretString": '{"aws_access_key_id": "AccessKeyId", "aws_secret_access_key": "SuperSecretKey"}' + } + session_mock = MagicMock() + session_mock.client.return_value = sm_client_mock + boto3_mock.Session.return_value = session_mock + + (key, secret) = Stage._get_pipeline_user_secret_pair("dummy_arn", None, "dummy-region") + self.assertEqual(key, "AccessKeyId") + self.assertEqual(secret, "SuperSecretKey") + sm_client_mock.get_secret_value.assert_called_once_with(SecretId="dummy_arn") + + @patch("samcli.lib.pipeline.bootstrap.stage.SamConfig") + def test_save_config_ignores_exceptions_thrown_while_calculating_artifacts_bucket_name(self, samconfig_mock): + samconfig_instance_mock = Mock() + samconfig_mock.return_value = samconfig_instance_mock + stage: Stage = Stage(name=ANY_STAGE_NAME, artifacts_bucket_arn="invalid ARN") + # calling artifacts_bucket.name() during save_config() will raise a ValueError exception, we need to make sure + # this exception is swallowed so that other configs can be safely saved to the pipelineconfig.toml file + stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=["any", "commands"]) + + 
@patch("samcli.lib.pipeline.bootstrap.stage.SamConfig") + def test_save_config_ignores_exceptions_thrown_while_calculating_image_repository_uri(self, samconfig_mock): + samconfig_instance_mock = Mock() + samconfig_mock.return_value = samconfig_instance_mock + stage: Stage = Stage(name=ANY_STAGE_NAME, image_repository_arn="invalid ARN") + # calling image_repository.get_uri() during save_config() will raise a ValueError exception, we need to make + # sure this exception is swallowed so that other configs can be safely saved to the pipelineconfig.toml file + stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=["any", "commands"]) + + @patch.object(Stage, "save_config") + def test_save_config_safe(self, save_config_mock): + save_config_mock.side_effect = Exception + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.save_config_safe(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=["commands"]) + save_config_mock.assert_called_once_with("any_config_dir", "any_pipeline.toml", ["commands"]) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_when_no_resources_provided_by_the_user(self, click_mock): + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.print_resources_summary() + self.assert_summary_has_a_message_like("The following resources were created in your account", click_mock.secho) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_when_all_resources_are_provided_by_the_user(self, click_mock): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + stage.print_resources_summary() + self.assert_summary_does_not_have_a_message_like( + 
"The following resources were created in your account", click_mock.secho + ) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_when_some_resources_are_provided_by_the_user(self, click_mock): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + stage.print_resources_summary() + self.assert_summary_has_a_message_like("The following resources were created in your account", click_mock.secho) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_prints_the_credentials_of_the_pipeline_user_iff_not_provided_by_the_user( + self, click_mock + ): + stage_with_provided_pipeline_user: Stage = Stage(name=ANY_STAGE_NAME, pipeline_user_arn=ANY_PIPELINE_USER_ARN) + stage_with_provided_pipeline_user.print_resources_summary() + self.assert_summary_does_not_have_a_message_like("AWS_ACCESS_KEY_ID", click_mock.secho) + self.assert_summary_does_not_have_a_message_like("AWS_SECRET_ACCESS_KEY", click_mock.secho) + click_mock.secho.reset_mock() + + stage_without_provided_pipeline_user: Stage = Stage(name=ANY_STAGE_NAME) + stage_without_provided_pipeline_user.print_resources_summary() + self.assert_summary_has_a_message_like("AWS_ACCESS_KEY_ID", click_mock.secho) + self.assert_summary_has_a_message_like("AWS_SECRET_ACCESS_KEY", click_mock.secho) + + def assert_summary_has_a_message_like(self, msg, click_secho_mock): + self.assertTrue( + self.does_summary_have_a_message_like(msg, click_secho_mock), + msg=f'stage resources summary does not include "{msg}" which is unexpected', + ) + + def assert_summary_does_not_have_a_message_like(self, msg, click_secho_mock): + self.assertFalse( + self.does_summary_have_a_message_like(msg, click_secho_mock), + msg=f'stage resources summary includes "{msg}" which is unexpected', + ) + + @staticmethod + def 
does_summary_have_a_message_like(msg, click_secho_mock): + msg = msg.lower() + for kall in click_secho_mock.call_args_list: + args, kwargs = kall + if args: + message = args[0].lower() + else: + message = kwargs.get("message", "").lower() + if msg in message: + return True + return False diff --git a/tests/unit/lib/pipeline/bootstrap/test_resource.py b/tests/unit/lib/pipeline/bootstrap/test_resource.py new file mode 100644 index 0000000000..f7dcab50f2 --- /dev/null +++ b/tests/unit/lib/pipeline/bootstrap/test_resource.py @@ -0,0 +1,81 @@ +from unittest import TestCase + +from samcli.lib.pipeline.bootstrap.resource import ARNParts, Resource, IAMUser, ECRImageRepository + +VALID_ARN = "arn:partition:service:region:account-id:resource-id" +INVALID_ARN = "ARN" + + +class TestArnParts(TestCase): + def test_arn_parts_of_valid_arn(self): + arn_parts: ARNParts = ARNParts(arn=VALID_ARN) + self.assertEqual(arn_parts.partition, "partition") + self.assertEqual(arn_parts.service, "service") + self.assertEqual(arn_parts.region, "region") + self.assertEqual(arn_parts.account_id, "account-id") + self.assertEqual(arn_parts.resource_id, "resource-id") + + def test_arn_parts_of_invalid_arn(self): + with self.assertRaises(ValueError): + invalid_arn = "invalid_arn" + ARNParts(arn=invalid_arn) + + +class TestResource(TestCase): + def test_resource(self): + resource = Resource(arn=VALID_ARN, comment="") + self.assertEqual(resource.arn, VALID_ARN) + self.assertTrue(resource.is_user_provided) + self.assertEqual(resource.name(), "resource-id") + + resource = Resource(arn=INVALID_ARN, comment="") + self.assertEqual(resource.arn, INVALID_ARN) + self.assertTrue(resource.is_user_provided) + with self.assertRaises(ValueError): + resource.name() + + resource = Resource(arn=None, comment="") + self.assertIsNone(resource.arn) + self.assertFalse(resource.is_user_provided) + self.assertIsNone(resource.name()) + + +class TestIAMUser(TestCase): + def test_create_iam_user(self): + user: IAMUser = 
IAMUser(arn=VALID_ARN, comment="user") + self.assertEquals(user.arn, VALID_ARN) + self.assertEquals(user.comment, "user") + self.assertIsNone(user.access_key_id) + self.assertIsNone(user.secret_access_key) + + user = IAMUser( + arn=INVALID_ARN, + access_key_id="any_access_key_id", + secret_access_key="any_secret_access_key", + comment="user", + ) + self.assertEquals(user.arn, INVALID_ARN) + self.assertEquals(user.comment, "user") + self.assertEquals(user.access_key_id, "any_access_key_id") + self.assertEquals(user.secret_access_key, "any_secret_access_key") + + +class TestECRImageRepository(TestCase): + def test_get_uri_with_valid_ecr_arn(self): + valid_ecr_arn = "arn:partition:service:region:account-id:repository/repository-name" + repo: ECRImageRepository = ECRImageRepository(arn=valid_ecr_arn, comment="ecr") + self.assertEqual(repo.get_uri(), "account-id.dkr.ecr.region.amazonaws.com/repository-name") + self.assertEquals("ecr", repo.comment) + + def test_get_uri_with_invalid_ecr_arn(self): + repo = ECRImageRepository(arn=INVALID_ARN, comment="ecr") + with self.assertRaises(ValueError): + repo.get_uri() + + def test_get_uri_with_valid_aws_arn_that_is_invalid_ecr_arn(self): + ecr_arn_missing_repository_prefix = ( + "arn:partition:service:region:account-id:repository-name-without-repository/-prefix" + ) + repo = ECRImageRepository(arn=ecr_arn_missing_repository_prefix, comment="ecr") + with self.assertRaises(ValueError): + repo.get_uri() diff --git a/tests/unit/lib/samconfig/test_samconfig.py b/tests/unit/lib/samconfig/test_samconfig.py index 74c9ee9661..42017d5490 100644 --- a/tests/unit/lib/samconfig/test_samconfig.py +++ b/tests/unit/lib/samconfig/test_samconfig.py @@ -1,11 +1,11 @@ import os from pathlib import Path - from unittest import TestCase from samcli.lib.config.exceptions import SamConfigVersionException +from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME, DEFAULT_GLOBAL_CMDNAME, DEFAULT_ENV from samcli.lib.config.version import 
VERSION_KEY, SAM_CONFIG_VERSION -from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME, DEFAULT_GLOBAL_CMDNAME +from samcli.lib.utils import osutils class TestSamConfig(TestCase): @@ -27,14 +27,25 @@ def _check_config_file(self): self.assertTrue(self.samconfig.sanity_check()) self.assertEqual(SAM_CONFIG_VERSION, self.samconfig.document.get(VERSION_KEY)) - def _update_samconfig(self, cmd_names, section, key, value, env): - self.samconfig.put(cmd_names=cmd_names, section=section, key=key, value=value, env=env) + def _update_samconfig(self, cmd_names, section, key, value, env=None): + if env: + self.samconfig.put(cmd_names=cmd_names, section=section, key=key, value=value, env=env) + else: + self.samconfig.put(cmd_names=cmd_names, section=section, key=key, value=value) self.samconfig.flush() self._check_config_file() def test_init(self): self.assertEqual(self.samconfig.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME)) + def test_get_stage_names(self): + self.assertEqual(self.samconfig.get_stage_names(), []) + self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401, env="stage1") + self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401, env="stage2") + self.assertEqual(self.samconfig.get_stage_names(), ["stage1", "stage2"]) + self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401) + self.assertEqual(self.samconfig.get_stage_names(), ["stage1", "stage2", DEFAULT_ENV]) + def test_param_overwrite(self): self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401, env="myEnv") self.assertEqual( @@ -195,3 +206,18 @@ def test_write_config_file_non_standard_version(self): self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="skip_pull_image", value=True) self.samconfig.sanity_check() self.assertEqual(self.samconfig.document.get(VERSION_KEY), 0.2) + + def 
test_write_config_file_will_create_the_file_if_not_exist(self): + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + non_existing_dir = os.path.join(tempdir, "non-existing-dir") + non_existing_file = "non-existing-file" + samconfig = SamConfig(config_dir=non_existing_dir, filename=non_existing_file) + + self.assertFalse(samconfig.exists()) + + samconfig.flush() + self.assertFalse(samconfig.exists()) # nothing to write, no need to create the file + + samconfig.put(cmd_names=["any", "command"], section="any-section", key="any-key", value="any-value") + samconfig.flush() + self.assertTrue(samconfig.exists()) diff --git a/tests/unit/lib/utils/test_managed_cloudformation_stack.py b/tests/unit/lib/utils/test_managed_cloudformation_stack.py index 9f1ea0915a..fd21b792f1 100644 --- a/tests/unit/lib/utils/test_managed_cloudformation_stack.py +++ b/tests/unit/lib/utils/test_managed_cloudformation_stack.py @@ -21,19 +21,28 @@ def _stubbed_cf_client(self): def test_session_missing_profile(self, boto_mock): boto_mock.side_effect = ProfileNotFound(profile="test-profile") with self.assertRaises(CredentialsError): - manage_stack("test-profile", "fake-region", SAM_CLI_STACK_NAME, _get_stack_template()) + manage_stack( + profile="test-profile", + region="fake-region", + stack_name=SAM_CLI_STACK_NAME, + template_body=_get_stack_template(), + ) @patch("boto3.client") def test_client_missing_credentials(self, boto_mock): boto_mock.side_effect = NoCredentialsError() with self.assertRaises(CredentialsError): - manage_stack(None, "fake-region", SAM_CLI_STACK_NAME, _get_stack_template()) + manage_stack( + profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() + ) @patch("boto3.client") def test_client_missing_region(self, boto_mock): boto_mock.side_effect = NoRegionError() with self.assertRaises(RegionError): - manage_stack(None, "fake-region", SAM_CLI_STACK_NAME, _get_stack_template()) + manage_stack( + profile=None, 
region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() + ) def test_new_stack(self): stub_cf, stubber = self._stubbed_cf_client() @@ -47,6 +56,8 @@ def test_new_stack(self): "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", + "Capabilities": ["CAPABILITY_IAM"], + "Parameters": [], } ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} stubber.add_response("create_change_set", ccs_resp, ccs_params) @@ -151,6 +162,8 @@ def test_change_set_creation_fails(self): "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", + "Capabilities": ["CAPABILITY_IAM"], + "Parameters": [], } stubber.add_client_error("create_change_set", service_error_code="ClientError", expected_params=ccs_params) stubber.activate() @@ -171,6 +184,8 @@ def test_change_set_execution_fails(self): "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", + "Capabilities": ["CAPABILITY_IAM"], + "Parameters": [], } ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} stubber.add_response("create_change_set", ccs_resp, ccs_params) From ecd02bbfb9c99cf4e71ce342123cb47ed30fb7b7 Mon Sep 17 00:00:00 2001 From: Raymond Wang <14915548+wchengru@users.noreply.github.com> Date: Tue, 20 Jul 2021 14:03:49 -0700 Subject: [PATCH 110/121] chore: bump aws-lambda-builder version to 1.5.0 (#3086) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 25efa93b05..a8131192bc 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,6 +12,6 @@ docker~=4.2.0 dateparser~=0.7 requests==2.25.1 serverlessrepo==0.1.10 -aws_lambda_builders==1.4.0 +aws_lambda_builders==1.5.0 tomlkit==0.7.2 watchdog==2.1.2 diff --git 
a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index a2f725e5fd..ae068e1c8e 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -12,10 +12,10 @@ attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 # via jsonschema -aws-lambda-builders==1.4.0 \ - --hash=sha256:3f885433bb71bae653b520e3cf4c31fe5f5b977cb770d42c631af155cd60fd2b \ - --hash=sha256:5d4e4ecb3d3290f0eec1f62b7b0d9d6b91160ae71447d95899eede392d05f75f \ - --hash=sha256:d32f79cf67b189a7598793f69797f284b2eb9a9fada562175b1e854187f95aed +aws-lambda-builders==1.5.0 \ + --hash=sha256:0167b40da88c679e21341852faf59fae2aafe36c22a560de2e4aa75c7b9dd846 \ + --hash=sha256:6fd7fddd50b7bbbb8668c44c638d685123a698bf1a866da2f34b440bca9958ad \ + --hash=sha256:c9f2259656353f98e70c49ed52b6ea8891d1f6853c2b1a9ac891772768d1697e # via aws-sam-cli (setup.py) aws-sam-translator==1.37.0 \ --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ From 3fda4c5183d638c69facf2b45839869c7a80f06f Mon Sep 17 00:00:00 2001 From: Tarun Date: Tue, 20 Jul 2021 14:04:00 -0700 Subject: [PATCH 111/121] chore: update to aws-sam-translator 1.38.0 (#3073) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 8 +-- .../lib/models/api_request_model.yaml | 9 +++ .../models/api_request_model_openapi_3.yaml | 12 ++++ .../lib/models/api_with_apikey_required.yaml | 8 +++ .../api_with_apikey_required_openapi_3.yaml | 8 +++ .../lib/models/api_with_auth_all_maximum.yaml | 52 +++++++++++++++- .../api_with_auth_all_maximum_openapi_3.yaml | 52 +++++++++++++++- .../lib/models/api_with_auth_all_minimum.yaml | 18 ++++++ .../api_with_auth_all_minimum_openapi.yaml | 18 ++++++ .../lib/models/api_with_auth_no_default.yaml | 18 ++++++ .../api_with_aws_account_blacklist.yaml | 6 ++ .../api_with_aws_account_whitelist.yaml | 13 ++++ 
...api_with_cors_and_auth_preflight_auth.yaml | 7 +++ ...cors_and_conditions_no_definitionbody.yaml | 7 +++ .../api_with_cors_and_only_methods.yaml | 5 ++ .../api_with_cors_no_definitionbody.yaml | 7 +++ ...efault_aws_iam_auth_and_no_auth_route.yaml | 14 +++++ ...h_if_conditional_with_resource_policy.yaml | 7 +++ .../models/api_with_method_aws_iam_auth.yaml | 26 ++++++++ .../validate/lib/models/api_with_mode.yaml | 22 +++++++ .../lib/models/api_with_open_api_version.yaml | 5 ++ .../models/api_with_open_api_version_2.yaml | 5 ++ .../lib/models/api_with_path_parameters.yaml | 6 ++ .../lib/models/api_with_resource_policy.yaml | 7 +++ ..._with_resource_policy_global_implicit.yaml | 15 +++++ .../lib/models/api_with_resource_refs.yaml | 5 ++ .../models/api_with_source_vpc_blacklist.yaml | 5 ++ .../models/api_with_source_vpc_whitelist.yaml | 10 +++ ...pi_with_swagger_and_openapi_with_auth.yaml | 5 ++ .../api_with_swagger_authorizer_none.yaml | 24 ++++++++ .../lib/models/api_with_usageplans.yaml | 7 +++ ...th_usageplans_shared_no_side_effect_1.yaml | 61 +++++++++++++++++++ ...th_usageplans_shared_no_side_effect_2.yaml | 34 +++++++++++ 34 files changed, 497 insertions(+), 11 deletions(-) create mode 100644 tests/functional/commands/validate/lib/models/api_with_mode.yaml create mode 100644 tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml create mode 100644 tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml diff --git a/requirements/base.txt b/requirements/base.txt index a8131192bc..d20eee865b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ boto3~=1.14 jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=1.7.2 -aws-sam-translator==1.37.0 +aws-sam-translator==1.38.0 #docker minor version updates can include breaking changes. Auto update micro version only. 
docker~=4.2.0 dateparser~=0.7 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index ae068e1c8e..34bac7b600 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -17,10 +17,10 @@ aws-lambda-builders==1.5.0 \ --hash=sha256:6fd7fddd50b7bbbb8668c44c638d685123a698bf1a866da2f34b440bca9958ad \ --hash=sha256:c9f2259656353f98e70c49ed52b6ea8891d1f6853c2b1a9ac891772768d1697e # via aws-sam-cli (setup.py) -aws-sam-translator==1.37.0 \ - --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ - --hash=sha256:26e4866627e4284afc367bee2bd04d3cf23cecc8ff879b419457715a738395a9 \ - --hash=sha256:6884d942a815450637bac48e297996df2dacc27077d25ced09d8e9ce1f6a585c +aws-sam-translator==1.38.0 \ + --hash=sha256:0ecadda9cf5ab2318f57f1253181a2151e4c53cd35d21717a923c075a5a65cb6 \ + --hash=sha256:dc6b816bb5cfd9709299f9b263fc0cf5ae60aca4166d1c90413ece651f1556bb \ + --hash=sha256:ee7c7c5e44ec67202622ca877140545496527ffcc45da3beeda966f007443a88 # via aws-sam-cli (setup.py) binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ diff --git a/tests/functional/commands/validate/lib/models/api_request_model.yaml b/tests/functional/commands/validate/lib/models/api_request_model.yaml index 4dc0c5f423..5c1d96b073 100644 --- a/tests/functional/commands/validate/lib/models/api_request_model.yaml +++ b/tests/functional/commands/validate/lib/models/api_request_model.yaml @@ -15,6 +15,15 @@ Resources: RequestModel: Model: User Required: true + AnyPath: + Type: Api + Properties: + RestApiId: HtmlApi + Path: /any + Method: any + RequestModel: + Model: User + Required: true HtmlApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml b/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml index 2e9a7d26d2..69e003ebdb 100644 --- 
a/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml +++ b/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml @@ -27,6 +27,18 @@ Resources: Path: /iam Auth: Authorizer: AWS_IAM + AnyIam: + Type: Api + Properties: + RequestModel: + Model: User + Required: true + RestApiId: + Ref: HtmlApi + Method: any + Path: /any/iam + Auth: + Authorizer: AWS_IAM HtmlApi: diff --git a/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml b/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml index 4ae8e52680..27dfe9a720 100644 --- a/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml @@ -19,3 +19,11 @@ Resources: Method: get Auth: ApiKeyRequired: true + MyApiWithApiKeyRequiredAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/ApiKeyRequiredTrue + Method: any + Auth: + ApiKeyRequired: true diff --git a/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml b/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml index e3140b5945..bd962b7709 100644 --- a/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml @@ -20,3 +20,11 @@ Resources: Method: get Auth: ApiKeyRequired: true + MyApiWithApiKeyRequiredAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/ApiKeyRequiredTrue + Method: any + Auth: + ApiKeyRequired: true diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml index 831425e6da..67e3f4a8eb 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml +++ 
b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml @@ -50,7 +50,7 @@ Resources: Context: - Authorization4 ReauthorizeEvery: 0 - + MyFunction: Type: AWS::Serverless::Function Properties: @@ -66,6 +66,14 @@ Resources: Method: get Auth: Authorizer: NONE + WithNoAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/noauth + Method: any + Auth: + Authorizer: NONE WithCognitoMultipleUserPoolsAuthorizer: Type: Api Properties: @@ -74,6 +82,14 @@ Resources: Method: post Auth: Authorizer: MyCognitoAuthMultipleUserPools + WithCognitoMultipleUserPoolsAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/cognitomultiple + Method: any + Auth: + Authorizer: MyCognitoAuthMultipleUserPools WithLambdaTokenAuthorizer: Type: Api Properties: @@ -82,7 +98,15 @@ Resources: Method: get Auth: Authorizer: MyLambdaTokenAuth - WithLambdaTokenAuthorizer: + WithLambdaTokenAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatoken + Method: any + Auth: + Authorizer: MyLambdaTokenAuth + WithLambdaTokenNoneAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi @@ -90,6 +114,14 @@ Resources: Method: patch Auth: Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole + WithLambdaTokenNoneAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatokennone + Method: any + Auth: + Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole WithLambdaRequestAuthorizer: Type: Api Properties: @@ -98,9 +130,23 @@ Resources: Method: delete Auth: Authorizer: MyLambdaRequestAuth + WithLambdaRequestAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdarequest + Method: any + Auth: + Authorizer: MyLambdaRequestAuth WithDefaultAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi Path: /users - Method: put \ No newline at end of file + Method: put + WithDefaultAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + 
Path: /any/default + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml index 0012f8bc14..5c8d3597eb 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml @@ -51,7 +51,7 @@ Resources: Context: - Authorization4 ReauthorizeEvery: 0 - + MyFunction: Type: AWS::Serverless::Function Properties: @@ -67,6 +67,14 @@ Resources: Method: get Auth: Authorizer: NONE + WithNoAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/noauth + Method: any + Auth: + Authorizer: NONE WithCognitoMultipleUserPoolsAuthorizer: Type: Api Properties: @@ -75,6 +83,14 @@ Resources: Method: post Auth: Authorizer: MyCognitoAuthMultipleUserPools + WithCognitoMultipleUserPoolsAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/cognitomultiple + Method: any + Auth: + Authorizer: MyCognitoAuthMultipleUserPools WithLambdaTokenAuthorizer: Type: Api Properties: @@ -83,7 +99,15 @@ Resources: Method: get Auth: Authorizer: MyLambdaTokenAuth - WithLambdaTokenAuthorizer: + WithLambdaTokenAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatoken + Method: any + Auth: + Authorizer: MyLambdaTokenAuth + WithLambdaTokenNoneAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi @@ -91,6 +115,14 @@ Resources: Method: patch Auth: Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole + WithLambdaTokenNoneAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatokennone + Method: any + Auth: + Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole WithLambdaRequestAuthorizer: Type: Api Properties: @@ -99,9 +131,23 @@ Resources: Method: delete Auth: Authorizer: MyLambdaRequestAuth + 
WithLambdaRequestAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdarequest + Method: any + Auth: + Authorizer: MyLambdaRequestAuth WithDefaultAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi Path: /users - Method: put \ No newline at end of file + Method: put + WithDefaultAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/default + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml index f6eda0af2c..399df76126 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml @@ -51,18 +51,36 @@ Resources: RestApiId: !Ref MyApiWithCognitoAuth Method: get Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Path: /any/cognito LambdaToken: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaTokenAuth Method: get Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Path: /any/lambda-token LambdaRequest: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaRequestAuth Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml index 486bd1250f..bfa377bbbf 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml @@ -54,18 
+54,36 @@ Resources: RestApiId: !Ref MyApiWithCognitoAuth Method: get Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Path: /any/cognito LambdaToken: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaTokenAuth Method: get Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Path: /any/lambda-token LambdaRequest: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaRequestAuth Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml index 85d591b06e..3f3900386c 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml @@ -48,18 +48,36 @@ Resources: RestApiId: !Ref MyApiWithCognitoAuth Method: get Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Path: /any/cognito LambdaToken: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaTokenAuth Method: get Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Path: /any/lambda-token LambdaRequest: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaRequestAuth Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml 
b/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml index b93e63d9b6..19b51412a9 100644 --- a/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml @@ -23,3 +23,9 @@ Resources: Properties: Method: Put Path: /get + Any: + Type: Api + Properties: + Method: any + Path: /any + diff --git a/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml b/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml index c69a9b64f3..ff55cbae2b 100644 --- a/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml @@ -26,3 +26,16 @@ Resources: ] Method: Put Path: /get + Any: + Type: Api + Properties: + Auth: + ResourcePolicy: + AwsAccountWhitelist: [ + "12345" + ] + AwsAccountBlacklist: [ + "67890" + ] + Method: any + Path: /any diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml index e984428c15..1fb222b890 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml @@ -24,6 +24,13 @@ Resources: Method: post RestApiId: !Ref ServerlessApi + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any + RestApiId: !Ref ServerlessApi + ServerlessApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml index 6070b112d9..5075726ae7 100644 --- 
a/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml @@ -48,6 +48,13 @@ Resources: Path: / Method: post + AnyHtml: + Type: Api + Properties: + RestApiId: !Ref ExplicitApi + Path: /any + Method: any + ExplicitApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml index 1ee2d92883..724de43017 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml @@ -16,4 +16,9 @@ Resources: Properties: Path: / Method: get + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml index f8b7bcd522..7d496c2f9b 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml @@ -27,6 +27,13 @@ Resources: Path: / Method: post + AnyHtml: + Type: Api + Properties: + RestApiId: !Ref ExplicitApi + Path: /any + Method: any + ExplicitApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml b/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml index 8bad587889..d3d69d577c 100644 --- a/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml @@ -19,6 +19,12 @@ Resources: RestApiId: !Ref 
MyApiWithAwsIamAuth Path: / Method: post + MyApiWithAwsIamAuthAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithAwsIamAuth + Path: /any/iam + Method: any MyApiWithNoAuth: Type: Api Properties: @@ -27,3 +33,11 @@ Resources: Method: get Auth: Authorizer: 'NONE' + MyApiWithNoAuthAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithAwsIamAuth + Path: /any/none + Method: any + Auth: + Authorizer: 'NONE' diff --git a/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml b/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml index 3ffecb9b74..cfbc74ec1e 100644 --- a/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml @@ -50,5 +50,12 @@ Resources: Ref: ExplicitApi Path: /three Method: put + AnyHtml: + Type: Api + Properties: + RestApiId: + Ref: ExplicitApi + Path: /any + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml b/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml index 8a1c8c6da2..16c06dc43e 100644 --- a/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml @@ -37,3 +37,29 @@ Resources: Auth: Authorizer: AWS_IAM InvokeRole: CALLER_CREDENTIALS + MyApiWithAwsIamAuthAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/one + Method: any + Auth: + Authorizer: AWS_IAM + MyApiWithAwsIamAuthAndCustomInvokeRoleAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/two + Method: any + Auth: + Authorizer: AWS_IAM + InvokeRole: rn:aws:iam::123:role/AUTH_AWS_IAM + MyApiWithAwsIamAuthAndDefaultInvokeRoleAnyMethod: + Type: Api + Properties: + 
RestApiId: !Ref MyApiWithoutAuth + Path: /any/three + Method: any + Auth: + Authorizer: AWS_IAM + InvokeRole: CALLER_CREDENTIALS diff --git a/tests/functional/commands/validate/lib/models/api_with_mode.yaml b/tests/functional/commands/validate/lib/models/api_with_mode.yaml new file mode 100644 index 0000000000..8df0693af4 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/api_with_mode.yaml @@ -0,0 +1,22 @@ +Resources: + Function: + Type: AWS::Serverless::Function + Properties: + CodeUri: s3://sam-demo-bucket/member_portal.zip + Handler: index.gethtml + Runtime: nodejs12.x + Events: + GetHtml: + Type: Api + Properties: + RestApiId: Api + Path: / + Method: get + + Api: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + DefinitionUri: s3://sam-demo-bucket/webpage_swagger.json + Description: my description + Mode: overwrite diff --git a/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml b/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml index 1ffd32bd6a..7efa33f629 100644 --- a/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml @@ -16,6 +16,11 @@ Resources: Properties: Path: / Method: get + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any ExplicitApi: Type: AWS::Serverless::Api Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml b/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml index 688344e032..52e6530326 100644 --- a/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml @@ -16,6 +16,11 @@ Resources: Properties: Path: / Method: get + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any ExplicitApi: Type: AWS::Serverless::Api Properties: diff --git 
a/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml b/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml index ac79e312c5..e1799d3e70 100644 --- a/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml @@ -12,6 +12,12 @@ Resources: RestApiId: HtmlApi Path: /{prameter}/resources Method: get + AnyHtml: + Type: Api + Properties: + RestApiId: HtmlApi + Path: /any/{prameter}/resources + Method: any HtmlApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml b/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml index fb9071db25..2c34783842 100644 --- a/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml @@ -37,5 +37,12 @@ Resources: Ref: ExplicitApi Path: /three Method: put + AnyHtml: + Type: Api + Properties: + RestApiId: + Ref: ExplicitApi + Path: /any + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml b/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml index d3599c73c4..613f67dc10 100644 --- a/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml @@ -21,3 +21,18 @@ Resources: Action: 'execute-api:blah', Resource: ['execute-api:/*/*/*'] }] + AddItemAnyMethod: + Type: Api + Properties: + Path: /any/add + Method: any + Auth: + ResourcePolicy: + CustomStatements: [{ + Action: 'execute-api:Invoke', + Resource: ['execute-api:/*/*/*'] + }, + { + Action: 'execute-api:blah', + Resource: ['execute-api:/*/*/*'] + }] diff --git 
a/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml b/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml index 3381677ef2..e84845cbba 100644 --- a/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml @@ -21,6 +21,11 @@ Resources: Properties: Path: /html Method: GET + GetHtmlAnyMethod: + Type: Api + Properties: + Path: /any/html + Method: any Outputs: ImplicitApiDeployment: diff --git a/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml b/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml index 65073bdede..6315a79314 100644 --- a/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml @@ -23,4 +23,9 @@ Resources: Properties: Method: Put Path: /get + ApiAnyMethod: + Type: Api + Properties: + Method: any + Path: /any/get diff --git a/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml b/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml index 1cacf39415..f67ea34d8a 100644 --- a/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml @@ -31,11 +31,21 @@ Resources: Properties: Method: Put Path: /get + ApiAnyMethod: + Type: Api + Properties: + Method: any + Path: /any/get Fetch: Type: Api Properties: Method: Post Path: /fetch + FetchAnyMethod: + Type: Api + Properties: + Method: any + Path: /any/fetch MyApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml b/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml index af30762da9..1b796e449b 100644 --- 
a/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml @@ -24,6 +24,11 @@ Resources: Properties: Path: / Method: get + GetHtmlAnyMethod: + Type: Api + Properties: + Path: /any + Method: any ExplicitApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml b/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml index eb0ae32bea..98173772ec 100644 --- a/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml @@ -85,6 +85,14 @@ Resources: Auth: Authorizer: NONE Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Auth: + Authorizer: NONE + Path: /any/cognito LambdaToken: Type: Api Properties: @@ -93,6 +101,14 @@ Resources: Auth: Authorizer: NONE Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Auth: + Authorizer: NONE + Path: /any/lambda-token LambdaRequest: Type: Api Properties: @@ -101,6 +117,14 @@ Resources: Authorizer: NONE Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Auth: + Authorizer: NONE + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool diff --git a/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml b/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml index 836d98648b..41b08e493d 100644 --- a/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml @@ -63,6 +63,13 @@ Resources: Ref: MyApiOne Method: get Path: /path/one + 
ApiKeyAnyMethod: + Type: Api + Properties: + RestApiId: + Ref: MyApiOne + Method: any + Path: /any/path/one MyFunctionTwo: Type: AWS::Serverless::Function diff --git a/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml new file mode 100644 index 0000000000..f05fe7511b --- /dev/null +++ b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml @@ -0,0 +1,61 @@ +Globals: + Api: + Auth: + ApiKeyRequired: true + UsagePlan: + CreateUsagePlan: SHARED + +Resources: + MyApiOne: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + + MyApiTwo: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + + MyFunctionOne: + Type: AWS::Serverless::Function + Properties: + Handler: index.handler + Runtime: nodejs12.x + InlineCode: | + exports.handler = async (event) => { + return { + statusCode: 200, + body: JSON.stringify(event), + headers: {} + } + } + Events: + ApiKey: + Type: Api + Properties: + RestApiId: + Ref: MyApiOne + Method: get + Path: /path/one + + MyFunctionTwo: + Type: AWS::Serverless::Function + Properties: + Handler: index.handler + Runtime: nodejs12.x + InlineCode: | + exports.handler = async (event) => { + return { + statusCode: 200, + body: JSON.stringify(event), + headers: {} + } + } + Events: + ApiKey: + Type: Api + Properties: + RestApiId: + Ref: MyApiTwo + Method: get + Path: /path/two diff --git a/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml new file mode 100644 index 0000000000..857e387692 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml @@ -0,0 +1,34 @@ +Globals: + Api: + Auth: + ApiKeyRequired: true + UsagePlan: + CreateUsagePlan: SHARED + +Resources: + 
MyApiFour: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + + MyFunctionFour: + Type: AWS::Serverless::Function + Properties: + Handler: index.handler + Runtime: nodejs12.x + InlineCode: | + exports.handler = async (event) => { + return { + statusCode: 200, + body: JSON.stringify(event), + headers: {} + } + } + Events: + ApiKey: + Type: Api + Properties: + RestApiId: + Ref: MyApiFour + Method: get + Path: /path/four From d3e078213816f52ae00240a272c329b410364955 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 20 Jul 2021 17:11:55 -0400 Subject: [PATCH 112/121] Updated to context refresh only when region and profile have non None values and removed unused class variable in delete_context --- samcli/commands/delete/delete_context.py | 11 +++++----- .../commands/delete/test_delete_context.py | 22 ++++++++++++++----- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 587005f96c..37014b74bd 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -2,7 +2,7 @@ Delete a SAM stack """ import logging -from typing import Dict + import json import boto3 @@ -35,9 +35,6 @@ class DeleteContext: - - ecr_repos: Dict[str, Dict[str, str]] - def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool): self.stack_name = stack_name self.region = region @@ -87,10 +84,8 @@ def parse_config_file(self): LOG.debug("Local config present and using the defined options") if not self.region: self.region = config_options.get("region", None) - Context.get_current_context().region = self.region if not self.profile: self.profile = config_options.get("profile", None) - Context.get_current_context().profile = self.profile self.s3_bucket = config_options.get("s3_bucket", None) self.s3_prefix = config_options.get("s3_prefix", None) @@ -102,6 +97,10 @@ def init_clients(self): 
session = boto3.Session() region = session.region_name self.region = region if region else "us-east-1" + + if self.profile: + Context.get_current_context().profile = self.profile + if self.region: Context.get_current_context().region = self.region boto_config = get_boto_config_with_user_agent() diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index c5b77a47f2..087f552bc4 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -13,8 +13,9 @@ class TestDeleteContext(TestCase): @patch("samcli.commands.delete.delete_context.click.echo") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) - def test_delete_context_stack_does_not_exist(self, patched_click_echo): + def test_delete_context_stack_does_not_exist(self, patched_click_get_current_context, patched_click_echo): with DeleteContext( stack_name="test", region="us-east-1", @@ -143,11 +144,14 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.deploy.guided_context.click.secho") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) - def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_echo): + def test_delete_context_no_s3_bucket( + self, patched_click_get_current_context, patched_click_secho, patched_click_echo + ): with DeleteContext( stack_name="test", region="us-east-1", @@ -175,12 +179,15 @@ def test_delete_context_no_s3_bucket(self, 
patched_click_secho, patched_click_ec @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) - def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confirm, patched_get_cf_template_name): + def test_guided_prompts_s3_bucket_prefix_present_execute_run( + self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name + ): patched_get_cf_template_name.return_value = "hello.template" with DeleteContext( @@ -228,6 +235,7 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @@ -235,7 +243,7 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi @patch.object(S3Uploader, "delete_artifact", MagicMock()) @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( - self, patched_confirm, patched_get_cf_template_name + self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name ): patched_get_cf_template_name.return_value = "hello.template" @@ -274,6 +282,7 @@ def 
test_guided_prompts_s3_bucket_present_no_prefix_execute_run( @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @@ -282,7 +291,7 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) @patch.object(Template, "get_ecr_repos", MagicMock(side_effect=({}, {"logical_id": {"Repository": "test_id"}}))) def test_guided_prompts_ecr_companion_stack_present_execute_run( - self, patched_confirm, patched_get_cf_template_name + self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name ): patched_get_cf_template_name.return_value = "hello.template" @@ -347,6 +356,7 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run( @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.click.echo") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @@ -355,7 +365,7 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run( @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( - self, patched_click_echo, patched_get_cf_template_name + self, patched_click_get_current_context, patched_click_echo, 
patched_get_cf_template_name ): CfUtils.get_stack_template.return_value = { "TemplateBody": {"Metadata": {"CompanionStackname": "test-098f6bcd-CompanionStack"}} From 80d542887e648d324227a9da25081a1514d91be5 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 20 Jul 2021 18:48:15 -0400 Subject: [PATCH 113/121] Added unit test for ResourceImageDict class methods --- .../lib/package/test_artifact_exporter.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 11511a3431..339340097e 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -52,6 +52,7 @@ CloudFormationResourceVersionSchemaHandlerPackage, ResourceZip, ResourceImage, + ResourceImageDict, ECRResource, ) @@ -455,6 +456,35 @@ class MockResource(ResourceImage): resource.delete(resource_id, resource_dict) self.assertEqual(self.ecr_uploader_mock.delete_artifact.call_count, 1) + @patch("samcli.lib.package.packageable_resources.upload_local_image_artifacts") + def test_resource_image_dict(self, upload_local_image_artifacts_mock): + # Property value is a path to an image + + class MockResource(ResourceImageDict): + PROPERTY_NAME = "foo" + + resource = MockResource(self.uploaders_mock, None) + + resource_id = "id" + resource_dict = {} + resource_dict[resource.PROPERTY_NAME] = "image:latest" + parent_dir = "dir" + ecr_url = "123456789.dkr.ecr.us-east-1.amazonaws.com/sam-cli" + + upload_local_image_artifacts_mock.return_value = ecr_url + + resource.export(resource_id, resource_dict, parent_dir) + + upload_local_image_artifacts_mock.assert_called_once_with( + resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.ecr_uploader_mock + ) + + self.assertEqual(resource_dict[resource.PROPERTY_NAME][resource.EXPORT_PROPERTY_CODE_KEY], ecr_url) + + self.ecr_uploader_mock.delete_artifact = MagicMock() + 
resource.delete(resource_id, resource_dict) + self.assertEqual(self.ecr_uploader_mock.delete_artifact.call_count, 1) + def test_lambda_image_resource_package_success(self): # Property value is set to an image From 6b7ec4f0719d611c944295f180f9837c5e2736f9 Mon Sep 17 00:00:00 2001 From: _sam <3804518+aahung@users.noreply.github.com> Date: Tue, 20 Jul 2021 16:16:16 -0700 Subject: [PATCH 114/121] ci: Update expected Jenkins file in pipeline integ test (#3090) --- tests/integration/testdata/pipeline/expected_jenkinsfile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile index 7a213a30f9..0271ca633e 100644 --- a/tests/integration/testdata/pipeline/expected_jenkinsfile +++ b/tests/integration/testdata/pipeline/expected_jenkinsfile @@ -145,6 +145,11 @@ pipeline { // } // } + // uncomment this to have a manual approval step before deployment to production + // stage('production-deployment-approval'){ + // input "Do you want to deploy to production environment?" 
+ // } + stage('deploy-prod') { when { branch env.MAIN_BRANCH From 9206b47ccca304e6cbaf627063fee2fba035d58c Mon Sep 17 00:00:00 2001 From: _sam <3804518+aahung@users.noreply.github.com> Date: Wed, 21 Jul 2021 06:04:53 -0700 Subject: [PATCH 115/121] chore: Refine pipeline help text and update unit test (#3091) * Update --bucket help text * Update --stage help text * Update help text * Update help text * Update help text * Update help text * Update help text * Update jenkins generated files * Update some intro texts * Remove trialing spaces --- samcli/commands/pipeline/bootstrap/cli.py | 41 +++++++++++-------- samcli/commands/pipeline/init/cli.py | 16 ++++---- .../pipeline/init/interactive_init_flow.py | 8 ++-- .../testdata/pipeline/expected_jenkinsfile | 9 ++-- 4 files changed, 40 insertions(+), 34 deletions(-) diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py index 9e8b454992..4c32ebc9b3 100644 --- a/samcli/commands/pipeline/bootstrap/cli.py +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -17,12 +17,11 @@ from .guided_context import GuidedContext from ..external_links import CONFIG_AWS_CRED_ON_CICD_URL -SHORT_HELP = "Generates the necessary AWS resources to connect your CI/CD system." +SHORT_HELP = "Generates the required AWS resources to connect your CI/CD system." HELP_TEXT = """ -SAM Pipeline Bootstrap generates the necessary AWS resources to connect your -CI/CD system. This step must be completed for each pipeline stage prior to -running sam pipeline init +This command generates the required AWS infrastructure resources to connect to your CI/CD system. +This step must be run for each deployment stage in your pipeline, prior to running the sam pipline init command. """ PIPELINE_CONFIG_DIR = os.path.join(".aws-sam", "pipeline") @@ -39,29 +38,33 @@ ) @click.option( "--stage", - help="The name of the corresponding stage. 
It is used as a suffix for the created resources.", + help="The name of the corresponding deployment stage. " + "It is used as a suffix for the created AWS infrastructure resources.", required=False, ) @click.option( "--pipeline-user", - help="An IAM user generated or referenced by sam pipeline bootstrap in order to " - "allow the connected CI/CD system to connect to the SAM CLI.", + help="The Amazon Resource Name (ARN) of the IAM user having its access key ID and secret access key " + "shared with the CI/CD system. It is used to grant this IAM user permission to access the " + "corresponding AWS account. If not provided, the command will create one along with the access " + "key ID and secret access key credentials.", required=False, ) @click.option( "--pipeline-execution-role", - help="Execution role that the CI/CD system assumes in order to make changes to resources on your behalf.", + help="The ARN of the IAM role to be assumed by the pipeline user to operate on this stage. " + "Provide it only if you want to use your own role, otherwise this command will create one.", required=False, ) @click.option( "--cloudformation-execution-role", - help="Execution role that CloudFormation assumes in order to make changes to resources on your behalf", + help="The ARN of the IAM role to be assumed by the AWS CloudFormation service while deploying the " + "application's stack. Provide only if you want to use your own role, otherwise the command will create one.", required=False, ) @click.option( "--bucket", - help="The name of the S3 bucket where this command uploads your CloudFormation template. 
This is required for" - "deployments of templates sized greater than 51,200 bytes.", + help="The ARN of the Amazon S3 bucket to hold the AWS SAM artifacts.", required=False, ) @click.option( @@ -73,14 +76,16 @@ ) @click.option( "--image-repository", - help="ECR repo uri where this command uploads the image artifacts that are referenced in your template.", + help="The ARN of an Amazon ECR image repository to hold the container images of Lambda functions or " + "layers that have a package type of Image. If provided, the --create-image-repository options is ignored. " + "If not provided and --create-image-repository is specified, the command will create one.", required=False, ) @click.option( "--confirm-changeset/--no-confirm-changeset", default=True, is_flag=True, - help="Prompt to confirm if the resources is to be deployed by SAM CLI.", + help="Prompt to confirm if the resources are to be deployed.", ) @common_options @aws_creds_options @@ -150,10 +155,12 @@ def do_cli( dedent( """\ - sam pipeline bootstrap generates the necessary AWS resources to connect a stage in - your CI/CD system. We will ask for [1] stage definition, [2] account details, and - [3] references to existing resources in order to bootstrap these pipeline - resources. + sam pipeline bootstrap generates the required AWS infrastructure resources to connect + to your CI/CD system. This step must be run for each deployment stage in your pipeline, + prior to running the sam pipeline init command. + + We will ask for [1] stage definition, [2] account details, and + [3] references to existing resources in order to bootstrap these pipeline resources. 
""" ), ) diff --git a/samcli/commands/pipeline/init/cli.py b/samcli/commands/pipeline/init/cli.py index bcbe205c6a..a7223398c9 100644 --- a/samcli/commands/pipeline/init/cli.py +++ b/samcli/commands/pipeline/init/cli.py @@ -10,14 +10,14 @@ from samcli.commands.pipeline.init.interactive_init_flow import InteractiveInitFlow from samcli.lib.telemetry.metric import track_command -SHORT_HELP = "Generates CI/CD pipeline configuration files." +SHORT_HELP = "Generates a CI/CD pipeline configuration file." HELP_TEXT = """ -sam pipeline init generates a pipeline configuration file that you can use to connect your -AWS account(s) to your CI/CD system. Before using sam pipeline init, you must -bootstrap the necessary resources for each stage in your pipeline. You can do this -by running sam pipeline init --bootstrap to be guided through the setup and configuration -file generation process, or refer to resources you have previously created with the -sam pipeline bootstrap command. +This command generates a pipeline configuration file that your CI/CD system can use to deploy +serverless applications using AWS SAM. + +Before using sam pipeline init, you must bootstrap the necessary resources for each stage in your pipeline. +You can do this by running sam pipeline init --bootstrap to be guided through the setup and configuration +file generation process, or refer to resources you have previously created with the sam pipeline bootstrap command. 
""" @@ -27,7 +27,7 @@ "--bootstrap", is_flag=True, default=False, - help="Allow bootstrapping resources.", + help="Enable interactive mode that walks the user through creating necessary AWS infrastructure resources.", ) @cli_framework_options @pass_context diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py index 7504f3a66b..d4e989ebfa 100644 --- a/samcli/commands/pipeline/init/interactive_init_flow.py +++ b/samcli/commands/pipeline/init/interactive_init_flow.py @@ -57,10 +57,10 @@ def do_interactive(self) -> None: dedent( """\ - sam pipeline init generates a pipeline configuration file that you can use to connect your - AWS account(s) to your CI/CD system. We will guide you through the process to - bootstrap resources for each stage, then walk through the details necessary for - creating the pipeline config file. + sam pipeline init generates a pipeline configuration file that your CI/CD system + can use to deploy serverless applications using AWS SAM. + We will guide you through the process to bootstrap resources for each stage, + then walk through the details necessary for creating the pipeline config file. Please ensure you are in the root folder of your SAM application before you begin. """ diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile index 0271ca633e..14adf3b92b 100644 --- a/tests/integration/testdata/pipeline/expected_jenkinsfile +++ b/tests/integration/testdata/pipeline/expected_jenkinsfile @@ -145,11 +145,6 @@ pipeline { // } // } - // uncomment this to have a manual approval step before deployment to production - // stage('production-deployment-approval'){ - // input "Do you want to deploy to production environment?" 
- // } - stage('deploy-prod') { when { branch env.MAIN_BRANCH @@ -160,6 +155,10 @@ pipeline { } } steps { + // uncomment this to have a manual approval step before deployment to production + // timeout(time: 24, unit: 'HOURS') { + // input 'Please confirm before starting production deployment' + // } withAWS( credentials: env.PIPELINE_USER_CREDENTIAL_ID, region: env.PROD_REGION, From aae1e97c5eeadb0788aea1b291603fc0b5dddaa3 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 10:55:15 -0700 Subject: [PATCH 116/121] Clearing pipeline integ test buckets with versioned objects. (#3094) * Clearing pipeline integ test buckets with versioned objects. * Fixing black formatting. Co-authored-by: Tarun Mall --- tests/integration/pipeline/base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py index f82d27e357..af47374e93 100644 --- a/tests/integration/pipeline/base.py +++ b/tests/integration/pipeline/base.py @@ -82,9 +82,11 @@ def _cleanup_s3_buckets(self, stack_name): for resource in stack_resources["StackResources"] if resource["ResourceType"] == "AWS::S3::Bucket" ] - s3_client = boto3.client("s3") + session = boto3.session.Session() + s3_client = session.resource("s3") for bucket in buckets: - s3_client.delete_bucket(Bucket=bucket.get("PhysicalResourceId")) + bucket = s3_client.Bucket(bucket) + bucket.object_versions.delete() except botocore.exceptions.ClientError: """No need to fail in cleanup""" From 8bbb7c5ca7366242c53cc66e9181e073b9fcbfd2 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 11:22:35 -0700 Subject: [PATCH 117/121] Fixing bug in bucket cleanup. 
(#3096) Co-authored-by: Tarun Mall --- tests/integration/pipeline/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py index af47374e93..9a52820b63 100644 --- a/tests/integration/pipeline/base.py +++ b/tests/integration/pipeline/base.py @@ -85,7 +85,7 @@ def _cleanup_s3_buckets(self, stack_name): session = boto3.session.Session() s3_client = session.resource("s3") for bucket in buckets: - bucket = s3_client.Bucket(bucket) + bucket = s3_client.Bucket(bucket.get("PhysicalResourceId")) bucket.object_versions.delete() except botocore.exceptions.ClientError: """No need to fail in cleanup""" From 172616c906c1cf64f8357ffa0d07ed8f9d96c13d Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 11:43:03 -0700 Subject: [PATCH 118/121] Deleting bucket (#3097) Co-authored-by: Tarun Mall --- tests/integration/pipeline/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py index 9a52820b63..e776613f33 100644 --- a/tests/integration/pipeline/base.py +++ b/tests/integration/pipeline/base.py @@ -87,6 +87,7 @@ def _cleanup_s3_buckets(self, stack_name): for bucket in buckets: bucket = s3_client.Bucket(bucket.get("PhysicalResourceId")) bucket.object_versions.delete() + bucket.delete() except botocore.exceptions.ClientError: """No need to fail in cleanup""" From 1916bfa354b5d2612bd1bf9efd54a77e2bc66ff6 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 12:37:41 -0700 Subject: [PATCH 119/121] Revert "temp: disable testing against python 3.8, and enabled 3.7 (#3009)" (#3098) This reverts commit fe832185be09acb199b2a09ad73bf59e1553d131. 
Co-authored-by: Tarun Mall --- appveyor.yml | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index ed730e0a24..b96017d0d3 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,26 +9,26 @@ environment: matrix: - - PYTHON_HOME: "C:\\Python36-x64" - PYTHON_VERSION: '3.6' - PYTHON_ARCH: '64' - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_37_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_36' - AWS_ECR: 'AWS_ECR_36' - APPVEYOR_CONSOLE_DISABLE_PTY: true - - - PYTHON_HOME: "C:\\Python37-x64" - PYTHON_VERSION: '3.7' - PYTHON_ARCH: '64' - RUN_SMOKE: 1 - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_36_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_37' - AWS_ECR: 'AWS_ECR_37' - APPVEYOR_CONSOLE_DISABLE_PTY: true + # - PYTHON_HOME: "C:\\Python36-x64" + # PYTHON_VERSION: '3.6' + # PYTHON_ARCH: '64' + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_37_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_36' + # AWS_ECR: 'AWS_ECR_36' + # APPVEYOR_CONSOLE_DISABLE_PTY: true + + # - PYTHON_HOME: "C:\\Python37-x64" + # PYTHON_VERSION: '3.7' + # PYTHON_ARCH: '64' + # RUN_SMOKE: 1 + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_36_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_37' + # AWS_ECR: 'AWS_ECR_37' + # APPVEYOR_CONSOLE_DISABLE_PTY: true - PYTHON_HOME: "C:\\Python38-x64" PYTHON_VERSION: '3.8' From 50ef1244b60aae104bd5eff6430338f417821c4d Mon Sep 17 00:00:00 2001 From: Raymond Wang <14915548+wchengru@users.noreply.github.com> Date: Wed, 21 Jul 2021 15:10:28 -0700 Subject: [PATCH 120/121] chore: bump SAM CLI version to 1.27.0 (#3101) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index 1c484dccfd..15c38c4850 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.26.0" +__version__ = "1.27.0" From 1c42df379b442bd8fa16fcc6090035bdc884aa56 Mon Sep 17 00:00:00 2001 
From: Tarun Date: Wed, 21 Jul 2021 16:55:04 -0700 Subject: [PATCH 121/121] Add pipeline to pyinstaller (#3103) * Adding pipeline to pyinstaller. Co-authored-by: Tarun Mall --- installer/pyinstaller/hook-samcli.py | 3 +++ samcli/__init__.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index 2a68112a8a..72a939a9b2 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -13,6 +13,9 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + "samcli.commands.pipeline.pipeline", + "samcli.commands.pipeline.init", + "samcli.commands.pipeline.bootstrap", # default hidden import 'pkg_resources.py2_warn' is added # since pyInstaller 4.0. "pkg_resources.py2_warn", diff --git a/samcli/__init__.py b/samcli/__init__.py index 15c38c4850..572d288049 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.27.0" +__version__ = "1.27.1"