From 126373508854ed86f0f3e7366c5bbe5e3ef5f590 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 21 Jun 2021 16:27:30 -0400 Subject: [PATCH 01/16] Added methods for cf and s3 files and init UI --- samcli/cli/cli_config_file.py | 13 ++- samcli/cli/command.py | 1 + samcli/commands/delete/__init__.py | 6 ++ samcli/commands/delete/command.py | 88 +++++++++++++++++ samcli/commands/delete/delete_context.py | 117 +++++++++++++++++++++++ samcli/commands/delete/exceptions.py | 14 +++ samcli/lib/delete/__init__.py | 0 samcli/lib/delete/cf_utils.py | 105 ++++++++++++++++++++ samcli/lib/delete/utils.py | 16 ++++ samcli/lib/package/s3_uploader.py | 23 +++++ 10 files changed, 379 insertions(+), 4 deletions(-) create mode 100644 samcli/commands/delete/__init__.py create mode 100644 samcli/commands/delete/command.py create mode 100644 samcli/commands/delete/delete_context.py create mode 100644 samcli/commands/delete/exceptions.py create mode 100644 samcli/lib/delete/__init__.py create mode 100644 samcli/lib/delete/cf_utils.py create mode 100644 samcli/lib/delete/utils.py diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 67e214e122..9e2b4aa020 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -27,12 +27,14 @@ class TomlProvider: A parser for toml configuration files """ - def __init__(self, section=None): + def __init__(self, section=None, cmd_names=None): """ The constructor for TomlProvider class :param section: section defined in the configuration file nested within `cmd` + :param cmd_names: cmd_name defined in the configuration file """ self.section = section + self.cmd_names = cmd_names def __call__(self, config_path, config_env, cmd_names): """ @@ -67,18 +69,21 @@ def __call__(self, config_path, config_env, cmd_names): LOG.debug("Config file '%s' does not exist", samconfig.path()) return resolved_config + if not self.cmd_names: + self.cmd_names = cmd_names + try: LOG.debug( "Loading configuration values from [%s.%s.%s] (env.command_name.section) in config file at '%s'...", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), ) # NOTE(TheSriram): change from tomlkit table type to normal dictionary, # so that click defaults work out of the box. - resolved_config = dict(samconfig.get_all(cmd_names, self.section, env=config_env).items()) + resolved_config = dict(samconfig.get_all(self.cmd_names, self.section, env=config_env).items()) LOG.debug("Configuration values successfully loaded.") LOG.debug("Configuration values are: %s", resolved_config) @@ -87,7 +92,7 @@ def __call__(self, config_path, config_env, cmd_names): "Error reading configuration from [%s.%s.%s] (env.command_name.section) " "in configuration file at '%s' with : %s", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), str(ex), diff --git a/samcli/cli/command.py b/samcli/cli/command.py index 384529f78b..c329345f14 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -19,6 +19,7 @@ "samcli.commands.local.local", "samcli.commands.package", "samcli.commands.deploy", + "samcli.commands.delete", "samcli.commands.logs", "samcli.commands.publish", # We intentionally do not expose the `bootstrap` command for now. 
We might open it up later diff --git a/samcli/commands/delete/__init__.py b/samcli/commands/delete/__init__.py new file mode 100644 index 0000000000..ea5b0202d2 --- /dev/null +++ b/samcli/commands/delete/__init__.py @@ -0,0 +1,6 @@ +""" +`sam delete` command +""" + +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py new file mode 100644 index 0000000000..bdc201aef4 --- /dev/null +++ b/samcli/commands/delete/command.py @@ -0,0 +1,88 @@ +# """ +# CLI command for "delete" command +# """ + +import logging + +import click +from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args + +from samcli.lib.utils.version_checker import check_newer_version + +SHORT_HELP = "Delete an AWS SAM application." + +HELP_TEXT = """The sam delete command deletes a Cloudformation Stack and deletes all your resources which were created. + +\b +e.g. sam delete --stack-name sam-app --region us-east-1 + +\b +""" + +CONFIG_SECTION = "parameters" +CONFIG_COMMAND = "deploy" +LOG = logging.getLogger(__name__) + + +@click.command( + "delete", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@configuration_option(provider=TomlProvider(section=CONFIG_SECTION, cmd_names=[CONFIG_COMMAND])) +@click.option( + "--stack-name", + required=False, + help="The name of the AWS CloudFormation stack you want to delete. ", +) +@click.option( + "--s3-bucket", + required=False, + help="The name of the S3 bucket where this command delets your " "CloudFormation artifacts.", +) +@click.option( + "--s3-prefix", + required=False, + help="A prefix name that the command uses to delete the " + "artifacts' that were deployed to the S3 bucket. " + "The prefix name is a path name (folder name) for the S3 bucket.", +) +@aws_creds_options +@common_options +@pass_context +@check_newer_version +@print_cmdline_args +def cli( + ctx, + stack_name, + s3_bucket, + s3_prefix, + config_file, + config_env, +): + """ + `sam delete` command entry point + """ + + # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing + do_cli(stack_name, ctx.region, ctx.profile, s3_bucket, s3_prefix) # pragma: no cover + + +def do_cli( + stack_name, + region, + profile, + s3_bucket, + s3_prefix +): + """ + Implementation of the ``cli`` method + """ + from samcli.commands.delete.delete_context import DeleteContext + + with DeleteContext( + stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile + ) as delete_context: + delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py new file mode 100644 index 0000000000..d34d096c0c --- /dev/null +++ b/samcli/commands/delete/delete_context.py @@ -0,0 +1,117 @@ +import boto3 + +import click +from click import confirm +from click import prompt + +from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent +from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.delete.utils import get_cf_template_name +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.yamlhelper import yaml_parse +# from samcli.lib.package.artifact_exporter import Template +# from samcli.lib.package.ecr_uploader import ECRUploader +# from samcli.lib.package.uploaders import Uploaders +import docker + +class DeleteContext: + def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): + self.stack_name = stack_name + self.region = region + self.profile = profile + self.s3_bucket = s3_bucket + self.s3_prefix = s3_prefix + self.cf_utils = None + self.start_bold = "\033[1m" + self.end_bold = "\033[0m" + self.s3_uploader = None + # self.uploaders = None + self.cf_template_file_name = None + self.delete_artifacts_folder = None + self.delete_cf_template_file = None + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def run(self): + # print("Stack Name:", self.stack_name) + # print(self.s3_bucket) + # print(self.s3_prefix) + if not self.stack_name: + self.stack_name = prompt( + f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING + ) + + delete_stack = confirm( + f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}?{self.end_bold}", + default=False, + ) + # Fetch the template using the stack-name + if delete_stack: + boto_config = get_boto_config_with_user_agent() + + # Define cf_client based on the region as different regions can have same stack-names + cloudformation_client = boto3.client( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) + + s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) + ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + + self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) + + # docker_client = docker.from_env() + # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + + self.cf_utils = CfUtils(cloudformation_client) + + is_deployed = self.cf_utils.has_stack(self.stack_name) + + if is_deployed: + template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") + + template_dict = yaml_parse(template_str) + + if self.s3_bucket and self.s3_prefix: + self.delete_artifacts_folder = confirm( + f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + default=False, + ) + if not self.delete_artifacts_folder: + 
self.cf_template_file_name = get_cf_template_name(template_str, "template") + delete_cf_template_file = confirm( + f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, + ) + + click.echo("\n") + # Delete the primary stack + self.cf_utils.delete_stack(self.stack_name) + + click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) + + # Delete the artifacts + # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + # template = Template(None, None, self.uploaders, None) + # template.delete(template_dict) + + # Delete the template file using template_str + if self.delete_cf_template_file: + self.s3_uploader.delete_artifact(cf_template_file_name) + + # Delete the folder of artifacts if s3_bucket and s3_prefix provided + elif self.delete_artifacts_folder: + prefix_files = s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=self.s3_prefix) + self.s3_uploader.delete_artifact(None, prefix_files) + + # Delete the ECR companion stack + + if self.cf_template_file_name: + click.echo("- deleting template file {0}".format(cf_template_file)) + click.echo("\n") + click.echo("delete complete") + else: + click.echo("Error: The input stack {0} does not exist on Cloudformation".format(self.stack_name)) diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py new file mode 100644 index 0000000000..82c56b6bb6 --- /dev/null +++ b/samcli/commands/delete/exceptions.py @@ -0,0 +1,14 @@ +""" +Exceptions that are raised by sam delete +""" +from samcli.commands.exceptions import UserException + + +class DeleteFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to delete the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) diff --git a/samcli/lib/delete/__init__.py b/samcli/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py new file mode 100644 index 0000000000..b8bccdc651 --- /dev/null +++ b/samcli/lib/delete/cf_utils.py @@ -0,0 +1,105 @@ +""" +Delete Cloudformation stacks and s3 files +""" + +import botocore +import logging + +from samcli.commands.delete.exceptions import DeleteFailedError + +LOG = logging.getLogger(__name__) + + +class CfUtils: + def __init__(self, cloudformation_client): + self._client = cloudformation_client + + def has_stack(self, stack_name): + """ + Checks if a CloudFormation stack with given name exists + + :param stack_name: Name or ID of the stack + :return: True if stack exists. False otherwise + """ + try: + resp = self._client.describe_stacks(StackName=stack_name) + if not resp["Stacks"]: + return False + + stack = resp["Stacks"][0] + return stack["StackStatus"] != "REVIEW_IN_PROGRESS" + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. 
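
The has_stack handler above keys off CloudFormation's error text rather than an error code. As an illustrative aside, separate from the patch itself, the snippet below builds a ClientError the same way the unit tests later in this series do and shows that the message survives str(), which is what the substring check relies on; the stack name my-stack and the ValidationError code are placeholders.

from botocore.exceptions import ClientError

err = ClientError(
    error_response={
        "Error": {"Code": "ValidationError", "Message": "Stack with id my-stack does not exist"}
    },
    operation_name="DescribeStacks",
)
# str(err) renders roughly as:
#   "An error occurred (ValidationError) when calling the DescribeStacks operation:
#    Stack with id my-stack does not exist"
print("Stack with id {0} does not exist".format("my-stack") in str(err))  # prints True
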
+ + LOG.debug("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def get_stack_template(self, stack_name, stage): + try: + resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) + if not resp["TemplateBody"]: + return "" + + return resp["TemplateBody"] + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, get_stack_template will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return "" + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + + LOG.debug("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def delete_stack(self, stack_name): + try: + resp = self._client.delete_stack(StackName=stack_name) + + return resp + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + + LOG.debug("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py new file mode 100644 index 0000000000..280d24e462 --- /dev/null +++ b/samcli/lib/delete/utils.py @@ -0,0 +1,16 @@ +""" +Utilities for Delete +""" + +from samcli.lib.utils.hash import file_checksum +from samcli.lib.package.artifact_exporter import mktempfile + +def get_cf_template_name(self, template_str, extension): + with mktempfile() as temp_file: + temp_file.write(template_str) + temp_file.flush() + + filemd5 = file_checksum(temp_file.name) + remote_path = filemd5 + "." 
+ extension + + return remote_path \ No newline at end of file diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 4a64a983d0..08bbf4db23 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -144,6 +144,29 @@ def upload_with_dedup( return self.upload(file_name, remote_path) + def delete_artifact(self, file_name: str, prefix_files=None): + + try: + if not self.bucket_name: + raise BucketNotSpecifiedError() + + remote_path = file_name + if self.prefix: + if remote_path: + remote_path = "{0}/{1}".format(self.prefix, file_name) + print("- deleting", remote_path) + self.s3.delete_object(Bucket=self.bucket_name, Key=remote_path) + elif prefix_files: + for obj in prefix_files["Contents"]: + print("- deleting", obj["Key"]) + self.s3.delete_object(Bucket=self.bucket_name, Key=obj["Key"]) + + except botocore.exceptions.ClientError as ex: + error_code = ex.response["Error"]["Code"] + if error_code == "NoSuchBucket": + raise NoSuchBucketError(bucket_name=self.bucket_name) from ex + raise ex + def file_exists(self, remote_path: str) -> bool: """ Check if the file we are trying to upload already exists in S3 From ba47369e90cf966e95f799e4153571e4321e449f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 23 Jun 2021 13:36:39 -0400 Subject: [PATCH 02/16] Added unit tests for utils methods and s3_uploader --- samcli/commands/delete/command.py | 15 ++--- samcli/commands/delete/delete_context.py | 46 ++++++++------ samcli/lib/delete/cf_utils.py | 56 ++++++++--------- samcli/lib/delete/utils.py | 4 +- samcli/lib/package/s3_uploader.py | 38 ++++++++---- tests/unit/lib/delete/__init__.py | 0 tests/unit/lib/delete/test_cf_utils.py | 71 ++++++++++++++++++++++ tests/unit/lib/package/test_s3_uploader.py | 45 ++++++++++++++ 8 files changed, 204 insertions(+), 71 deletions(-) create mode 100644 tests/unit/lib/delete/__init__.py create mode 100644 tests/unit/lib/delete/test_cf_utils.py diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index bdc201aef4..13483b40e1 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -1,6 +1,6 @@ -# """ -# CLI command for "delete" command -# """ +""" +CLI command for "delete" command +""" import logging @@ -70,18 +70,13 @@ def cli( do_cli(stack_name, ctx.region, ctx.profile, s3_bucket, s3_prefix) # pragma: no cover -def do_cli( - stack_name, - region, - profile, - s3_bucket, - s3_prefix -): +def do_cli(stack_name, region, profile, s3_bucket, s3_prefix): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext + # ctx = click.get_current_context() #This is here if s3_bucket and s3_prefix options are not used with DeleteContext( stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile ) as delete_context: diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index d34d096c0c..7e4999442f 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -1,5 +1,9 @@ -import boto3 +""" +Delete a SAM stack +""" +import boto3 +import docker import click from click import confirm from click import prompt @@ -9,10 +13,12 @@ from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader from samcli.yamlhelper import yaml_parse + +# Intentionally commented # from samcli.lib.package.artifact_exporter import Template # from 
samcli.lib.package.ecr_uploader import ECRUploader # from samcli.lib.package.uploaders import Uploaders -import docker + class DeleteContext: def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): @@ -37,20 +43,24 @@ def __exit__(self, *args): pass def run(self): - # print("Stack Name:", self.stack_name) - # print(self.s3_bucket) - # print(self.s3_prefix) + """ + Delete the stack based on the argument provided by customers and samconfig.toml. + """ if not self.stack_name: self.stack_name = prompt( f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING ) + if not self.region: + self.region = prompt( + f"\t{self.start_bold}Enter region you want to delete from{self.end_bold}", type=click.STRING + ) delete_stack = confirm( f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}?{self.end_bold}", default=False, ) # Fetch the template using the stack-name - if delete_stack: + if delete_stack and self.region: boto_config = get_boto_config_with_user_agent() # Define cf_client based on the region as different regions can have same stack-names @@ -63,8 +73,8 @@ def run(self): self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - # docker_client = docker.from_env() - # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + docker_client = docker.from_env() + ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -77,14 +87,16 @@ def run(self): if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", - default=False, + f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} \ + in S3 which contains the artifacts?{self.end_bold}", + default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", - default=False, + f"\t{self.start_bold}Do you want to delete the template file \ + {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, ) click.echo("\n") @@ -94,23 +106,23 @@ def run(self): click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) # Delete the artifacts + # Intentionally commented # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) # template = Template(None, None, self.uploaders, None) # template.delete(template_dict) - # Delete the template file using template_str + # Delete the CF template file in S3 if self.delete_cf_template_file: - self.s3_uploader.delete_artifact(cf_template_file_name) + self.s3_uploader.delete_artifact(self.cf_template_file_name) # Delete the folder of artifacts if s3_bucket and s3_prefix provided elif self.delete_artifacts_folder: - prefix_files = s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=self.s3_prefix) - self.s3_uploader.delete_artifact(None, prefix_files) + self.s3_uploader.delete_prefix_artifacts() # Delete the ECR companion stack if self.cf_template_file_name: - click.echo("- deleting template file {0}".format(cf_template_file)) + click.echo("- deleting template file {0}".format(self.cf_template_file)) click.echo("\n") click.echo("delete complete") else: diff --git a/samcli/lib/delete/cf_utils.py 
b/samcli/lib/delete/cf_utils.py index b8bccdc651..945aa5a09a 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -1,10 +1,10 @@ """ -Delete Cloudformation stacks and s3 files +Delete Cloudformation stacks and s3 files """ -import botocore import logging +from botocore.exceptions import ClientError, BotoCoreError from samcli.commands.delete.exceptions import DeleteFailedError LOG = logging.getLogger(__name__) @@ -29,7 +29,7 @@ def has_stack(self, stack_name): stack = resp["Stacks"][0] return stack["StackStatus"] != "REVIEW_IN_PROGRESS" - except botocore.exceptions.ClientError as e: + except ClientError as e: # If a stack does not exist, describe_stacks will throw an # exception. Unfortunately we don't have a better way than parsing # the exception msg to understand the nature of this exception. @@ -37,9 +37,10 @@ def has_stack(self, stack_name): if "Stack with id {0} does not exist".format(stack_name) in str(e): LOG.debug("Stack with id %s does not exist", stack_name) return False - except botocore.exceptions.BotoCoreError as e: + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + except BotoCoreError as e: # If there are credentials, environment errors, - # catch that and throw a deploy failed error. + # catch that and throw a delete failed error. LOG.debug("Botocore Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e @@ -50,26 +51,25 @@ def has_stack(self, stack_name): raise e def get_stack_template(self, stack_name, stage): + """ + Return the Cloudformation template of the given stack_name + + :param stack_name: Name or ID of the stack + :param stage: The Stage of the template Original or Processed + :return: Template body of the stack + """ try: resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) if not resp["TemplateBody"]: - return "" + return None return resp["TemplateBody"] - except botocore.exceptions.ClientError as e: - # If a stack does not exist, get_stack_template will throw an - # exception. Unfortunately we don't have a better way than parsing - # the exception msg to understand the nature of this exception. - - if "Stack with id {0} does not exist".format(stack_name) in str(e): - LOG.debug("Stack with id %s does not exist", stack_name) - return "" - except botocore.exceptions.BotoCoreError as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, - # catch that and throw a deploy failed error. + # catch that and throw a delete failed error. - LOG.debug("Botocore Exception : %s", str(e)) + LOG.debug("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: @@ -78,28 +78,24 @@ def get_stack_template(self, stack_name, stage): raise e def delete_stack(self, stack_name): + """ + Delete the Cloudformation stack with the given stack_name + + :param stack_name: Name or ID of the stack + :return: Status of deletion + """ try: resp = self._client.delete_stack(StackName=stack_name) - return resp - except botocore.exceptions.ClientError as e: - # If a stack does not exist, describe_stacks will throw an - # exception. Unfortunately we don't have a better way than parsing - # the exception msg to understand the nature of this exception. 
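
For reference only, and separate from the diff above: a minimal boto3 sketch of the raw calls behind delete_stack, using a made-up stack name and region and assuming default credentials are configured. delete_stack returns as soon as deletion has started; the stack_delete_complete waiter is one way to block until the stack is actually gone (the patch itself does not wait).

import boto3

cf = boto3.client("cloudformation", region_name="us-east-1")
cf.delete_stack(StackName="sam-app")  # returns immediately after deletion is initiated
cf.get_waiter("stack_delete_complete").wait(StackName="sam-app")  # optionally block until deleted
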
- - if "Stack with id {0} does not exist".format(stack_name) in str(e): - LOG.debug("Stack with id %s does not exist", stack_name) - return False - except botocore.exceptions.BotoCoreError as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, - # catch that and throw a deploy failed error. + # catch that and throw a delete failed error. - LOG.debug("Botocore Exception : %s", str(e)) + LOG.debug("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle LOG.debug("Unable to get stack details.", exc_info=e) raise e - diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py index 280d24e462..f6e6edeb4d 100644 --- a/samcli/lib/delete/utils.py +++ b/samcli/lib/delete/utils.py @@ -5,7 +5,7 @@ from samcli.lib.utils.hash import file_checksum from samcli.lib.package.artifact_exporter import mktempfile -def get_cf_template_name(self, template_str, extension): +def get_cf_template_name(template_str, extension): with mktempfile() as temp_file: temp_file.write(template_str) temp_file.flush() @@ -13,4 +13,4 @@ def get_cf_template_name(self, template_str, extension): filemd5 = file_checksum(temp_file.name) remote_path = filemd5 + "." + extension - return remote_path \ No newline at end of file + return remote_path diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 08bbf4db23..3e445aec25 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -144,22 +144,24 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, file_name: str, prefix_files=None): - + def delete_artifact(self, remote_path: str, is_key=False): + """ + Deletes a given file from S3 + :param remote_path: Path to the file that will be deleted + :param is_key: If the given remote_path is the key or a file_name + """ try: if not self.bucket_name: raise BucketNotSpecifiedError() - remote_path = file_name - if self.prefix: - if remote_path: - remote_path = "{0}/{1}".format(self.prefix, file_name) - print("- deleting", remote_path) - self.s3.delete_object(Bucket=self.bucket_name, Key=remote_path) - elif prefix_files: - for obj in prefix_files["Contents"]: - print("- deleting", obj["Key"]) - self.s3.delete_object(Bucket=self.bucket_name, Key=obj["Key"]) + key = remote_path + if self.prefix and not is_key: + key = "{0}/{1}".format(self.prefix, remote_path) + + # Deleting Specific file with key + print("- deleting", key) + resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) + return resp["ResponseMetadata"] except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] @@ -167,6 +169,18 @@ def delete_artifact(self, file_name: str, prefix_files=None): raise NoSuchBucketError(bucket_name=self.bucket_name) from ex raise ex + def delete_prefix_artifacts(self): + """ + Deletes all the files from the prefix in S3 + """ + if not self.bucket_name: + raise BucketNotSpecifiedError() + if self.prefix: + prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) + + for obj in prefix_files["Contents"]: + self.delete_artifact(obj["Key"], True) + def file_exists(self, remote_path: str) -> bool: """ Check if the file we are trying to upload already exists in S3 diff --git a/tests/unit/lib/delete/__init__.py b/tests/unit/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py new file mode 100644 index 0000000000..37403a156f --- /dev/null +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -0,0 +1,71 @@ +from unittest.mock import patch, MagicMock, ANY, call +from unittest import TestCase + +from samcli.commands.delete.exceptions import DeleteFailedError +from botocore.exceptions import ClientError, BotoCoreError +from samcli.lib.delete.cf_utils import CfUtils + + +class TestCfUtils(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.cf_utils = CfUtils(self.cloudformation_client) + + def test_cf_utils_init(self): + self.assertEqual(self.cf_utils._client, self.cloudformation_client) + + def test_cf_utils_has_no_stack(self): + self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_non_exsistent(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.has_stack("test") + + def test_cf_utils_has_stack_in_review(self): + self.cf_utils._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_botocore(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") + + def test_cf_utils_get_stack_template_exception_botocore(self): + self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception_botocore(self): + self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception(self): + self.cf_utils._client.get_template = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_delete_stack_exception_botocore(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.delete_stack("test") + + def test_cf_utils_delete_stack_exception(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.delete_stack("test") diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index c40c4c6cf4..07fed24211 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -172,6 +172,51 @@ def test_s3_upload_no_bucket(self): s3_uploader.upload(f.name, remote_path) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + def test_s3_delete_artifact(self): + s3_uploader = 
S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + s3_uploader.artifact_metadata = {"a": "b"} + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"}) + + def test_s3_delete_artifact_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_uploader.delete_artifact(f.name) + self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + + def test_s3_upload_bucket_not_found(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=True, + no_progressbar=self.no_progressbar, + ) + + s3_uploader.s3.delete_object = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "NoSuchBucket"}}, operation_name="create_object") + ) + with tempfile.NamedTemporaryFile() as f: + with self.assertRaises(NoSuchBucketError): + s3_uploader.delete_artifact(f.name) + def test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3, From d77f7c4831eefca0e4a975a159157205a903e4f7 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 24 Jun 2021 13:16:23 -0400 Subject: [PATCH 03/16] Removed s3_bucket and s3_prefix click options --- samcli/commands/delete/command.py | 23 +++++------------------ samcli/commands/delete/delete_context.py | 12 +++++------- 2 files changed, 10 insertions(+), 25 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 13483b40e1..7227be86e3 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -37,18 +37,6 @@ required=False, help="The name of the AWS CloudFormation stack you want to delete. ", ) -@click.option( - "--s3-bucket", - required=False, - help="The name of the S3 bucket where this command delets your " "CloudFormation artifacts.", -) -@click.option( - "--s3-prefix", - required=False, - help="A prefix name that the command uses to delete the " - "artifacts' that were deployed to the S3 bucket. " - "The prefix name is a path name (folder name) for the S3 bucket.", -) @aws_creds_options @common_options @pass_context @@ -57,8 +45,6 @@ def cli( ctx, stack_name, - s3_bucket, - s3_prefix, config_file, config_env, ): @@ -67,17 +53,18 @@ def cli( """ # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(stack_name, ctx.region, ctx.profile, s3_bucket, s3_prefix) # pragma: no cover + do_cli(stack_name, ctx.region, ctx.profile) # pragma: no cover -def do_cli(stack_name, region, profile, s3_bucket, s3_prefix): +def do_cli(stack_name, region, profile): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext - # ctx = click.get_current_context() #This is here if s3_bucket and s3_prefix options are not used + ctx = click.get_current_context() + with DeleteContext( - stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile + stack_name=stack_name, region=region, s3_bucket=ctx.default_map.get("s3_bucket", None), s3_prefix=ctx.default_map.get("s3_prefix", None), profile=profile ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 7e4999442f..919f1e4f99 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -74,7 +74,7 @@ def run(self): self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) docker_client = docker.from_env() - ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) + # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -87,15 +87,13 @@ def run(self): if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} \ - in S3 which contains the artifacts?{self.end_bold}", + f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") - delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file \ - {self.cf_template_file_name} in S3?{self.end_bold}", + self.delete_cf_template_file = confirm( + f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", default=False, ) @@ -122,7 +120,7 @@ def run(self): # Delete the ECR companion stack if self.cf_template_file_name: - click.echo("- deleting template file {0}".format(self.cf_template_file)) + click.echo(f"- deleting template file {self.cf_template_file_name}") click.echo("\n") click.echo("delete complete") else: From 3f6f070953f3de69d3a32734a17184e56b5f2e6e Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 24 Jun 2021 21:42:20 -0400 Subject: [PATCH 04/16] Fixed lint errors and added few unit tests --- samcli/commands/delete/command.py | 9 +++- samcli/commands/delete/delete_context.py | 16 +++--- samcli/lib/package/s3_uploader.py | 3 +- tests/unit/commands/delete/__init__.py | 0 tests/unit/commands/delete/test_command.py | 49 +++++++++++++++++++ .../commands/delete/test_delete_context.py | 0 tests/unit/lib/delete/test_cf_utils.py | 17 ++++++- tests/unit/lib/package/test_s3_uploader.py | 2 +- 8 files changed, 83 insertions(+), 13 deletions(-) create mode 100644 tests/unit/commands/delete/__init__.py create mode 100644 tests/unit/commands/delete/test_command.py create mode 100644 tests/unit/commands/delete/test_delete_context.py diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 7227be86e3..8fc04716b9 100644 --- 
a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -63,8 +63,13 @@ def do_cli(stack_name, region, profile): from samcli.commands.delete.delete_context import DeleteContext ctx = click.get_current_context() - + s3_bucket = ctx.default_map.get("s3_bucket", None) + s3_prefix = ctx.default_map.get("s3_prefix", None) with DeleteContext( - stack_name=stack_name, region=region, s3_bucket=ctx.default_map.get("s3_bucket", None), s3_prefix=ctx.default_map.get("s3_prefix", None), profile=profile + stack_name=stack_name, + region=region, + s3_bucket=s3_bucket, + s3_prefix=s3_prefix, + profile=profile ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 919f1e4f99..b2a861fa16 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -3,7 +3,7 @@ """ import boto3 -import docker +# import docker import click from click import confirm from click import prompt @@ -12,7 +12,7 @@ from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader -from samcli.yamlhelper import yaml_parse +# from samcli.yamlhelper import yaml_parse # Intentionally commented # from samcli.lib.package.artifact_exporter import Template @@ -69,11 +69,11 @@ def run(self): ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + # ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - docker_client = docker.from_env() + # docker_client = docker.from_env() # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) self.cf_utils = CfUtils(cloudformation_client) @@ -83,17 +83,19 @@ def run(self): if is_deployed: template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") - template_dict = yaml_parse(template_str) + # template_dict = yaml_parse(template_str) if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + f"\t{self.start_bold}Are you sure you want to delete the folder" + \ + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file {self.cf_template_file_name} in S3?{self.end_bold}", + f"\t{self.start_bold}Do you want to delete the template file" + \ + f" {self.cf_template_file_name} in S3?{self.end_bold}", default=False, ) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 3e445aec25..c0a4d88cf6 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -22,6 +22,7 @@ from collections import abc from typing import Optional, Dict, Any, cast from urllib.parse import urlparse, parse_qs +import click import botocore import botocore.exceptions @@ -159,7 +160,7 @@ def delete_artifact(self, remote_path: str, is_key=False): key = "{0}/{1}".format(self.prefix, remote_path) # Deleting Specific 
file with key - print("- deleting", key) + click.echo("- deleting S3 file " + key) resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) return resp["ResponseMetadata"] diff --git a/tests/unit/commands/delete/__init__.py b/tests/unit/commands/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py new file mode 100644 index 0000000000..0a0e58afec --- /dev/null +++ b/tests/unit/commands/delete/test_command.py @@ -0,0 +1,49 @@ +from unittest import TestCase +from unittest.mock import ANY, MagicMock, Mock, call, patch + +from samcli.commands.delete.command import do_cli +from tests.unit.cli.test_cli_config_file import MockContext + +def get_mock_sam_config(): + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + return mock_sam_config + +MOCK_SAM_CONFIG = get_mock_sam_config() + +class TestDeleteCliCommand(TestCase): + def setUp(self): + + self.stack_name = "stack-name" + self.s3_bucket = "s3-bucket" + self.s3_prefix = "s3-prefix" + self.region = None + self.profile = None + self.config_env = "mock-default-env" + self.config_file = "mock-default-filename" + MOCK_SAM_CONFIG.reset_mock() + + + @patch("samcli.commands.delete.command.click") + @patch("samcli.commands.delete.delete_context.DeleteContext") + def test_all_args(self, mock_delete_context, mock_delete_click): + + context_mock = Mock() + mock_delete_context.return_value.__enter__.return_value = context_mock + + do_cli( + stack_name=self.stack_name, + region=self.region, + profile=self.profile, + ) + + mock_delete_context.assert_called_with( + stack_name=self.stack_name, + s3_bucket=mock_delete_click.get_current_context().default_map.get("s3_bucket", None), + s3_prefix=mock_delete_click.get_current_context().default_map.get("s3_prefix", None), + region=self.region, + profile=self.profile, + ) + + context_mock.run.assert_called_with() + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 37403a156f..20cb5288dc 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -28,6 +28,16 @@ def test_cf_utils_has_stack_exception_non_exsistent(self): ) ) self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_client_error(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Error: The security token included in the request is expired"}}, + operation_name="stack_status", + ) + ) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") def test_cf_utils_has_stack_exception(self): self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) @@ -45,8 +55,11 @@ def test_cf_utils_has_stack_exception_botocore(self): with self.assertRaises(DeleteFailedError): self.cf_utils.has_stack("test") - def test_cf_utils_get_stack_template_exception_botocore(self): - self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + def test_cf_utils_get_stack_template_exception_client_error(self): + self.cf_utils._client.get_template = MagicMock(side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + 
operation_name="stack_status", + )) with self.assertRaises(DeleteFailedError): self.cf_utils.get_stack_template("test", "Original") diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 07fed24211..f1765c3f8c 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -200,7 +200,7 @@ def test_s3_delete_artifact_no_bucket(self): s3_uploader.delete_artifact(f.name) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) - def test_s3_upload_bucket_not_found(self): + def test_s3_delete_artifact_bucket_not_found(self): s3_uploader = S3Uploader( s3_client=self.s3, bucket_name=self.bucket_name, From af2f9296f30568f9e851f520f1d3853edce2350f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Thu, 24 Jun 2021 22:14:25 -0400 Subject: [PATCH 05/16] Make black happy --- samcli/commands/delete/command.py | 6 +----- samcli/commands/delete/delete_context.py | 14 ++++++++------ samcli/lib/delete/cf_utils.py | 4 ++-- samcli/lib/delete/utils.py | 1 + tests/unit/commands/delete/test_command.py | 4 +++- tests/unit/lib/delete/test_cf_utils.py | 12 +++++++----- 6 files changed, 22 insertions(+), 19 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 8fc04716b9..412e4fc76f 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -66,10 +66,6 @@ def do_cli(stack_name, region, profile): s3_bucket = ctx.default_map.get("s3_bucket", None) s3_prefix = ctx.default_map.get("s3_prefix", None) with DeleteContext( - stack_name=stack_name, - region=region, - s3_bucket=s3_bucket, - s3_prefix=s3_prefix, - profile=profile + stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index b2a861fa16..fb4a09e4e1 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -3,6 +3,7 @@ """ import boto3 + # import docker import click from click import confirm @@ -12,6 +13,7 @@ from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader + # from samcli.yamlhelper import yaml_parse # Intentionally commented @@ -87,16 +89,16 @@ def run(self): if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder" + \ - f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", - default=False, + f"\t{self.start_bold}Are you sure you want to delete the folder" + + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + default=False, ) if not self.delete_artifacts_folder: self.cf_template_file_name = get_cf_template_name(template_str, "template") self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file" + \ - f" {self.cf_template_file_name} in S3?{self.end_bold}", - default=False, + f"\t{self.start_bold}Do you want to delete the template file" + + f" {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, ) click.echo("\n") diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 945aa5a09a..f0c7aa4731 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -65,7 +65,7 @@ def get_stack_template(self, stack_name, stage): return 
resp["TemplateBody"] - except (ClientError, BotoCoreError) as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. @@ -88,7 +88,7 @@ def delete_stack(self, stack_name): resp = self._client.delete_stack(StackName=stack_name) return resp - except (ClientError, BotoCoreError) as e: + except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py index f6e6edeb4d..497610f776 100644 --- a/samcli/lib/delete/utils.py +++ b/samcli/lib/delete/utils.py @@ -5,6 +5,7 @@ from samcli.lib.utils.hash import file_checksum from samcli.lib.package.artifact_exporter import mktempfile + def get_cf_template_name(template_str, extension): with mktempfile() as temp_file: temp_file.write(template_str) diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index 0a0e58afec..a199c4e960 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -4,13 +4,16 @@ from samcli.commands.delete.command import do_cli from tests.unit.cli.test_cli_config_file import MockContext + def get_mock_sam_config(): mock_sam_config = MagicMock() mock_sam_config.exists = MagicMock(return_value=True) return mock_sam_config + MOCK_SAM_CONFIG = get_mock_sam_config() + class TestDeleteCliCommand(TestCase): def setUp(self): @@ -23,7 +26,6 @@ def setUp(self): self.config_file = "mock-default-filename" MOCK_SAM_CONFIG.reset_mock() - @patch("samcli.commands.delete.command.click") @patch("samcli.commands.delete.delete_context.DeleteContext") def test_all_args(self, mock_delete_context, mock_delete_click): diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 20cb5288dc..36f32ae735 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -28,7 +28,7 @@ def test_cf_utils_has_stack_exception_non_exsistent(self): ) ) self.assertEqual(self.cf_utils.has_stack("test"), False) - + def test_cf_utils_has_stack_exception_client_error(self): self.cf_utils._client.describe_stacks = MagicMock( side_effect=ClientError( @@ -56,10 +56,12 @@ def test_cf_utils_has_stack_exception_botocore(self): self.cf_utils.has_stack("test") def test_cf_utils_get_stack_template_exception_client_error(self): - self.cf_utils._client.get_template = MagicMock(side_effect=ClientError( - error_response={"Error": {"Message": "Stack with id test does not exist"}}, - operation_name="stack_status", - )) + self.cf_utils._client.get_template = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) with self.assertRaises(DeleteFailedError): self.cf_utils.get_stack_template("test", "Original") From 99f7db4768e44bffed317bcbcd2d1ebba48174cb Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 29 Jun 2021 10:16:00 -0400 Subject: [PATCH 06/16] Added LOG statements --- samcli/lib/delete/cf_utils.py | 13 +++++++------ samcli/lib/package/s3_uploader.py | 4 ++++ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index f0c7aa4731..db36f11ce3 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -37,17 +37,18 @@ def has_stack(self, stack_name): if "Stack with id {0} does not 
exist".format(stack_name) in str(e): LOG.debug("Stack with id %s does not exist", stack_name) return False + LOG.error("ClientError Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except BotoCoreError as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.debug("Botocore Exception : %s", str(e)) + LOG.error("Botocore Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle - LOG.debug("Unable to get stack details.", exc_info=e) + LOG.error("Unable to get stack details.", exc_info=e) raise e def get_stack_template(self, stack_name, stage): @@ -69,12 +70,12 @@ def get_stack_template(self, stack_name, stage): # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.debug("Failed to delete stack : %s", str(e)) + LOG.error("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle - LOG.debug("Unable to get stack details.", exc_info=e) + LOG.error("Unable to get stack details.", exc_info=e) raise e def delete_stack(self, stack_name): @@ -92,10 +93,10 @@ def delete_stack(self, stack_name): # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.debug("Failed to delete stack : %s", str(e)) + LOG.error("Failed to delete stack : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle - LOG.debug("Unable to get stack details.", exc_info=e) + LOG.error("Failed to delete stack. 
", exc_info=e) raise e diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index c0a4d88cf6..3fb7070d2b 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -153,6 +153,7 @@ def delete_artifact(self, remote_path: str, is_key=False): """ try: if not self.bucket_name: + LOG.error("Bucket not specified") raise BucketNotSpecifiedError() key = remote_path @@ -162,11 +163,13 @@ def delete_artifact(self, remote_path: str, is_key=False): # Deleting Specific file with key click.echo("- deleting S3 file " + key) resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) + LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) return resp["ResponseMetadata"] except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] if error_code == "NoSuchBucket": + LOG.error("Provided bucket %s does not exist ", self.bucket_name) raise NoSuchBucketError(bucket_name=self.bucket_name) from ex raise ex @@ -175,6 +178,7 @@ def delete_prefix_artifacts(self): Deletes all the files from the prefix in S3 """ if not self.bucket_name: + LOG.error("Bucket not specified") raise BucketNotSpecifiedError() if self.prefix: prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) From e7304ec2807bd4943363ca7ae791407fdfdf673f Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 29 Jun 2021 12:48:29 -0400 Subject: [PATCH 07/16] Added and updated changes based on CR --- samcli/commands/delete/command.py | 43 ++++-- samcli/commands/delete/delete_context.py | 144 +++++++++++------- samcli/lib/delete/utils.py | 17 --- samcli/lib/deploy/deployer.py | 7 +- samcli/lib/package/artifact_exporter.py | 6 +- samcli/lib/package/utils.py | 12 +- tests/unit/commands/delete/test_command.py | 6 +- .../lib/package/test_artifact_exporter.py | 28 ++-- 8 files changed, 153 insertions(+), 110 deletions(-) delete mode 100644 samcli/lib/delete/utils.py diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 412e4fc76f..d95382bb51 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -5,23 +5,21 @@ import logging import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.lib.utils.version_checker import check_newer_version -SHORT_HELP = "Delete an AWS SAM application." +SHORT_HELP = "Delete an AWS SAM application and the artifacts created by sam deploy." -HELP_TEXT = """The sam delete command deletes a Cloudformation Stack and deletes all your resources which were created. +HELP_TEXT = """The sam delete command deletes the Cloudformation +Stack and all the artifacts which were created using sam deploy. \b -e.g. sam delete --stack-name sam-app --region us-east-1 +e.g. sam delete \b """ -CONFIG_SECTION = "parameters" -CONFIG_COMMAND = "deploy" LOG = logging.getLogger(__name__) @@ -31,12 +29,34 @@ context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, help=HELP_TEXT, ) -@configuration_option(provider=TomlProvider(section=CONFIG_SECTION, cmd_names=[CONFIG_COMMAND])) @click.option( "--stack-name", required=False, help="The name of the AWS CloudFormation stack you want to delete. ", ) +@click.option( + "--config-file", + required=False, + help=( + "The path and file name of the configuration file containing default parameter values to use. 
" + "Its default value is 'samconfig.toml' in project directory. For more information about configuration files, " + "see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="samconfig.toml", +) +@click.option( + "--config-env", + required=False, + help=( + "The environment name specifying the default parameter values in the configuration file to use. " + "Its default value is 'default'. For more information about configuration files, see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="default", +) @aws_creds_options @common_options @pass_context @@ -53,19 +73,16 @@ def cli( """ # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(stack_name, ctx.region, ctx.profile) # pragma: no cover + do_cli(stack_name, ctx.region, config_file, config_env, ctx.profile) # pragma: no cover -def do_cli(stack_name, region, profile): +def do_cli(stack_name, region, config_file, config_env, profile): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext - ctx = click.get_current_context() - s3_bucket = ctx.default_map.get("s3_bucket", None) - s3_prefix = ctx.default_map.get("s3_prefix", None) with DeleteContext( - stack_name=stack_name, region=region, s3_bucket=s3_bucket, s3_prefix=s3_prefix, profile=profile + stack_name=stack_name, region=region, profile=profile, config_file=config_file, config_env=config_env ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index fb4a09e4e1..95c62d4dd1 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -8,11 +8,11 @@ import click from click import confirm from click import prompt - +from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent from samcli.lib.delete.cf_utils import CfUtils -from samcli.lib.delete.utils import get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name # from samcli.yamlhelper import yaml_parse @@ -21,14 +21,20 @@ # from samcli.lib.package.ecr_uploader import ECRUploader # from samcli.lib.package.uploaders import Uploaders +CONFIG_COMMAND = "deploy" +CONFIG_SECTION = "parameters" +TEMPLATE_STAGE = "Original" + class DeleteContext: - def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): + def __init__(self, stack_name, region, profile, config_file, config_env): self.stack_name = stack_name self.region = region self.profile = profile - self.s3_bucket = s3_bucket - self.s3_prefix = s3_prefix + self.config_file = config_file + self.config_env = config_env + self.s3_bucket = None # s3_bucket + self.s3_prefix = None # s3_prefix self.cf_utils = None self.start_bold = "\033[1m" self.end_bold = "\033[0m" @@ -39,15 +45,7 @@ def __init__(self, stack_name, region, s3_bucket, s3_prefix, profile): self.delete_cf_template_file = None def __enter__(self): - return self - - def __exit__(self, *args): - pass - - def run(self): - """ - Delete the stack based on the argument provided by customers and samconfig.toml. 
- """ + self.parse_config_file() if not self.stack_name: self.stack_name = prompt( f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING @@ -57,8 +55,82 @@ def run(self): self.region = prompt( f"\t{self.start_bold}Enter region you want to delete from{self.end_bold}", type=click.STRING ) + return self + + def __exit__(self, *args): + pass + + def parse_config_file(self): + """ + Read the provided config file if it exists and assign the options values. + """ + toml_provider = TomlProvider(CONFIG_SECTION, [CONFIG_COMMAND]) + config_options = toml_provider( + config_path=self.config_file, config_env=self.config_env, cmd_names=[CONFIG_COMMAND] + ) + if config_options: + if not self.stack_name: + self.stack_name = config_options.get("stack_name", None) + if self.stack_name == config_options["stack_name"]: + if not self.region: + self.region = config_options.get("region", None) + if not self.profile: + self.profile = config_options.get("profile", None) + self.s3_bucket = config_options.get("s3_bucket", None) + self.s3_prefix = config_options.get("s3_prefix", None) + + def delete(self): + """ + Delete method calls for Cloudformation stacks and S3 and ECR artifacts + """ + template_str = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + + # template_dict = yaml_parse(template_str) + + if self.s3_bucket and self.s3_prefix: + self.delete_artifacts_folder = confirm( + f"\t{self.start_bold}Are you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + default=False, + ) + if not self.delete_artifacts_folder: + with mktempfile() as temp_file: + self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + self.delete_cf_template_file = confirm( + f"\t{self.start_bold}Do you want to delete the template file" + + f" {self.cf_template_file_name} in S3?{self.end_bold}", + default=False, + ) + + click.echo("\n") + # Delete the primary stack + self.cf_utils.delete_stack(self.stack_name) + + click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) + + # Delete the artifacts + # Intentionally commented + # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + # template = Template(None, None, self.uploaders, None) + # template.delete(template_dict) + + # Delete the CF template file in S3 + if self.delete_cf_template_file: + self.s3_uploader.delete_artifact(self.cf_template_file_name) + + # Delete the folder of artifacts if s3_bucket and s3_prefix provided + elif self.delete_artifacts_folder: + self.s3_uploader.delete_prefix_artifacts() + + # Delete the ECR companion stack + + def run(self): + """ + Delete the stack based on the argument provided by customers and samconfig.toml. 
+ """ delete_stack = confirm( - f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}?{self.end_bold}", + f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}" + + f" in the region {self.region} ?{self.end_bold}", default=False, ) # Fetch the template using the stack-name @@ -83,48 +155,8 @@ def run(self): is_deployed = self.cf_utils.has_stack(self.stack_name) if is_deployed: - template_str = self.cf_utils.get_stack_template(self.stack_name, "Original") - - # template_dict = yaml_parse(template_str) - - if self.s3_bucket and self.s3_prefix: - self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder" - + f"{self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", - default=False, - ) - if not self.delete_artifacts_folder: - self.cf_template_file_name = get_cf_template_name(template_str, "template") - self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file" - + f" {self.cf_template_file_name} in S3?{self.end_bold}", - default=False, - ) - - click.echo("\n") - # Delete the primary stack - self.cf_utils.delete_stack(self.stack_name) - - click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) - - # Delete the artifacts - # Intentionally commented - # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) - # template = Template(None, None, self.uploaders, None) - # template.delete(template_dict) - - # Delete the CF template file in S3 - if self.delete_cf_template_file: - self.s3_uploader.delete_artifact(self.cf_template_file_name) - - # Delete the folder of artifacts if s3_bucket and s3_prefix provided - elif self.delete_artifacts_folder: - self.s3_uploader.delete_prefix_artifacts() - - # Delete the ECR companion stack + self.delete() - if self.cf_template_file_name: - click.echo(f"- deleting template file {self.cf_template_file_name}") click.echo("\n") click.echo("delete complete") else: diff --git a/samcli/lib/delete/utils.py b/samcli/lib/delete/utils.py deleted file mode 100644 index 497610f776..0000000000 --- a/samcli/lib/delete/utils.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Utilities for Delete -""" - -from samcli.lib.utils.hash import file_checksum -from samcli.lib.package.artifact_exporter import mktempfile - - -def get_cf_template_name(template_str, extension): - with mktempfile() as temp_file: - temp_file.write(template_str) - temp_file.flush() - - filemd5 = file_checksum(temp_file.name) - remote_path = filemd5 + "." + extension - - return remote_path diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 8aae03425e..eeed0fd321 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -34,7 +34,7 @@ ) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns, newline_per_item, MIN_OFFSET from samcli.commands.deploy import exceptions as deploy_exceptions -from samcli.lib.package.artifact_exporter import mktempfile +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp @@ -174,12 +174,11 @@ def create_changeset( # TemplateBody. This is required for large templates. 
if s3_uploader: with mktempfile() as temporary_file: - temporary_file.write(kwargs.pop("TemplateBody")) - temporary_file.flush() + remote_path = get_cf_template_name(temporary_file, kwargs.pop("TemplateBody"), "template") # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( - s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" ) kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index c0f2b94576..85b5792ef9 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -42,6 +42,7 @@ is_local_file, mktempfile, is_s3_url, + get_cf_template_name, ) from samcli.lib.utils.packagetype import ZIP from samcli.yamlhelper import yaml_parse, yaml_dump @@ -83,10 +84,9 @@ def do_export(self, resource_id, resource_dict, parent_dir): exported_template_str = yaml_dump(exported_template_dict) with mktempfile() as temporary_file: - temporary_file.write(exported_template_str) - temporary_file.flush() - url = self.uploader.upload_with_dedup(temporary_file.name, "template") + remote_path = get_cf_template_name(temporary_file, exported_template_str, "template") + url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url(url, version_property="Version") diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index 6317c35a48..a831518805 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -19,7 +19,7 @@ from samcli.commands.package.exceptions import ImageNotFoundError from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.utils.hash import dir_checksum +from samcli.lib.utils.hash import dir_checksum, file_checksum LOG = logging.getLogger(__name__) @@ -284,3 +284,13 @@ def copy_to_temp_dir(filepath): dst = os.path.join(tmp_dir, os.path.basename(filepath)) shutil.copyfile(filepath, dst) return tmp_dir + + +def get_cf_template_name(temp_file, template_str, extension): + temp_file.write(template_str) + temp_file.flush() + + filemd5 = file_checksum(temp_file.name) + remote_path = filemd5 + "." 
+ extension + + return remote_path diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index a199c4e960..4e268688ee 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -36,15 +36,17 @@ def test_all_args(self, mock_delete_context, mock_delete_click): do_cli( stack_name=self.stack_name, region=self.region, + config_file=self.config_file, + config_env=self.config_env, profile=self.profile, ) mock_delete_context.assert_called_with( stack_name=self.stack_name, - s3_bucket=mock_delete_click.get_current_context().default_map.get("s3_bucket", None), - s3_prefix=mock_delete_click.get_current_context().default_map.get("s3_prefix", None), region=self.region, profile=self.profile, + config_file=self.config_file, + config_env=self.config_env, ) context_mock.run.assert_called_with() diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 7cc20f6be7..6c85108c8e 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -778,7 +778,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -792,7 +792,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_cloudformation_stack_no_upload_path_is_s3url(self): @@ -805,7 +805,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -817,7 +817,7 @@ def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -829,7 +829,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def 
test_export_cloudformation_stack_no_upload_path_is_empty(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -842,7 +842,7 @@ def test_export_cloudformation_stack_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_not_file(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -855,7 +855,7 @@ def test_export_cloudformation_stack_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() @patch("samcli.lib.package.artifact_exporter.Template") def test_export_serverless_application(self, TemplateMock): @@ -871,7 +871,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -885,7 +885,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_serverless_application_no_upload_path_is_s3url(self): @@ -898,7 +898,7 @@ def test_export_serverless_application_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_httpsurl(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -910,7 +910,7 @@ def test_export_serverless_application_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_empty(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -921,7 +921,7 @@ def test_export_serverless_application_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_not_file(self): 
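# A Location pointing at a directory (not a file) cannot be packaged:
# export() should raise ExportFailedError and never call the uploader.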
stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -933,7 +933,7 @@ def test_export_serverless_application_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_dictionary(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -945,7 +945,7 @@ def test_export_serverless_application_no_upload_path_is_dictionary(self): resource_dict = {property_name: location} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], location) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() @patch("samcli.lib.package.artifact_exporter.yaml_parse") def test_template_export_metadata(self, yaml_parse_mock): From c2e43db315aa292db027ba54f7a3f2fd006eb50e Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Tue, 29 Jun 2021 13:37:40 -0400 Subject: [PATCH 08/16] Fixed the unit tests in artifact_exporter.py --- tests/unit/lib/package/test_artifact_exporter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 6c85108c8e..f7aceafef1 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -792,7 +792,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_cloudformation_stack_no_upload_path_is_s3url(self): @@ -885,7 +885,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, "721aad13918f292d25bc9dc7d61b0e9c.template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_serverless_application_no_upload_path_is_s3url(self): From dd35d532d6a2f48fa007bd2907a337924262de3d Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Tue, 29 Jun 2021 14:09:54 -0400 Subject: [PATCH 09/16] Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn --- samcli/commands/delete/command.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index d95382bb51..c0310bfa6e 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -11,8 +11,8 @@ SHORT_HELP = "Delete an AWS SAM application and the artifacts created by sam deploy." 
-HELP_TEXT = """The sam delete command deletes the Cloudformation -Stack and all the artifacts which were created using sam deploy. +HELP_TEXT = """The sam delete command deletes the CloudFormation +stack and all the artifacts which were created using sam deploy. \b e.g. sam delete From f43e763e1a382ee946bf8fba45075c00326edc8c Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 12:17:26 -0400 Subject: [PATCH 10/16] Updated code based on Chris' comments --- samcli/commands/delete/command.py | 12 +++-- samcli/commands/delete/delete_context.py | 66 ++++++++---------------- samcli/commands/delete/exceptions.py | 10 ++++ samcli/lib/delete/cf_utils.py | 19 ++++--- samcli/lib/package/s3_uploader.py | 4 +- tests/unit/lib/delete/test_cf_utils.py | 6 +-- 6 files changed, 55 insertions(+), 62 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index c0310bfa6e..823e262bc6 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -64,19 +64,21 @@ @print_cmdline_args def cli( ctx, - stack_name, - config_file, - config_env, + stack_name: str, + config_file: str, + config_env: str, ): """ `sam delete` command entry point """ # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(stack_name, ctx.region, config_file, config_env, ctx.profile) # pragma: no cover + do_cli( + stack_name=stack_name, region=ctx.region, config_file=config_file, config_env=config_env, profile=ctx.profile + ) # pragma: no cover -def do_cli(stack_name, region, config_file, config_env, profile): +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str): """ Implementation of the ``cli`` method """ diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 95c62d4dd1..a6d146d584 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -4,7 +4,6 @@ import boto3 -# import docker import click from click import confirm from click import prompt @@ -14,32 +13,22 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name -# from samcli.yamlhelper import yaml_parse - -# Intentionally commented -# from samcli.lib.package.artifact_exporter import Template -# from samcli.lib.package.ecr_uploader import ECRUploader -# from samcli.lib.package.uploaders import Uploaders - CONFIG_COMMAND = "deploy" CONFIG_SECTION = "parameters" TEMPLATE_STAGE = "Original" class DeleteContext: - def __init__(self, stack_name, region, profile, config_file, config_env): + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str): self.stack_name = stack_name self.region = region self.profile = profile self.config_file = config_file self.config_env = config_env - self.s3_bucket = None # s3_bucket - self.s3_prefix = None # s3_prefix + self.s3_bucket = None + self.s3_prefix = None self.cf_utils = None - self.start_bold = "\033[1m" - self.end_bold = "\033[0m" self.s3_uploader = None - # self.uploaders = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None @@ -48,13 +37,11 @@ def __enter__(self): self.parse_config_file() if not self.stack_name: self.stack_name = prompt( - f"\t{self.start_bold}Enter stack name you want to delete{self.end_bold}", type=click.STRING + click.style("\tEnter stack name you want to delete:", bold=True), 
type=click.STRING ) if not self.region: - self.region = prompt( - f"\t{self.start_bold}Enter region you want to delete from{self.end_bold}", type=click.STRING - ) + self.region = prompt(click.style("\tEnter region you want to delete from:", bold=True), type=click.STRING) return self def __exit__(self, *args): @@ -85,52 +72,48 @@ def delete(self): """ template_str = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) - # template_dict = yaml_parse(template_str) - if self.s3_bucket and self.s3_prefix: self.delete_artifacts_folder = confirm( - f"\t{self.start_bold}Are you sure you want to delete the folder" - + f" {self.s3_prefix} in S3 which contains the artifacts?{self.end_bold}", + click.style( + "\tAre you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?", + bold=True, + ), default=False, ) if not self.delete_artifacts_folder: with mktempfile() as temp_file: self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") self.delete_cf_template_file = confirm( - f"\t{self.start_bold}Do you want to delete the template file" - + f" {self.cf_template_file_name} in S3?{self.end_bold}", + click.style( + "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True + ), default=False, ) click.echo("\n") # Delete the primary stack - self.cf_utils.delete_stack(self.stack_name) + self.cf_utils.delete_stack(stack_name=self.stack_name) - click.echo("- deleting Cloudformation stack {0}".format(self.stack_name)) - - # Delete the artifacts - # Intentionally commented - # self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) - # template = Template(None, None, self.uploaders, None) - # template.delete(template_dict) + click.echo(f"- deleting Cloudformation stack {self.stack_name}") # Delete the CF template file in S3 if self.delete_cf_template_file: - self.s3_uploader.delete_artifact(self.cf_template_file_name) + self.s3_uploader.delete_artifact(remote_path=self.cf_template_file_name) # Delete the folder of artifacts if s3_bucket and s3_prefix provided elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - # Delete the ECR companion stack - def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. 
""" delete_stack = confirm( - f"\t{self.start_bold}Are you sure you want to delete the stack {self.stack_name}" - + f" in the region {self.region} ?{self.end_bold}", + click.style( + f"\tAre you sure you want to delete the stack {self.stack_name}" + f" in the region {self.region} ?", + bold=True, + ), default=False, ) # Fetch the template using the stack-name @@ -143,16 +126,11 @@ def run(self): ) s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - # ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - - # docker_client = docker.from_env() - # ecr_uploader = ECRUploader(docker_client, ecr_client, None, None) - self.cf_utils = CfUtils(cloudformation_client) - is_deployed = self.cf_utils.has_stack(self.stack_name) + is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) if is_deployed: self.delete() @@ -160,4 +138,4 @@ def run(self): click.echo("\n") click.echo("delete complete") else: - click.echo("Error: The input stack {0} does not exist on Cloudformation".format(self.stack_name)) + click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py index 82c56b6bb6..7e2ba5105c 100644 --- a/samcli/commands/delete/exceptions.py +++ b/samcli/commands/delete/exceptions.py @@ -12,3 +12,13 @@ def __init__(self, stack_name, msg): message_fmt = "Failed to delete the stack: {stack_name}, {msg}" super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class FetchTemplateFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to fetch the template for the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index db36f11ce3..8644a51445 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -5,7 +5,7 @@ import logging from botocore.exceptions import ClientError, BotoCoreError -from samcli.commands.delete.exceptions import DeleteFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError LOG = logging.getLogger(__name__) @@ -14,7 +14,7 @@ class CfUtils: def __init__(self, cloudformation_client): self._client = cloudformation_client - def has_stack(self, stack_name): + def has_stack(self, stack_name: str): """ Checks if a CloudFormation stack with given name exists @@ -27,6 +27,10 @@ def has_stack(self, stack_name): return False stack = resp["Stacks"][0] + # Note: Stacks with REVIEW_IN_PROGRESS can be deleted + # using delete_stack but get_template does not return + # the template_str for this stack restricting deletion of + # artifacts. 
return stack["StackStatus"] != "REVIEW_IN_PROGRESS" except ClientError as e: @@ -51,7 +55,7 @@ def has_stack(self, stack_name): LOG.error("Unable to get stack details.", exc_info=e) raise e - def get_stack_template(self, stack_name, stage): + def get_stack_template(self, stack_name: str, stage: str): """ Return the Cloudformation template of the given stack_name @@ -62,23 +66,22 @@ def get_stack_template(self, stack_name, stage): try: resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) if not resp["TemplateBody"]: - return None - + return "" return resp["TemplateBody"] except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, # catch that and throw a delete failed error. - LOG.error("Failed to delete stack : %s", str(e)) - raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + LOG.error("Failed to fetch template for the stack : %s", str(e)) + raise FetchTemplateFailedError(stack_name=stack_name, msg=str(e)) from e except Exception as e: # We don't know anything about this exception. Don't handle LOG.error("Unable to get stack details.", exc_info=e) raise e - def delete_stack(self, stack_name): + def delete_stack(self, stack_name: str): """ Delete the Cloudformation stack with the given stack_name diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 3fb7070d2b..c9a5e3f6f0 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -145,7 +145,7 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, remote_path: str, is_key=False): + def delete_artifact(self, remote_path: str, is_key: Optional[bool] = False): """ Deletes a given file from S3 :param remote_path: Path to the file that will be deleted @@ -161,7 +161,7 @@ def delete_artifact(self, remote_path: str, is_key=False): key = "{0}/{1}".format(self.prefix, remote_path) # Deleting Specific file with key - click.echo("- deleting S3 file " + key) + click.echo(f"- deleting S3 file {key}") resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) return resp["ResponseMetadata"] diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 36f32ae735..9e80a00d4a 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -1,7 +1,7 @@ from unittest.mock import patch, MagicMock, ANY, call from unittest import TestCase -from samcli.commands.delete.exceptions import DeleteFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError from botocore.exceptions import ClientError, BotoCoreError from samcli.lib.delete.cf_utils import CfUtils @@ -62,12 +62,12 @@ def test_cf_utils_get_stack_template_exception_client_error(self): operation_name="stack_status", ) ) - with self.assertRaises(DeleteFailedError): + with self.assertRaises(FetchTemplateFailedError): self.cf_utils.get_stack_template("test", "Original") def test_cf_utils_get_stack_template_exception_botocore(self): self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) - with self.assertRaises(DeleteFailedError): + with self.assertRaises(FetchTemplateFailedError): self.cf_utils.get_stack_template("test", "Original") def test_cf_utils_get_stack_template_exception(self): From bc9db140b2a84b7c4457a8eafc63eb9c23526fca Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 16:18:58 
-0400 Subject: [PATCH 11/16] Small changes and fixes based on the comments --- samcli/commands/delete/command.py | 4 ++-- samcli/commands/delete/delete_context.py | 12 +++++------- samcli/lib/delete/cf_utils.py | 15 +++++++-------- samcli/lib/package/s3_uploader.py | 8 +++++--- 4 files changed, 19 insertions(+), 20 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 823e262bc6..266d093a36 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -36,7 +36,6 @@ ) @click.option( "--config-file", - required=False, help=( "The path and file name of the configuration file containing default parameter values to use. " "Its default value is 'samconfig.toml' in project directory. For more information about configuration files, " @@ -45,10 +44,10 @@ ), type=click.STRING, default="samconfig.toml", + show_default=True, ) @click.option( "--config-env", - required=False, help=( "The environment name specifying the default parameter values in the configuration file to use. " "Its default value is 'default'. For more information about configuration files, see: " @@ -56,6 +55,7 @@ ), type=click.STRING, default="default", + show_default=True, ) @aws_creds_options @common_options diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index a6d146d584..8f12402cde 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -70,9 +70,10 @@ def delete(self): """ Delete method calls for Cloudformation stacks and S3 and ECR artifacts """ - template_str = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template_str = template.get("TemplateBody", None) - if self.s3_bucket and self.s3_prefix: + if self.s3_bucket and self.s3_prefix and template_str: self.delete_artifacts_folder = confirm( click.style( "\tAre you sure you want to delete the folder" @@ -91,11 +92,10 @@ def delete(self): default=False, ) - click.echo("\n") # Delete the primary stack self.cf_utils.delete_stack(stack_name=self.stack_name) - click.echo(f"- deleting Cloudformation stack {self.stack_name}") + click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -134,8 +134,6 @@ def run(self): if is_deployed: self.delete() - - click.echo("\n") - click.echo("delete complete") + click.echo("\nDeleted successfully") else: click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 8644a51445..a78ed6d38b 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -4,6 +4,7 @@ import logging +from typing import Dict from botocore.exceptions import ClientError, BotoCoreError from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError @@ -14,7 +15,7 @@ class CfUtils: def __init__(self, cloudformation_client): self._client = cloudformation_client - def has_stack(self, stack_name: str): + def has_stack(self, stack_name: str) -> bool: """ Checks if a CloudFormation stack with given name exists @@ -31,7 +32,7 @@ def has_stack(self, stack_name: str): # using delete_stack but get_template does not return # the template_str for this stack restricting deletion of # artifacts. 
- return stack["StackStatus"] != "REVIEW_IN_PROGRESS" + return bool(stack["StackStatus"] != "REVIEW_IN_PROGRESS") except ClientError as e: # If a stack does not exist, describe_stacks will throw an @@ -55,7 +56,7 @@ def has_stack(self, stack_name: str): LOG.error("Unable to get stack details.", exc_info=e) raise e - def get_stack_template(self, stack_name: str, stage: str): + def get_stack_template(self, stack_name: str, stage: str) -> Dict: """ Return the Cloudformation template of the given stack_name @@ -66,8 +67,8 @@ def get_stack_template(self, stack_name: str, stage: str): try: resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) if not resp["TemplateBody"]: - return "" - return resp["TemplateBody"] + return {} + return dict(resp) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, @@ -86,11 +87,9 @@ def delete_stack(self, stack_name: str): Delete the Cloudformation stack with the given stack_name :param stack_name: Name or ID of the stack - :return: Status of deletion """ try: - resp = self._client.delete_stack(StackName=stack_name) - return resp + self._client.delete_stack(StackName=stack_name) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index c9a5e3f6f0..61a6988416 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -145,11 +145,13 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, remote_path: str, is_key: Optional[bool] = False): + def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: """ Deletes a given file from S3 :param remote_path: Path to the file that will be deleted :param is_key: If the given remote_path is the key or a file_name + + :return: metadata dict of the deleted object """ try: if not self.bucket_name: @@ -161,10 +163,10 @@ def delete_artifact(self, remote_path: str, is_key: Optional[bool] = False): key = "{0}/{1}".format(self.prefix, remote_path) # Deleting Specific file with key - click.echo(f"- deleting S3 file {key}") + click.echo(f"\t- Deleting S3 file {key}") resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) - return resp["ResponseMetadata"] + return dict(resp["ResponseMetadata"]) except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] From 401a950f1b384c07892ba2d2a9ad76266e94ef46 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Wed, 30 Jun 2021 19:31:40 -0400 Subject: [PATCH 12/16] Removed region prompt --- samcli/commands/delete/delete_context.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 8f12402cde..5a70fa9f07 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -40,8 +40,6 @@ def __enter__(self): click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING ) - if not self.region: - self.region = prompt(click.style("\tEnter region you want to delete from:", bold=True), type=click.STRING) return self def __exit__(self, *args): From 46c8bdbeead64d3176240263bf0a7de0f6ee3e24 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 13:54:01 -0400 Subject: [PATCH 13/16] Update SAM context values for profile and region in delete_context.py --- 
samcli/commands/delete/delete_context.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 5a70fa9f07..bf4bf263ba 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -59,8 +59,10 @@ def parse_config_file(self): if self.stack_name == config_options["stack_name"]: if not self.region: self.region = config_options.get("region", None) + click.get_current_context().region = self.region if not self.profile: self.profile = config_options.get("profile", None) + click.get_current_context().profile = self.profile self.s3_bucket = config_options.get("s3_bucket", None) self.s3_prefix = config_options.get("s3_prefix", None) From 3ac180e24407d310d94fdf8f03424c3e7a029959 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 15:01:13 -0400 Subject: [PATCH 14/16] Added typing for get_cf_template_name method --- samcli/commands/delete/delete_context.py | 4 +++- samcli/lib/deploy/deployer.py | 4 +++- samcli/lib/package/artifact_exporter.py | 4 +++- samcli/lib/package/utils.py | 4 ++-- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index bf4bf263ba..c547cf4aa6 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -84,7 +84,9 @@ def delete(self): ) if not self.delete_artifacts_folder: with mktempfile() as temp_file: - self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + self.cf_template_file_name = get_cf_template_name( + temp_file=temp_file, template_str=template_str, extension="template" + ) self.delete_cf_template_file = confirm( click.style( "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index eeed0fd321..0e9b3b4bf0 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -175,7 +175,9 @@ def create_changeset( if s3_uploader: with mktempfile() as temporary_file: - remote_path = get_cf_template_name(temporary_file, kwargs.pop("TemplateBody"), "template") + remote_path = get_cf_template_name( + temp_file=temporary_file, template_str=kwargs.pop("TemplateBody"), extension="template" + ) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 85b5792ef9..6bc9787018 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -85,7 +85,9 @@ def do_export(self, resource_id, resource_dict, parent_dir): with mktempfile() as temporary_file: - remote_path = get_cf_template_name(temporary_file, exported_template_str, "template") + remote_path = get_cf_template_name( + temp_file=temporary_file, template_str=exported_template_str, extension="template" + ) url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index a831518805..c33b2b3de7 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -11,7 +11,7 @@ import zipfile import contextlib from contextlib import contextmanager -from typing import Dict, Optional, 
cast +from typing import Dict, Optional, cast, TextIO import jmespath @@ -286,7 +286,7 @@ def copy_to_temp_dir(filepath): return tmp_dir -def get_cf_template_name(temp_file, template_str, extension): +def get_cf_template_name(temp_file: TextIO, template_str: str, extension: str) -> str: temp_file.write(template_str) temp_file.flush() From fb332fd76bf6cfd94c7d231c50a080be738d4f68 Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 16:03:13 -0400 Subject: [PATCH 15/16] Added stack_name prompt if the stack_name is not present in samconfig file --- samcli/commands/delete/delete_context.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index c547cf4aa6..2cd835f9fa 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -56,6 +56,13 @@ def parse_config_file(self): if config_options: if not self.stack_name: self.stack_name = config_options.get("stack_name", None) + + if not self.stack_name: + self.stack_name = prompt( + click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING + ) + # If the stack_name is same as the one present in samconfig file, + # get the information about parameters if not specified by customer. if self.stack_name == config_options["stack_name"]: if not self.region: self.region = config_options.get("region", None) From bf2d4f484c36e8a9f6e1b3e8e70228ccce59816d Mon Sep 17 00:00:00 2001 From: Haresh Nasit Date: Mon, 5 Jul 2021 19:16:25 -0400 Subject: [PATCH 16/16] Replace [] with get() for stack-name in delete_context.py --- samcli/commands/delete/delete_context.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 2cd835f9fa..4b7a5ff295 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -57,13 +57,9 @@ def parse_config_file(self): if not self.stack_name: self.stack_name = config_options.get("stack_name", None) - if not self.stack_name: - self.stack_name = prompt( - click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING - ) # If the stack_name is same as the one present in samconfig file, # get the information about parameters if not specified by customer. - if self.stack_name == config_options["stack_name"]: + if self.stack_name and self.stack_name == config_options.get("stack_name", None): if not self.region: self.region = config_options.get("region", None) click.get_current_context().region = self.region