From 5eddc44e8a37c25f23908306bc625707999e3e22 Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Tue, 6 Jul 2021 14:35:39 -0400 Subject: [PATCH 1/9] feat: Delete methods for CF stacks and S3 files (#2981) * Added methods for cf and s3 files and init UI * Added unit tests for utils methods and s3_uploader * Removed s3_bucket and s3_prefix click options * Fixed lint errors and added few unit tests * Make black happy * Added LOG statements * Added and updated changes based on CR * Fixed the unit tests in artifact_exporter.py * Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn * Updated code based on Chris' comments * Small changes and fixes based on the comments * Removed region prompt * Update SAM context values for profile and region in delete_context.py * Added typing for get_cf_template_name method * Added stack_name prompt if the stack_name is not present in samconfig file * Replace [] with get() for stack-name in delete_context.py Co-authored-by: Chris Rehn --- samcli/cli/cli_config_file.py | 13 +- samcli/cli/command.py | 1 + samcli/commands/delete/__init__.py | 6 + samcli/commands/delete/command.py | 90 +++++++++++ samcli/commands/delete/delete_context.py | 144 ++++++++++++++++++ samcli/commands/delete/exceptions.py | 24 +++ samcli/lib/delete/__init__.py | 0 samcli/lib/delete/cf_utils.py | 104 +++++++++++++ samcli/lib/deploy/deployer.py | 9 +- samcli/lib/package/artifact_exporter.py | 8 +- samcli/lib/package/s3_uploader.py | 44 ++++++ samcli/lib/package/utils.py | 14 +- tests/unit/commands/delete/__init__.py | 0 tests/unit/commands/delete/test_command.py | 53 +++++++ .../commands/delete/test_delete_context.py | 0 tests/unit/lib/delete/__init__.py | 0 tests/unit/lib/delete/test_cf_utils.py | 86 +++++++++++ .../lib/package/test_artifact_exporter.py | 28 ++-- tests/unit/lib/package/test_s3_uploader.py | 45 ++++++ 19 files changed, 642 insertions(+), 27 deletions(-) create mode 100644 
samcli/commands/delete/__init__.py create mode 100644 samcli/commands/delete/command.py create mode 100644 samcli/commands/delete/delete_context.py create mode 100644 samcli/commands/delete/exceptions.py create mode 100644 samcli/lib/delete/__init__.py create mode 100644 samcli/lib/delete/cf_utils.py create mode 100644 tests/unit/commands/delete/__init__.py create mode 100644 tests/unit/commands/delete/test_command.py create mode 100644 tests/unit/commands/delete/test_delete_context.py create mode 100644 tests/unit/lib/delete/__init__.py create mode 100644 tests/unit/lib/delete/test_cf_utils.py diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 67e214e122..9e2b4aa020 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -27,12 +27,14 @@ class TomlProvider: A parser for toml configuration files """ - def __init__(self, section=None): + def __init__(self, section=None, cmd_names=None): """ The constructor for TomlProvider class :param section: section defined in the configuration file nested within `cmd` + :param cmd_names: cmd_name defined in the configuration file """ self.section = section + self.cmd_names = cmd_names def __call__(self, config_path, config_env, cmd_names): """ @@ -67,18 +69,21 @@ def __call__(self, config_path, config_env, cmd_names): LOG.debug("Config file '%s' does not exist", samconfig.path()) return resolved_config + if not self.cmd_names: + self.cmd_names = cmd_names + try: LOG.debug( "Loading configuration values from [%s.%s.%s] (env.command_name.section) in config file at '%s'...", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), ) # NOTE(TheSriram): change from tomlkit table type to normal dictionary, # so that click defaults work out of the box. 
- resolved_config = dict(samconfig.get_all(cmd_names, self.section, env=config_env).items()) + resolved_config = dict(samconfig.get_all(self.cmd_names, self.section, env=config_env).items()) LOG.debug("Configuration values successfully loaded.") LOG.debug("Configuration values are: %s", resolved_config) @@ -87,7 +92,7 @@ def __call__(self, config_path, config_env, cmd_names): "Error reading configuration from [%s.%s.%s] (env.command_name.section) " "in configuration file at '%s' with : %s", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), str(ex), diff --git a/samcli/cli/command.py b/samcli/cli/command.py index c135400586..0741cda84f 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -19,6 +19,7 @@ "samcli.commands.local.local", "samcli.commands.package", "samcli.commands.deploy", + "samcli.commands.delete", "samcli.commands.logs", "samcli.commands.publish", "samcli.commands.pipeline.pipeline", diff --git a/samcli/commands/delete/__init__.py b/samcli/commands/delete/__init__.py new file mode 100644 index 0000000000..ea5b0202d2 --- /dev/null +++ b/samcli/commands/delete/__init__.py @@ -0,0 +1,6 @@ +""" +`sam delete` command +""" + +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py new file mode 100644 index 0000000000..266d093a36 --- /dev/null +++ b/samcli/commands/delete/command.py @@ -0,0 +1,90 @@ +""" +CLI command for "delete" command +""" + +import logging + +import click +from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args + +from samcli.lib.utils.version_checker import check_newer_version + +SHORT_HELP = "Delete an AWS SAM application and the artifacts created by sam deploy." + +HELP_TEXT = """The sam delete command deletes the CloudFormation +stack and all the artifacts which were created using sam deploy. + +\b +e.g. 
sam delete + +\b +""" + +LOG = logging.getLogger(__name__) + + +@click.command( + "delete", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@click.option( + "--stack-name", + required=False, + help="The name of the AWS CloudFormation stack you want to delete. ", +) +@click.option( + "--config-file", + help=( + "The path and file name of the configuration file containing default parameter values to use. " + "Its default value is 'samconfig.toml' in project directory. For more information about configuration files, " + "see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="samconfig.toml", + show_default=True, +) +@click.option( + "--config-env", + help=( + "The environment name specifying the default parameter values in the configuration file to use. " + "Its default value is 'default'. For more information about configuration files, see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="default", + show_default=True, +) +@aws_creds_options +@common_options +@pass_context +@check_newer_version +@print_cmdline_args +def cli( + ctx, + stack_name: str, + config_file: str, + config_env: str, +): + """ + `sam delete` command entry point + """ + + # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing + do_cli( + stack_name=stack_name, region=ctx.region, config_file=config_file, config_env=config_env, profile=ctx.profile + ) # pragma: no cover + + +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str): + """ + Implementation of the ``cli`` method + """ + from samcli.commands.delete.delete_context import DeleteContext + + with DeleteContext( + stack_name=stack_name, region=region, profile=profile, config_file=config_file, config_env=config_env + ) as delete_context: + delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py new file mode 100644 index 0000000000..4b7a5ff295 --- /dev/null +++ b/samcli/commands/delete/delete_context.py @@ -0,0 +1,144 @@ +""" +Delete a SAM stack +""" + +import boto3 + +import click +from click import confirm +from click import prompt +from samcli.cli.cli_config_file import TomlProvider +from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent +from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name + +CONFIG_COMMAND = "deploy" +CONFIG_SECTION = "parameters" +TEMPLATE_STAGE = "Original" + + +class DeleteContext: + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str): + self.stack_name = stack_name + self.region = region + self.profile = profile + self.config_file = config_file + self.config_env = config_env + self.s3_bucket = None + self.s3_prefix = None + self.cf_utils = None + self.s3_uploader = None + self.cf_template_file_name = None + self.delete_artifacts_folder = None + self.delete_cf_template_file = None + + def __enter__(self): + self.parse_config_file() + if not self.stack_name: + self.stack_name = prompt( + click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING + ) + + return self 
+ + def __exit__(self, *args): + pass + + def parse_config_file(self): + """ + Read the provided config file if it exists and assign the options values. + """ + toml_provider = TomlProvider(CONFIG_SECTION, [CONFIG_COMMAND]) + config_options = toml_provider( + config_path=self.config_file, config_env=self.config_env, cmd_names=[CONFIG_COMMAND] + ) + if config_options: + if not self.stack_name: + self.stack_name = config_options.get("stack_name", None) + + # If the stack_name is same as the one present in samconfig file, + # get the information about parameters if not specified by customer. + if self.stack_name and self.stack_name == config_options.get("stack_name", None): + if not self.region: + self.region = config_options.get("region", None) + click.get_current_context().region = self.region + if not self.profile: + self.profile = config_options.get("profile", None) + click.get_current_context().profile = self.profile + self.s3_bucket = config_options.get("s3_bucket", None) + self.s3_prefix = config_options.get("s3_prefix", None) + + def delete(self): + """ + Delete method calls for Cloudformation stacks and S3 and ECR artifacts + """ + template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template_str = template.get("TemplateBody", None) + + if self.s3_bucket and self.s3_prefix and template_str: + self.delete_artifacts_folder = confirm( + click.style( + "\tAre you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ) + if not self.delete_artifacts_folder: + with mktempfile() as temp_file: + self.cf_template_file_name = get_cf_template_name( + temp_file=temp_file, template_str=template_str, extension="template" + ) + self.delete_cf_template_file = confirm( + click.style( + "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True + ), + default=False, + ) + + # Delete the primary stack + 
self.cf_utils.delete_stack(stack_name=self.stack_name) + + click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") + + # Delete the CF template file in S3 + if self.delete_cf_template_file: + self.s3_uploader.delete_artifact(remote_path=self.cf_template_file_name) + + # Delete the folder of artifacts if s3_bucket and s3_prefix provided + elif self.delete_artifacts_folder: + self.s3_uploader.delete_prefix_artifacts() + + def run(self): + """ + Delete the stack based on the argument provided by customers and samconfig.toml. + """ + delete_stack = confirm( + click.style( + f"\tAre you sure you want to delete the stack {self.stack_name}" + f" in the region {self.region} ?", + bold=True, + ), + default=False, + ) + # Fetch the template using the stack-name + if delete_stack and self.region: + boto_config = get_boto_config_with_user_agent() + + # Define cf_client based on the region as different regions can have same stack-names + cloudformation_client = boto3.client( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) + + s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) + + self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) + self.cf_utils = CfUtils(cloudformation_client) + + is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) + + if is_deployed: + self.delete() + click.echo("\nDeleted successfully") + else: + click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py new file mode 100644 index 0000000000..7e2ba5105c --- /dev/null +++ b/samcli/commands/delete/exceptions.py @@ -0,0 +1,24 @@ +""" +Exceptions that are raised by sam delete +""" +from samcli.commands.exceptions import UserException + + +class DeleteFailedError(UserException): + def __init__(self, stack_name, msg): + 
self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to delete the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class FetchTemplateFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to fetch the template for the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) diff --git a/samcli/lib/delete/__init__.py b/samcli/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py new file mode 100644 index 0000000000..a78ed6d38b --- /dev/null +++ b/samcli/lib/delete/cf_utils.py @@ -0,0 +1,104 @@ +""" +Delete Cloudformation stacks and s3 files +""" + +import logging + +from typing import Dict +from botocore.exceptions import ClientError, BotoCoreError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError + +LOG = logging.getLogger(__name__) + + +class CfUtils: + def __init__(self, cloudformation_client): + self._client = cloudformation_client + + def has_stack(self, stack_name: str) -> bool: + """ + Checks if a CloudFormation stack with given name exists + + :param stack_name: Name or ID of the stack + :return: True if stack exists. False otherwise + """ + try: + resp = self._client.describe_stacks(StackName=stack_name) + if not resp["Stacks"]: + return False + + stack = resp["Stacks"][0] + # Note: Stacks with REVIEW_IN_PROGRESS can be deleted + # using delete_stack but get_template does not return + # the template_str for this stack restricting deletion of + # artifacts. + return bool(stack["StackStatus"] != "REVIEW_IN_PROGRESS") + + except ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. 
Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + LOG.error("ClientError Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + except BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a delete failed error. + + LOG.error("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.error("Unable to get stack details.", exc_info=e) + raise e + + def get_stack_template(self, stack_name: str, stage: str) -> Dict: + """ + Return the Cloudformation template of the given stack_name + + :param stack_name: Name or ID of the stack + :param stage: The Stage of the template Original or Processed + :return: Template body of the stack + """ + try: + resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) + if not resp["TemplateBody"]: + return {} + return dict(resp) + + except (ClientError, BotoCoreError) as e: + # If there are credentials, environment errors, + # catch that and throw a delete failed error. + + LOG.error("Failed to fetch template for the stack : %s", str(e)) + raise FetchTemplateFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. 
Don't handle + LOG.error("Unable to get stack details.", exc_info=e) + raise e + + def delete_stack(self, stack_name: str): + """ + Delete the Cloudformation stack with the given stack_name + + :param stack_name: Name or ID of the stack + """ + try: + self._client.delete_stack(StackName=stack_name) + + except (ClientError, BotoCoreError) as e: + # If there are credentials, environment errors, + # catch that and throw a delete failed error. + + LOG.error("Failed to delete stack : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.error("Failed to delete stack. ", exc_info=e) + raise e diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 8aae03425e..0e9b3b4bf0 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -34,7 +34,7 @@ ) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns, newline_per_item, MIN_OFFSET from samcli.commands.deploy import exceptions as deploy_exceptions -from samcli.lib.package.artifact_exporter import mktempfile +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp @@ -174,12 +174,13 @@ def create_changeset( # TemplateBody. This is required for large templates. 
if s3_uploader: with mktempfile() as temporary_file: - temporary_file.write(kwargs.pop("TemplateBody")) - temporary_file.flush() + remote_path = get_cf_template_name( + temp_file=temporary_file, template_str=kwargs.pop("TemplateBody"), extension="template" + ) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( - s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" ) kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index c0f2b94576..6bc9787018 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -42,6 +42,7 @@ is_local_file, mktempfile, is_s3_url, + get_cf_template_name, ) from samcli.lib.utils.packagetype import ZIP from samcli.yamlhelper import yaml_parse, yaml_dump @@ -83,10 +84,11 @@ def do_export(self, resource_id, resource_dict, parent_dir): exported_template_str = yaml_dump(exported_template_dict) with mktempfile() as temporary_file: - temporary_file.write(exported_template_str) - temporary_file.flush() - url = self.uploader.upload_with_dedup(temporary_file.name, "template") + remote_path = get_cf_template_name( + temp_file=temporary_file, template_str=exported_template_str, extension="template" + ) + url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url(url, version_property="Version") diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 34ac666b86..76b7ff1ec7 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -22,6 +22,7 @@ from collections import abc from typing import Optional, Dict, Any, cast from urllib.parse import 
urlparse, parse_qs +import click import botocore import botocore.exceptions @@ -144,6 +145,49 @@ def upload_with_dedup( return self.upload(file_name, remote_path) + def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: + """ + Deletes a given file from S3 + :param remote_path: Path to the file that will be deleted + :param is_key: If the given remote_path is the key or a file_name + + :return: metadata dict of the deleted object + """ + try: + if not self.bucket_name: + LOG.error("Bucket not specified") + raise BucketNotSpecifiedError() + + key = remote_path + if self.prefix and not is_key: + key = "{0}/{1}".format(self.prefix, remote_path) + + # Deleting Specific file with key + click.echo(f"\t- Deleting S3 file {key}") + resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) + LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) + return dict(resp["ResponseMetadata"]) + + except botocore.exceptions.ClientError as ex: + error_code = ex.response["Error"]["Code"] + if error_code == "NoSuchBucket": + LOG.error("Provided bucket %s does not exist ", self.bucket_name) + raise NoSuchBucketError(bucket_name=self.bucket_name) from ex + raise ex + + def delete_prefix_artifacts(self): + """ + Deletes all the files from the prefix in S3 + """ + if not self.bucket_name: + LOG.error("Bucket not specified") + raise BucketNotSpecifiedError() + if self.prefix: + prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) + + for obj in prefix_files["Contents"]: + self.delete_artifact(obj["Key"], True) + def file_exists(self, remote_path: str) -> bool: """ Check if the file we are trying to upload already exists in S3 diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index 6317c35a48..c33b2b3de7 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -11,7 +11,7 @@ import zipfile import contextlib from contextlib import contextmanager -from typing 
import Dict, Optional, cast +from typing import Dict, Optional, cast, TextIO import jmespath @@ -19,7 +19,7 @@ from samcli.commands.package.exceptions import ImageNotFoundError from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.utils.hash import dir_checksum +from samcli.lib.utils.hash import dir_checksum, file_checksum LOG = logging.getLogger(__name__) @@ -284,3 +284,13 @@ def copy_to_temp_dir(filepath): dst = os.path.join(tmp_dir, os.path.basename(filepath)) shutil.copyfile(filepath, dst) return tmp_dir + + +def get_cf_template_name(temp_file: TextIO, template_str: str, extension: str) -> str: + temp_file.write(template_str) + temp_file.flush() + + filemd5 = file_checksum(temp_file.name) + remote_path = filemd5 + "." + extension + + return remote_path diff --git a/tests/unit/commands/delete/__init__.py b/tests/unit/commands/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py new file mode 100644 index 0000000000..4e268688ee --- /dev/null +++ b/tests/unit/commands/delete/test_command.py @@ -0,0 +1,53 @@ +from unittest import TestCase +from unittest.mock import ANY, MagicMock, Mock, call, patch + +from samcli.commands.delete.command import do_cli +from tests.unit.cli.test_cli_config_file import MockContext + + +def get_mock_sam_config(): + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + return mock_sam_config + + +MOCK_SAM_CONFIG = get_mock_sam_config() + + +class TestDeleteCliCommand(TestCase): + def setUp(self): + + self.stack_name = "stack-name" + self.s3_bucket = "s3-bucket" + self.s3_prefix = "s3-prefix" + self.region = None + self.profile = None + self.config_env = "mock-default-env" + self.config_file = "mock-default-filename" + MOCK_SAM_CONFIG.reset_mock() + + @patch("samcli.commands.delete.command.click") + 
@patch("samcli.commands.delete.delete_context.DeleteContext") + def test_all_args(self, mock_delete_context, mock_delete_click): + + context_mock = Mock() + mock_delete_context.return_value.__enter__.return_value = context_mock + + do_cli( + stack_name=self.stack_name, + region=self.region, + config_file=self.config_file, + config_env=self.config_env, + profile=self.profile, + ) + + mock_delete_context.assert_called_with( + stack_name=self.stack_name, + region=self.region, + profile=self.profile, + config_file=self.config_file, + config_env=self.config_env, + ) + + context_mock.run.assert_called_with() + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/__init__.py b/tests/unit/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py new file mode 100644 index 0000000000..9e80a00d4a --- /dev/null +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -0,0 +1,86 @@ +from unittest.mock import patch, MagicMock, ANY, call +from unittest import TestCase + +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError +from botocore.exceptions import ClientError, BotoCoreError +from samcli.lib.delete.cf_utils import CfUtils + + +class TestCfUtils(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.cf_utils = CfUtils(self.cloudformation_client) + + def test_cf_utils_init(self): + self.assertEqual(self.cf_utils._client, self.cloudformation_client) + + def test_cf_utils_has_no_stack(self): + self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.cf_utils.has_stack("test"), 
False) + + def test_cf_utils_has_stack_exception_non_exsistent(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_client_error(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Error: The security token included in the request is expired"}}, + operation_name="stack_status", + ) + ) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") + + def test_cf_utils_has_stack_exception(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.has_stack("test") + + def test_cf_utils_has_stack_in_review(self): + self.cf_utils._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_botocore(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") + + def test_cf_utils_get_stack_template_exception_client_error(self): + self.cf_utils._client.get_template = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + with self.assertRaises(FetchTemplateFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception_botocore(self): + self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(FetchTemplateFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def 
test_cf_utils_get_stack_template_exception(self): + self.cf_utils._client.get_template = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_delete_stack_exception_botocore(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.delete_stack("test") + + def test_cf_utils_delete_stack_exception(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.delete_stack("test") diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 7cc20f6be7..f7aceafef1 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -778,7 +778,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -792,7 +792,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_cloudformation_stack_no_upload_path_is_s3url(self): @@ -805,7 +805,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3url(self): # 
Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -817,7 +817,7 @@ def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -829,7 +829,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_empty(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -842,7 +842,7 @@ def test_export_cloudformation_stack_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_not_file(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -855,7 +855,7 @@ def test_export_cloudformation_stack_no_upload_path_not_file(self): resource_dict = 
{property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() @patch("samcli.lib.package.artifact_exporter.Template") def test_export_serverless_application(self, TemplateMock): @@ -871,7 +871,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -885,7 +885,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_serverless_application_no_upload_path_is_s3url(self): @@ -898,7 +898,7 @@ def test_export_serverless_application_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_httpsurl(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -910,7 +910,7 @@ def test_export_serverless_application_no_upload_path_is_httpsurl(self): # Case 1: 
Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_empty(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -921,7 +921,7 @@ def test_export_serverless_application_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_not_file(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -933,7 +933,7 @@ def test_export_serverless_application_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_dictionary(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -945,7 +945,7 @@ def test_export_serverless_application_no_upload_path_is_dictionary(self): resource_dict = {property_name: location} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], location) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() @patch("samcli.lib.package.artifact_exporter.yaml_parse") def test_template_export_metadata(self, yaml_parse_mock): diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 
c40c4c6cf4..f1765c3f8c 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -172,6 +172,51 @@ def test_s3_upload_no_bucket(self): s3_uploader.upload(f.name, remote_path) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + def test_s3_delete_artifact(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + s3_uploader.artifact_metadata = {"a": "b"} + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"}) + + def test_s3_delete_artifact_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_uploader.delete_artifact(f.name) + self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + + def test_s3_delete_artifact_bucket_not_found(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=True, + no_progressbar=self.no_progressbar, + ) + + s3_uploader.s3.delete_object = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "NoSuchBucket"}}, operation_name="create_object") + ) + with tempfile.NamedTemporaryFile() as f: + with self.assertRaises(NoSuchBucketError): + s3_uploader.delete_artifact(f.name) + def test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3, From 524d90a4c0f409de272ec9af05d941f96de03e9b Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Wed, 14 Jul 2021 
06:03:20 -0400 Subject: [PATCH 2/9] Delete template artifacts (#3022) * Added methods for cf and s3 files and init UI * Added unit tests for utils methods and s3_uploader * Removed s3_bucket and s3_prefix click options * chore: Increase awareness of same file warning during package (#2946) * chore: increase awareness of same file warning during package * fix formatting & grammar Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> * fix: Allow the base64Encoded field in REST Api, skip validation of unknown fields and validate missing statusCode for Http Api (#2941) * fix API Gateway emulator: - skip validating the non allowed fields for Http Api Gateway, as it always skip the unknown fields - add base64Encoded as an allowed field for Rest Api gateway - base64 decoding will be always done for Http API gateway if the lambda response isBase64Encoded is true regardless the content-type - validate if statusCode is missing in case of Http API, and payload version 1.0 * - accept "true", "True", "false", "False" as valid isBase64Encoded values. 
- Validate on other isBase64Encoded Values - add more integration && unit test cases * fix lint && black issues * use smaller image to test Base64 response * Fixed lint errors and added few unit tests * Make black happy * Added methods for deleting template artifacts * Wait method added for delete cf api * Added LOG statements * Added and updated changes based on CR * Fixed the unit tests in artifact_exporter.py * Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn * Updated code based on Chris' comments * Added condition for resources that have deletionpolicy specified * Small changes and fixes based on the comments * Removed region prompt * Added unit tests for ecr delete method and typing for methods * Reformatted delete_context and added option to skip user prompts * Removed return type from artifact_exporter for delete method * Added unit tests for artifact_exporter and delete_context * Added more unit tests for delete_context and artifact_exporter * Added more unit tests for delete_context and artifact_exporter * Added docs and comments for artifact_exporter and ecr_uploader * Added log statements in delete_context and some updates in unit tests * Changed force to no-prompts and updated ecr delete method error handling * Created a separate function for parsing ecr url in ecr_uploader * Reformatted Template class init to pass template_str and init template_dict * Changed how s3 url is obtained for resource_zip edge-case: aws:glue:job * Fixed edge case where resource artifact points to a path style url * run Make black * Made the parse s3 url funcs protected and defined a parent method and modified delete method for ResourceImageDict * Changed parse_ecr_url function name to parse_image_url Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Chris Rehn 
--- samcli/commands/delete/command.py | 29 ++- samcli/commands/delete/delete_context.py | 144 +++++++--- samcli/commands/package/exceptions.py | 26 +- samcli/lib/delete/cf_utils.py | 27 +- samcli/lib/package/artifact_exporter.py | 46 +++- samcli/lib/package/ecr_uploader.py | 77 +++++- samcli/lib/package/packageable_resources.py | 61 +++++ samcli/lib/package/s3_uploader.py | 71 ++++- samcli/lib/package/utils.py | 3 +- tests/unit/commands/delete/test_command.py | 13 +- .../commands/delete/test_delete_context.py | 246 ++++++++++++++++++ tests/unit/lib/delete/test_cf_utils.py | 29 ++- .../lib/package/test_artifact_exporter.py | 86 +++++- tests/unit/lib/package/test_ecr_uploader.py | 86 +++++- 14 files changed, 847 insertions(+), 97 deletions(-) diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py index 266d093a36..06130eb68b 100644 --- a/samcli/commands/delete/command.py +++ b/samcli/commands/delete/command.py @@ -57,34 +57,45 @@ default="default", show_default=True, ) +@click.option( + "--no-prompts", + help=("Specify this flag to allow SAM CLI to skip through the guided prompts."), + is_flag=True, + required=False, +) @aws_creds_options @common_options @pass_context @check_newer_version @print_cmdline_args -def cli( - ctx, - stack_name: str, - config_file: str, - config_env: str, -): +def cli(ctx, stack_name: str, config_file: str, config_env: str, no_prompts: bool): """ `sam delete` command entry point """ # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing do_cli( - stack_name=stack_name, region=ctx.region, config_file=config_file, config_env=config_env, profile=ctx.profile + stack_name=stack_name, + region=ctx.region, + config_file=config_file, + config_env=config_env, + profile=ctx.profile, + no_prompts=no_prompts, ) # pragma: no cover -def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str): +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str, no_prompts: bool): """ Implementation of the ``cli`` method """ from samcli.commands.delete.delete_context import DeleteContext with DeleteContext( - stack_name=stack_name, region=region, profile=profile, config_file=config_file, config_env=config_env + stack_name=stack_name, + region=region, + profile=profile, + config_file=config_file, + config_env=config_env, + no_prompts=no_prompts, ) as delete_context: delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 4b7a5ff295..6491307247 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -1,9 +1,10 @@ """ Delete a SAM stack """ - +import logging import boto3 + import click from click import confirm from click import prompt @@ -13,22 +14,31 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name + +from samcli.lib.package.artifact_exporter import Template +from samcli.lib.package.ecr_uploader import ECRUploader +from samcli.lib.package.uploaders import Uploaders + CONFIG_COMMAND = "deploy" CONFIG_SECTION = "parameters" TEMPLATE_STAGE = "Original" +LOG = logging.getLogger(__name__) + class DeleteContext: - def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str): + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool): 
self.stack_name = stack_name self.region = region self.profile = profile self.config_file = config_file self.config_env = config_env + self.no_prompts = no_prompts self.s3_bucket = None self.s3_prefix = None self.cf_utils = None self.s3_uploader = None + self.uploaders = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None @@ -36,10 +46,12 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, def __enter__(self): self.parse_config_file() if not self.stack_name: + LOG.debug("No stack-name input found") self.stack_name = prompt( click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING ) + self.init_clients() return self def __exit__(self, *args): @@ -56,10 +68,10 @@ def parse_config_file(self): if config_options: if not self.stack_name: self.stack_name = config_options.get("stack_name", None) - # If the stack_name is same as the one present in samconfig file, # get the information about parameters if not specified by customer. if self.stack_name and self.stack_name == config_options.get("stack_name", None): + LOG.debug("Local config present and using the defined options") if not self.region: self.region = config_options.get("region", None) click.get_current_context().region = self.region @@ -69,38 +81,87 @@ def parse_config_file(self): self.s3_bucket = config_options.get("s3_bucket", None) self.s3_prefix = config_options.get("s3_prefix", None) - def delete(self): + def init_clients(self): """ - Delete method calls for Cloudformation stacks and S3 and ECR artifacts + Initialize all the clients being used by sam delete. 
""" - template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) - template_str = template.get("TemplateBody", None) + boto_config = get_boto_config_with_user_agent() - if self.s3_bucket and self.s3_prefix and template_str: - self.delete_artifacts_folder = confirm( - click.style( - "\tAre you sure you want to delete the folder" - + f" {self.s3_prefix} in S3 which contains the artifacts?", - bold=True, - ), - default=False, - ) + # Define cf_client based on the region as different regions can have same stack-names + cloudformation_client = boto3.client( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) + + s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) + ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) + + self.region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 + self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) + + ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) + + self.uploaders = Uploaders(self.s3_uploader, ecr_uploader) + self.cf_utils = CfUtils(cloudformation_client) + + def guided_prompts(self): + """ + Guided prompts asking customer to delete artifacts + """ + # Note: s3_bucket and s3_prefix information is only + # available if a local toml file is present or if + # this information is obtained from the template resources and so if this + # information is not found, warn the customer that S3 artifacts + # will need to be manually deleted. 
+ + if not self.no_prompts and self.s3_bucket: + if self.s3_prefix: + self.delete_artifacts_folder = confirm( + click.style( + "\tAre you sure you want to delete the folder" + + f" {self.s3_prefix} in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ) if not self.delete_artifacts_folder: - with mktempfile() as temp_file: - self.cf_template_file_name = get_cf_template_name( - temp_file=temp_file, template_str=template_str, extension="template" - ) + LOG.debug("S3 prefix not present or user does not want to delete the prefix folder") self.delete_cf_template_file = confirm( click.style( "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True ), default=False, ) + elif self.s3_bucket: + if self.s3_prefix: + self.delete_artifacts_folder = True + else: + self.delete_cf_template_file = True + + def delete(self): + """ + Delete method calls for Cloudformation stacks and S3 and ECR artifacts + """ + # Fetch the template using the stack-name + cf_template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) + template_str = cf_template.get("TemplateBody", None) + + # Get the cloudformation template name using template_str + with mktempfile() as temp_file: + self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + + self.guided_prompts() # Delete the primary stack + click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") self.cf_utils.delete_stack(stack_name=self.stack_name) + self.cf_utils.wait_for_delete(self.stack_name) + LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) - click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") + # Delete the artifacts + template = Template( + template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str + ) + template.delete() # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -110,35 +171,36 @@ def delete(self): elif 
self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() + # If s3_bucket information is not available + elif not self.s3_bucket: + LOG.debug("Cannot delete s3 files as no s3_bucket found") + click.secho( + "\nWarning: s3_bucket and s3_prefix information cannot be obtained," + " delete the files manually if required", + fg="yellow", + ) + def run(self): """ Delete the stack based on the argument provided by customers and samconfig.toml. """ - delete_stack = confirm( - click.style( - f"\tAre you sure you want to delete the stack {self.stack_name}" + f" in the region {self.region} ?", - bold=True, - ), - default=False, - ) - # Fetch the template using the stack-name - if delete_stack and self.region: - boto_config = get_boto_config_with_user_agent() - - # Define cf_client based on the region as different regions can have same stack-names - cloudformation_client = boto3.client( - "cloudformation", region_name=self.region if self.region else None, config=boto_config + if not self.no_prompts: + delete_stack = confirm( + click.style( + f"\tAre you sure you want to delete the stack {self.stack_name}" + + f" in the region {self.region} ?", + bold=True, + ), + default=False, ) - s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) - - self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - self.cf_utils = CfUtils(cloudformation_client) - + if self.no_prompts or delete_stack: is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) - + # Check if the provided stack-name exists if is_deployed: + LOG.debug("Input stack is deployed, continue deleting") self.delete() click.echo("\nDeleted successfully") else: + LOG.debug("Input stack does not exists on Cloudformation") click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index 
af549058e9..f5fdee0297 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -62,12 +62,32 @@ def __init__(self, resource_id, property_name, property_value, ex): ) -class ImageNotFoundError(UserException): - def __init__(self, resource_id, property_name): +class DeleteArtifactFailedError(UserException): + def __init__(self, resource_id, property_name, ex): self.resource_id = resource_id self.property_name = property_name + self.ex = ex + + message_fmt = ( + "Unable to delete artifact referenced " + "by {property_name} parameter of {resource_id} resource." + "\n" + "{ex}" + ) + + super().__init__( + message=message_fmt.format( + property_name=self.property_name, + resource_id=self.resource_id, + ex=self.ex, + ) + ) + - message_fmt = "Image not found for {property_name} parameter of {resource_id} resource. \n" +class ImageNotFoundError(UserException): + def __init__(self, resource_id, property_name, message_fmt): + self.resource_id = resource_id + self.property_name = property_name super().__init__( message=message_fmt.format( diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index a78ed6d38b..40d3b58183 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -4,10 +4,12 @@ import logging + from typing import Dict -from botocore.exceptions import ClientError, BotoCoreError +from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError + LOG = logging.getLogger(__name__) @@ -102,3 +104,26 @@ def delete_stack(self, stack_name: str): # We don't know anything about this exception. Don't handle LOG.error("Failed to delete stack. 
", exc_info=e) raise e + + def wait_for_delete(self, stack_name): + """ + Waits until the delete stack completes + + :param stack_name: Stack name + """ + + # Wait for Delete to Finish + waiter = self._client.get_waiter("stack_delete_complete") + # Poll every 5 seconds. + waiter_config = {"Delay": 5} + try: + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + except WaiterError as ex: + + resp = ex.last_response + status = resp["Status"] + reason = resp["StatusReason"] + + raise DeleteFailedError( + stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason) + ) from ex diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 6bc9787018..00fa5cb089 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -16,7 +16,7 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import os -from typing import Dict +from typing import Dict, Optional from botocore.utils import set_value_from_jmespath @@ -128,25 +128,27 @@ def __init__( RESOURCES_EXPORT_LIST + [CloudFormationStackResource, ServerlessApplicationResource] ), metadata_to_export=frozenset(METADATA_EXPORT_LIST), + template_str: Optional[str] = None, ): """ Reads the template and makes it ready for export """ - if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): - raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) + if not template_str: + if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): + raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) - abs_template_path = make_abs_path(parent_dir, template_path) - template_dir = os.path.dirname(abs_template_path) + abs_template_path = make_abs_path(parent_dir, template_path) + template_dir = os.path.dirname(abs_template_path) 
- with open(abs_template_path, "r") as handle: - template_str = handle.read() + with open(abs_template_path, "r") as handle: + template_str = handle.read() + self.template_dir = template_dir + self.code_signer = code_signer self.template_dict = yaml_parse(template_str) - self.template_dir = template_dir self.resources_to_export = resources_to_export self.metadata_to_export = metadata_to_export self.uploaders = uploaders - self.code_signer = code_signer def _export_global_artifacts(self, template_dict: Dict) -> Dict: """ @@ -237,3 +239,29 @@ def export(self) -> Dict: exporter.export(resource_id, resource_dict, self.template_dir) return self.template_dict + + def delete(self): + """ + Deletes all the artifacts referenced by the given Cloudformation template + """ + if "Resources" not in self.template_dict: + return + + self._apply_global_values() + + for resource_id, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", {}) + resource_deletion_policy = resource.get("DeletionPolicy", None) + # If the deletion policy is set to Retain, + # do not delete the artifact for the resource. 
+ if resource_deletion_policy != "Retain": + for exporter_class in self.resources_to_export: + if exporter_class.RESOURCE_TYPE != resource_type: + continue + if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE: + continue + # Delete code resources + exporter = exporter_class(self.uploaders, None) + exporter.delete(resource_id, resource_dict) diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index fcf4e836e9..e899bab7c8 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -5,12 +5,20 @@ import base64 import os +from typing import Dict +import click import botocore import docker from docker.errors import BuildError, APIError -from samcli.commands.package.exceptions import DockerPushFailedError, DockerLoginFailedError, ECRAuthorizationError +from samcli.commands.package.exceptions import ( + DockerPushFailedError, + DockerLoginFailedError, + ECRAuthorizationError, + ImageNotFoundError, + DeleteArtifactFailedError, +) from samcli.lib.package.image_utils import tag_translation from samcli.lib.package.stream_cursor_utils import cursor_up, cursor_left, cursor_down, clear_line from samcli.lib.utils.osutils import stderr @@ -83,6 +91,73 @@ def upload(self, image, resource_name): return f"{repository}:{_tag}" + def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): + """ + Delete the given ECR image by extracting the repository and image_tag from + image_uri + + :param image_uri: image_uri of the image to be deleted + :param resource_id: id of the resource for which the image is deleted + :param property_name: provided property_name for the resource + """ + try: + repo_image_tag = self.parse_image_url(image_uri=image_uri) + repository = repo_image_tag["repository"] + image_tag = repo_image_tag["image_tag"] + resp = self.ecr_client.batch_delete_image( + repositoryName=repository, + imageIds=[ + {"imageTag": image_tag}, + ], + ) + if resp["failures"]: + # 
Image not found + image_details = resp["failures"][0] + if image_details["failureCode"] == "ImageNotFound": + LOG.error("ImageNotFound Exception") + message_fmt = ( + "Could not delete image for {property_name}" + " parameter of {resource_id} resource as it does not exist. \n" + ) + raise ImageNotFoundError(resource_id, property_name, message_fmt=message_fmt) + + LOG.error( + "Could not delete the image for the resource %s. FailureCode: %s, FailureReason: %s", + property_name, + image_details["failureCode"], + image_details["failureReason"], + ) + raise DeleteArtifactFailedError( + resource_id=resource_id, property_name=property_name, ex=image_details["failureReason"] + ) + + LOG.debug("Deleting ECR image with tag %s", image_tag) + click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") + + except botocore.exceptions.ClientError as ex: + # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException + LOG.error("DeleteArtifactFailedError Exception : %s", str(ex)) + raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + + @staticmethod + def parse_image_url(image_uri: str) -> Dict: + result = {} + registry_repo_tag = image_uri.split("/") + repo_colon_image_tag = None + if len(registry_repo_tag) == 1: + # If there is no registry specified, e.g. repo:tag + repo_colon_image_tag = registry_repo_tag[0] + else: + # Registry present, e.g. registry/repo:tag + repo_colon_image_tag = registry_repo_tag[1] + repo_image_tag_split = repo_colon_image_tag.split(":") + + # If no tag is specified, use latest + result["repository"] = repo_image_tag_split[0] + result["image_tag"] = repo_image_tag_split[1] if len(repo_image_tag_split) > 1 else "latest" + + return result + # TODO: move this to a generic class to allow for streaming logs back from docker. 
def _stream_progress(self, logs): """ diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 937b451a28..39d9b73867 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -22,6 +22,7 @@ upload_local_image_artifacts, is_s3_protocol_url, is_path_value_valid, + is_ecr_url, ) from samcli.commands._utils.resources import ( @@ -79,6 +80,9 @@ def export(self, resource_id, resource_dict, parent_dir): def do_export(self, resource_id, resource_dict, parent_dir): pass + def delete(self, resource_id, resource_dict): + pass + class ResourceZip(Resource): """ @@ -154,6 +158,18 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) + def delete(self, resource_id, resource_dict): + """ + Delete the S3 artifact using S3 url referenced by PROPERTY_NAME + """ + if resource_dict is None: + return + resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) + parsed_s3_url = self.uploader.parse_s3_url(resource_path) + if not self.uploader.bucket_name: + self.uploader.bucket_name = parsed_s3_url["Bucket"] + self.uploader.delete_artifact(parsed_s3_url["Key"], True) + class ResourceImageDict(Resource): """ @@ -197,6 +213,21 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, {self.EXPORT_PROPERTY_CODE_KEY: uploaded_url}) + def delete(self, resource_id, resource_dict): + """ + Delete the ECR artifact using ECR url in PROPERTY_NAME referenced by EXPORT_PROPERTY_CODE_KEY + """ + if resource_dict is None: + return + + remote_path = resource_dict.get(self.PROPERTY_NAME, {}).get(self.EXPORT_PROPERTY_CODE_KEY) + if is_ecr_url(remote_path): + self.uploader.delete_artifact( + image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME + ) + else: + raise ValueError("URL given to the parse method is not a 
valid ECR url {0}".format(remote_path)) + class ResourceImage(Resource): """ @@ -238,6 +269,21 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) + def delete(self, resource_id, resource_dict): + """ + Delete the ECR artifact using ECR url referenced by property_name + """ + if resource_dict is None: + return + + remote_path = resource_dict[self.PROPERTY_NAME] + if is_ecr_url(remote_path): + self.uploader.delete_artifact( + image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME + ) + else: + raise ValueError("URL given to the parse method is not a valid ECR url {0}".format(remote_path)) + class ResourceWithS3UrlDict(ResourceZip): """ @@ -269,6 +315,21 @@ def do_export(self, resource_id, resource_dict, parent_dir): ) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url) + def delete(self, resource_id, resource_dict): + """ + Delete the S3 artifact using S3 url in the dict PROPERTY_NAME + using the bucket at BUCKET_NAME_PROPERTY and key at OBJECT_KEY_PROPERTY + """ + if resource_dict is None: + return + resource_path = resource_dict[self.PROPERTY_NAME] + s3_bucket = resource_path[self.BUCKET_NAME_PROPERTY] + key = resource_path[self.OBJECT_KEY_PROPERTY] + + if not self.uploader.bucket_name: + self.uploader.bucket_name = s3_bucket + self.uploader.delete_artifact(remote_path=key, is_key=True) + class ServerlessFunctionResource(ResourceZip): RESOURCE_TYPE = AWS_SERVERLESS_FUNCTION diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 76b7ff1ec7..b3fbe53c7d 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -243,25 +243,70 @@ def parse_s3_url( object_key_property: str = "Key", version_property: Optional[str] = None, ) -> Dict: - if isinstance(url, str) and url.startswith("s3://"): - parsed = urlparse(url) - query = parse_qs(parsed.query) + return 
S3Uploader._parse_s3_format_url( + url=url, + bucket_name_property=bucket_name_property, + object_key_property=object_key_property, + version_property=version_property, + ) + + if isinstance(url, str) and url.startswith("https://s3"): + return S3Uploader._parse_path_style_s3_url( + url=url, bucket_name_property=bucket_name_property, object_key_property=object_key_property + ) - if parsed.netloc and parsed.path: - result = dict() - result[bucket_name_property] = parsed.netloc - result[object_key_property] = parsed.path.lstrip("/") + raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) - # If there is a query string that has a single versionId field, - # set the object version and return - if version_property is not None and "versionId" in query and len(query["versionId"]) == 1: - result[version_property] = query["versionId"][0] + @staticmethod + def _parse_s3_format_url( + url: Any, + bucket_name_property: str = "Bucket", + object_key_property: str = "Key", + version_property: Optional[str] = None, + ) -> Dict: + """ + Method for parsing s3 urls that begin with s3:// + e.g. s3://bucket/key + """ + parsed = urlparse(url) + query = parse_qs(parsed.query) + if parsed.netloc and parsed.path: + result = dict() + result[bucket_name_property] = parsed.netloc + result[object_key_property] = parsed.path.lstrip("/") + + # If there is a query string that has a single versionId field, + # set the object version and return + if version_property is not None and "versionId" in query and len(query["versionId"]) == 1: + result[version_property] = query["versionId"][0] + + return result + + raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) + + @staticmethod + def _parse_path_style_s3_url( + url: Any, + bucket_name_property: str = "Bucket", + object_key_property: str = "Key", + ) -> Dict: + """ + Static method for parsing path style s3 urls. + e.g. 
https://s3.us-east-1.amazonaws.com/bucket/key + """ + parsed = urlparse(url) + result = dict() + # parsed.path would point to /bucket/key + if parsed.path: + s3_bucket_key = parsed.path.split("/", 2)[1:] - return result + result[bucket_name_property] = s3_bucket_key[0] + result[object_key_property] = s3_bucket_key[1] - raise ValueError("URL given to the parse method is not a valid S3 url " "{0}".format(url)) + return result + raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) class ProgressPercentage: diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index c33b2b3de7..c152b37aa0 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -110,7 +110,8 @@ def upload_local_image_artifacts(resource_id, resource_dict, property_name, pare image_path = jmespath.search(property_name, resource_dict) if not image_path: - raise ImageNotFoundError(property_name=property_name, resource_id=resource_id) + message_fmt = "Image not found for {property_name} parameter of {resource_id} resource. 
\n" + raise ImageNotFoundError(property_name=property_name, resource_id=resource_id, message_fmt=message_fmt) if is_ecr_url(image_path): LOG.debug("Property %s of %s is already an ECR URL", property_name, resource_id) diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py index 4e268688ee..7160553793 100644 --- a/tests/unit/commands/delete/test_command.py +++ b/tests/unit/commands/delete/test_command.py @@ -5,15 +5,6 @@ from tests.unit.cli.test_cli_config_file import MockContext -def get_mock_sam_config(): - mock_sam_config = MagicMock() - mock_sam_config.exists = MagicMock(return_value=True) - return mock_sam_config - - -MOCK_SAM_CONFIG = get_mock_sam_config() - - class TestDeleteCliCommand(TestCase): def setUp(self): @@ -22,9 +13,9 @@ def setUp(self): self.s3_prefix = "s3-prefix" self.region = None self.profile = None + self.no_prompts = None self.config_env = "mock-default-env" self.config_file = "mock-default-filename" - MOCK_SAM_CONFIG.reset_mock() @patch("samcli.commands.delete.command.click") @patch("samcli.commands.delete.delete_context.DeleteContext") @@ -39,6 +30,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): config_file=self.config_file, config_env=self.config_env, profile=self.profile, + no_prompts=self.no_prompts, ) mock_delete_context.assert_called_with( @@ -47,6 +39,7 @@ def test_all_args(self, mock_delete_context, mock_delete_click): profile=self.profile, config_file=self.config_file, config_env=self.config_env, + no_prompts=self.no_prompts, ) context_mock.run.assert_called_with() diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index e69de29bb2..f0975f144e 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -0,0 +1,246 @@ +from unittest import TestCase +from unittest.mock import patch, call, MagicMock + +import click + +from 
samcli.commands.delete.delete_context import DeleteContext +from samcli.cli.cli_config_file import TomlProvider +from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.package.s3_uploader import S3Uploader + + +class TestDeleteContext(TestCase): + @patch("samcli.commands.delete.delete_context.click.echo") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) + def test_delete_context_stack_does_not_exist(self, patched_click_echo): + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=True, + ) as delete_context: + + delete_context.run() + expected_click_echo_calls = [ + call(f"Error: The input stack test does not exist on Cloudformation"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + + @patch.object(DeleteContext, "parse_config_file", MagicMock()) + @patch.object(DeleteContext, "init_clients", MagicMock()) + def test_delete_context_enter(self): + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=True, + ) as delete_context: + self.assertEqual(delete_context.parse_config_file.call_count, 1) + self.assertEqual(delete_context.init_clients.call_count, 1) + + @patch.object( + TomlProvider, + "__call__", + MagicMock( + return_value=( + { + "stack_name": "test", + "region": "us-east-1", + "profile": "developer", + "s3_bucket": "s3-bucket", + "s3_prefix": "s3-prefix", + } + ) + ), + ) + @patch("samcli.commands.deploy.guided_context.click.get_current_context") + def test_delete_context_parse_config_file(self, patched_click_get_current_context): + patched_click_get_current_context = MagicMock() + with DeleteContext( + stack_name=None, + region=None, + config_file="samconfig.toml", + config_env="default", + profile=None, + no_prompts=True, + ) as delete_context: + self.assertEqual(delete_context.stack_name, "test") 
+ self.assertEqual(delete_context.region, "us-east-1") + self.assertEqual(delete_context.profile, "developer") + self.assertEqual(delete_context.s3_bucket, "s3-bucket") + self.assertEqual(delete_context.s3_prefix, "s3-prefix") + + @patch.object( + TomlProvider, + "__call__", + MagicMock( + return_value=( + { + "stack_name": "test", + "region": "us-east-1", + "profile": "developer", + "s3_bucket": "s3-bucket", + "s3_prefix": "s3-prefix", + } + ) + ), + ) + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) + @patch("samcli.commands.deploy.guided_context.click.get_current_context") + def test_delete_context_valid_execute_run(self, patched_click_get_current_context): + patched_click_get_current_context = MagicMock() + with DeleteContext( + stack_name=None, + region=None, + config_file="samconfig.toml", + config_env="default", + profile=None, + no_prompts=True, + ) as delete_context: + delete_context.run() + + self.assertEqual(CfUtils.has_stack.call_count, 1) + self.assertEqual(CfUtils.get_stack_template.call_count, 1) + self.assertEqual(CfUtils.delete_stack.call_count, 1) + self.assertEqual(CfUtils.wait_for_delete.call_count, 1) + self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) + + @patch("samcli.commands.delete.delete_context.click.echo") + @patch("samcli.commands.deploy.guided_context.click.secho") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + def test_delete_context_no_s3_bucket(self, patched_click_secho, 
patched_click_echo): + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=True, + ) as delete_context: + + delete_context.run() + expected_click_secho_calls = [ + call( + "\nWarning: s3_bucket and s3_prefix information cannot be obtained," + " delete the files manually if required", + fg="yellow", + ), + ] + self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) + + expected_click_echo_calls = [ + call("\n\t- Deleting Cloudformation stack test"), + call("\nDeleted successfully"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.confirm") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_artifact", MagicMock()) + def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confirm, patched_get_cf_template_name): + + patched_get_cf_template_name.return_value = "hello.template" + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=None, + ) as delete_context: + patched_confirm.side_effect = [True, False, True] + delete_context.cf_template_file_name = "hello.template" + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + # Now to check for all the defaults on confirmations. 
+ expected_confirmation_calls = [ + call( + click.style( + f"\tAre you sure you want to delete the stack test" + f" in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tAre you sure you want to delete the folder" + + f" s3_prefix in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you want to delete the template file hello.template in S3?", + bold=True, + ), + default=False, + ), + ] + + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + self.assertFalse(delete_context.delete_artifacts_folder) + self.assertTrue(delete_context.delete_cf_template_file) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.confirm") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_artifact", MagicMock()) + def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( + self, patched_confirm, patched_get_cf_template_name + ): + + patched_get_cf_template_name.return_value = "hello.template" + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=None, + ) as delete_context: + patched_confirm.side_effect = [True, True] + delete_context.cf_template_file_name = "hello.template" + delete_context.s3_bucket = "s3_bucket" + + delete_context.run() + # Now to check for all the defaults on confirmations. 
+ expected_confirmation_calls = [ + call( + click.style( + f"\tAre you sure you want to delete the stack test" + f" in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you want to delete the template file hello.template in S3?", + bold=True, + ), + default=False, + ), + ] + + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + self.assertTrue(delete_context.delete_cf_template_file) diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 9e80a00d4a..90d764a5c4 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -1,11 +1,23 @@ from unittest.mock import patch, MagicMock, ANY, call from unittest import TestCase + from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError -from botocore.exceptions import ClientError, BotoCoreError +from botocore.exceptions import ClientError, BotoCoreError, WaiterError + from samcli.lib.delete.cf_utils import CfUtils +class MockDeleteWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + class TestCfUtils(TestCase): def setUp(self): self.session = MagicMock() @@ -20,7 +32,7 @@ def test_cf_utils_has_no_stack(self): self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) self.assertEqual(self.cf_utils.has_stack("test"), False) - def test_cf_utils_has_stack_exception_non_exsistent(self): + def test_cf_utils_has_stack_exception_non_existent(self): self.cf_utils._client.describe_stacks = MagicMock( side_effect=ClientError( error_response={"Error": {"Message": "Stack with id test does not exist"}}, @@ -84,3 +96,16 @@ def test_cf_utils_delete_stack_exception(self): self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) with self.assertRaises(Exception): self.cf_utils.delete_stack("test") + + def 
test_cf_utils_wait_for_delete_exception(self): + self.cf_utils._client.get_waiter = MagicMock( + return_value=MockDeleteWaiter( + ex=WaiterError( + name="wait_for_delete", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(DeleteFailedError): + self.cf_utils.wait_for_delete("test") diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index f7aceafef1..e6b9d14320 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -1,3 +1,4 @@ +import json import tempfile import os import string @@ -7,7 +8,7 @@ from contextlib import contextmanager, closing from unittest import mock -from unittest.mock import patch, Mock +from unittest.mock import patch, Mock, MagicMock from samcli.commands.package.exceptions import ExportFailedError from samcli.lib.package.s3_uploader import S3Uploader @@ -56,7 +57,7 @@ class TestArtifactExporter(unittest.TestCase): def setUp(self): - self.s3_uploader_mock = Mock() + self.s3_uploader_mock = MagicMock() self.s3_uploader_mock.s3.meta.endpoint_url = "https://s3.some-valid-region.amazonaws.com" self.ecr_uploader_mock = Mock() @@ -180,14 +181,14 @@ def test_is_s3_url(self): "s3://foo/bar/baz?versionId=abc", "s3://www.amazon.com/foo/bar", "s3://my-new-bucket/foo/bar?a=1&a=2&a=3&b=1", + "https://s3-eu-west-1.amazonaws.com/bucket/key", + "https://s3.us-east-1.amazonaws.com/bucket/key", ] invalid = [ # For purposes of exporter, we need S3 URLs to point to an object # and not a bucket "s3://foo", - # two versionIds is invalid - "https://s3-eu-west-1.amazonaws.com/bucket/key", "https://www.amazon.com", ] @@ -218,15 +219,24 @@ def test_parse_s3_url(self): "url": "s3://foo/bar/baz?versionId=abc&versionId=123", "result": {"Bucket": "foo", "Key": "bar/baz"}, }, + { + # Path style url + "url": "https://s3-eu-west-1.amazonaws.com/bucket/key", + "result": 
{"Bucket": "bucket", "Key": "key"}, + }, + { + # Path style url + "url": "https://s3.us-east-1.amazonaws.com/bucket/key", + "result": {"Bucket": "bucket", "Key": "key"}, + }, ] invalid = [ # For purposes of exporter, we need S3 URLs to point to an object # and not a bucket "s3://foo", - # two versionIds is invalid - "https://s3-eu-west-1.amazonaws.com/bucket/key", "https://www.amazon.com", + "https://s3.us-east-1.amazonaws.com", ] for config in valid: @@ -411,6 +421,10 @@ class MockResource(ResourceZip): self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + self.s3_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.assertEqual(self.s3_uploader_mock.delete_artifact.call_count, 1) + @patch("samcli.lib.package.packageable_resources.upload_local_image_artifacts") def test_resource_lambda_image(self, upload_local_image_artifacts_mock): # Property value is a path to an image @@ -436,6 +450,10 @@ class MockResource(ResourceImage): self.assertEqual(resource_dict[resource.PROPERTY_NAME], ecr_url) + self.ecr_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.assertEqual(self.ecr_uploader_mock.delete_artifact.call_count, 1) + def test_lambda_image_resource_package_success(self): # Property value is set to an image @@ -746,6 +764,10 @@ class MockResource(ResourceWithS3UrlDict): resource_dict[resource.PROPERTY_NAME], {"b": "bucket", "o": "key1/key2", "v": "SomeVersionNumber"} ) + self.s3_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.s3_uploader_mock.delete_artifact.assert_called_once_with(remote_path="key1/key2", is_key=True) + @patch("samcli.lib.package.packageable_resources.upload_local_artifacts") def test_resource_with_signing_configuration(self, upload_local_artifacts_mock): class MockResource(ResourceZip): @@ -1377,3 +1399,55 @@ def example_yaml_template(self): Timeout: 20 Runtime: nodejs4.3 """ + + def 
test_template_delete(self): + template_str = self.example_yaml_template() + + resource_type1_class = Mock() + resource_type1_class.RESOURCE_TYPE = "resource_type1" + resource_type1_class.ARTIFACT_TYPE = ZIP + resource_type1_class.EXPORT_DESTINATION = Destination.S3 + resource_type1_instance = Mock() + resource_type1_class.return_value = resource_type1_instance + resource_type2_class = Mock() + resource_type2_class.RESOURCE_TYPE = "resource_type2" + resource_type2_class.ARTIFACT_TYPE = ZIP + resource_type2_class.EXPORT_DESTINATION = Destination.S3 + resource_type2_instance = Mock() + resource_type2_class.return_value = resource_type2_instance + resource_type3_class = Mock() + resource_type3_class.RESOURCE_TYPE = "resource_type3" + resource_type3_class.ARTIFACT_TYPE = ZIP + resource_type3_class.EXPORT_DESTINATION = Destination.S3 + resource_type3_instance = Mock() + resource_type3_class.return_value = resource_type3_instance + + resources_to_export = [resource_type1_class, resource_type2_class] + + properties = {"foo": "bar"} + template_dict = { + "Resources": { + "Resource1": {"Type": "resource_type1", "Properties": properties}, + "Resource2": {"Type": "resource_type2", "Properties": properties}, + "Resource3": {"Type": "some-other-type", "Properties": properties, "DeletionPolicy": "Retain"}, + } + } + template_str = json.dumps(template_dict, indent=4, ensure_ascii=False) + + template_exporter = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders_mock, + code_signer=None, + resources_to_export=resources_to_export, + template_str=template_str, + ) + + template_exporter.delete() + + resource_type1_class.assert_called_once_with(self.uploaders_mock, None) + resource_type1_instance.delete.assert_called_once_with("Resource1", mock.ANY) + resource_type2_class.assert_called_once_with(self.uploaders_mock, None) + resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY) + resource_type3_class.assert_not_called() + 
resource_type3_instance.delete.assert_not_called() diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 91798d43f9..25b2c4a047 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -5,7 +5,13 @@ from docker.errors import APIError, BuildError from parameterized import parameterized -from samcli.commands.package.exceptions import DockerLoginFailedError, DockerPushFailedError, ECRAuthorizationError +from samcli.commands.package.exceptions import ( + DockerLoginFailedError, + DockerPushFailedError, + ECRAuthorizationError, + ImageNotFoundError, + DeleteArtifactFailedError, +) from samcli.lib.package.ecr_uploader import ECRUploader from samcli.lib.utils.stream_writer import StreamWriter @@ -23,6 +29,9 @@ def setUp(self): BuildError.__name__: {"reason": "mock_reason", "build_log": "mock_build_log"}, APIError.__name__: {"message": "mock message"}, } + self.image_uri = "900643008914.dkr.ecr.us-east-1.amazonaws.com/" + self.ecr_repo + ":" + self.tag + self.property_name = "AWS::Serverless::Function" + self.resource_id = "HelloWorldFunction" def test_ecr_uploader_init(self): ecr_uploader = ECRUploader( @@ -166,3 +175,78 @@ def test_upload_failure_while_streaming(self): ecr_uploader.login = MagicMock() with self.assertRaises(DockerPushFailedError): ecr_uploader.upload(image, resource_name="HelloWorldFunction") + + def test_delete_artifact_no_image_error(self): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image.return_value = { + "failures": [{"imageId": {"imageTag": self.tag}, "failureCode": "ImageNotFound"}] + } + + with self.assertRaises(ImageNotFoundError): + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + def 
test_delete_artifact_resp_failure(self): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image.return_value = { + "failures": [ + { + "imageId": {"imageTag": self.tag}, + "failureCode": "Mock response Failure", + "failureReason": "Mock ECR testing", + } + ] + } + + with self.assertRaises(DeleteArtifactFailedError): + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + def test_delete_artifact_client_error(self): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "mock client error"}}, operation_name="batch_delete_image" + ) + ) + + with self.assertRaises(DeleteArtifactFailedError): + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + def test_parse_image_url(self): + + valid = [ + {"url": self.image_uri, "result": {"repository": "mock-image-repo", "image_tag": "mock-tag"}}, + {"url": "mock-image-rep:mock-tag", "result": {"repository": "mock-image-rep", "image_tag": "mock-tag"}}, + { + "url": "mock-image-repo", + "result": {"repository": "mock-image-repo", "image_tag": "latest"}, + }, + ] + + for config in valid: + result = ECRUploader.parse_image_url(image_uri=config["url"]) + + self.assertEqual(result, config["result"]) From 0016b1888c5eaf8ef5b320291c43a2441d5121c7 Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Mon, 19 Jul 2021 16:33:36 -0400 Subject: [PATCH 3/9] Get s3 info cf template (#3050) * Added methods for cf and s3 files and init UI * Added unit 
tests for utils methods and s3_uploader * Removed s3_bucket and s3_prefix click options * chore: Increase awareness of same file warning during package (#2946) * chore: increase awareness of same file warning during package * fix formatting & grammar Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> * fix: Allow the base64Encoded field in REST Api, skip validation of unknown fields and validate missing statusCode for Http Api (#2941) * fix API Gateway emulator: - skip validating the non allowed fields for Http Api Gateway, as it always skip the unknown fields - add base64Encoded as an allowed field for Rest Api gateway - base64 decoding will be always done for Http API gateway if the lambda response isBase64Encoded is true regardless the content-type - validate if statusCode is missing in case of Http API, and payload version 1.0 * - accept "true", "True", "false", "False" as valid isBase64Encoded values. - Validate on other isBase64Encoded Values - add more integration && unit test cases * fix lint && black issues * use smaller image to test Base64 response * Fixed lint errors and added few unit tests * Make black happy * Added methods for deleting template artifacts * Wait method added for delete cf api * Added LOG statements * Added and updated changes based on CR * Fixed the unit tests in artifact_exporter.py * Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn * Updated code based on Chris' comments * Added condition for resources that have deletionpolicy specified * Small changes and fixes based on the comments * Removed region prompt * Added unit tests for ecr delete method and typing for methods * Reformatted delete_context and added option to skip user prompts * Removed return type from artifact_exporter for delete method * Added unit tests for artifact_exporter and delete_context * Added more unit tests for delete_context and artifact_exporter * Added more unit tests for delete_context and artifact_exporter * Added 
docs and comments for artifact_exporter and ecr_uploader * Added log statements in delete_context and some updates in unit tests * Changed force to no-prompts and updated ecr delete method error handling * Created a separate function for parsing ecr url in ecr_uploader * Reformatted Template class init to pass template_str and init template_dict * Changed how s3 url is obtained for resource_zip edge-case: aws:glue:job * Fixed edge case where resource artifact points to a path style url * run Make black * Made the parse s3 url funcs protected and defined a parent method and modified delete method for ResourceImageDict * Added methods to extract s3 info from cf template * Added testing for get_s3_info method for artifact_exporter and s3_uploader methods * Removed commented code and updated method docstring * Better error handling for s3 delete artifacts and fixed bug for getting s3 resources information * Changed get_s3_info to get_property_value and changed output text for s3 delete method Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Chris Rehn --- samcli/commands/delete/delete_context.py | 17 ++++-- samcli/lib/package/artifact_exporter.py | 48 ++++++++++++++++- samcli/lib/package/packageable_resources.py | 41 +++++++++++---- samcli/lib/package/s3_uploader.py | 22 +++++--- .../lib/package/test_artifact_exporter.py | 52 ++++++++++++++++++- tests/unit/lib/package/test_s3_uploader.py | 49 ++++++++++++++++- 6 files changed, 202 insertions(+), 27 deletions(-) diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 6491307247..dcfd9071b0 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -149,6 +149,20 @@ def delete(self): with mktempfile() as temp_file: 
self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + template = Template( + template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str + ) + + # If s3 info is not available, try to obtain it from CF + # template resources. + if not self.s3_bucket: + s3_info = template.get_s3_info() + self.s3_bucket = s3_info["s3_bucket"] + self.s3_uploader.bucket_name = self.s3_bucket + + self.s3_prefix = s3_info["s3_prefix"] + self.s3_uploader.prefix = self.s3_prefix + self.guided_prompts() # Delete the primary stack @@ -158,9 +172,6 @@ def delete(self): LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) # Delete the artifacts - template = Template( - template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str - ) template.delete() # Delete the CF template file in S3 diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 00fa5cb089..99567558e8 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -35,7 +35,7 @@ ResourceZip, ) from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.package.uploaders import Uploaders +from samcli.lib.package.uploaders import Uploaders, Destination from samcli.lib.package.utils import ( is_local_folder, make_abs_path, @@ -47,7 +47,6 @@ from samcli.lib.utils.packagetype import ZIP from samcli.yamlhelper import yaml_parse, yaml_dump - # NOTE: sriram-mv, A cyclic dependency on `Template` needs to be broken. @@ -265,3 +264,48 @@ def delete(self): # Delete code resources exporter = exporter_class(self.uploaders, None) exporter.delete(resource_id, resource_dict) + + def get_s3_info(self): + """ + Iterates the template_dict resources with S3 EXPORT_DESTINATION to get the + s3_bucket and s3_prefix information for the purpose of deletion. 
+ Method finds the first resource with s3 information, extracts the information + and then terminates. It is safe to assume that all the packaged files using the + commands package and deploy are in the same s3 bucket with the same s3 prefix. + """ + result = {"s3_bucket": None, "s3_prefix": None} + if "Resources" not in self.template_dict: + return result + + self._apply_global_values() + + for _, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", {}) + + for exporter_class in self.resources_to_export: + # Skip the resources which don't give s3 information + if exporter_class.EXPORT_DESTINATION != Destination.S3: + continue + if exporter_class.RESOURCE_TYPE != resource_type: + continue + if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE: + continue + + exporter = exporter_class(self.uploaders, None) + s3_info = exporter.get_property_value(resource_dict) + + result["s3_bucket"] = s3_info["Bucket"] + s3_key = s3_info["Key"] + + # Extract the prefix from the key + if s3_key: + key_split = s3_key.rsplit("/", 1) + if len(key_split) > 1: + result["s3_prefix"] = key_split[0] + break + if result["s3_bucket"]: + break + + return result diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 39d9b73867..49f0e47653 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -164,11 +164,22 @@ def delete(self, resource_id, resource_dict): """ if resource_dict is None: return + + s3_info = self.get_property_value(resource_dict) + if s3_info["Key"]: + self.uploader.delete_artifact(s3_info["Key"], True) + + def get_property_value(self, resource_dict): + """ + Get the s3 property value for this resource + """ + if resource_dict is None: + return {"Bucket": None, "Key": None} + resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) - parsed_s3_url = 
self.uploader.parse_s3_url(resource_path) - if not self.uploader.bucket_name: - self.uploader.bucket_name = parsed_s3_url["Bucket"] - self.uploader.delete_artifact(parsed_s3_url["Key"], True) + if resource_path: + return self.uploader.parse_s3_url(resource_path) + return {"Bucket": None, "Key": None} class ResourceImageDict(Resource): @@ -322,13 +333,23 @@ def delete(self, resource_id, resource_dict): """ if resource_dict is None: return - resource_path = resource_dict[self.PROPERTY_NAME] - s3_bucket = resource_path[self.BUCKET_NAME_PROPERTY] - key = resource_path[self.OBJECT_KEY_PROPERTY] - if not self.uploader.bucket_name: - self.uploader.bucket_name = s3_bucket - self.uploader.delete_artifact(remote_path=key, is_key=True) + s3_info = self.get_property_value(resource_dict) + if s3_info["Key"]: + self.uploader.delete_artifact(remote_path=s3_info["Key"], is_key=True) + + def get_property_value(self, resource_dict): + """ + Get the s3 property value for this resource + """ + if resource_dict is None: + return {"Bucket": None, "Key": None} + + resource_path = resource_dict.get(self.PROPERTY_NAME, {}) + s3_bucket = resource_path.get(self.BUCKET_NAME_PROPERTY, None) + + key = resource_path.get(self.OBJECT_KEY_PROPERTY, None) + return {"Bucket": s3_bucket, "Key": key} class ServerlessFunctionResource(ResourceZip): diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index b3fbe53c7d..a7f1a9a8b9 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -145,7 +145,7 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: + def delete_artifact(self, remote_path: str, is_key: bool = False) -> bool: """ Deletes a given file from S3 :param remote_path: Path to the file that will be deleted @@ -163,10 +163,16 @@ def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: key = "{0}/{1}".format(self.prefix, 
remote_path) # Deleting Specific file with key - click.echo(f"\t- Deleting S3 file {key}") - resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) - LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) - return dict(resp["ResponseMetadata"]) + if self.file_exists(remote_path=key): + click.echo(f"\t- Deleting S3 object with key {key}") + self.s3.delete_object(Bucket=self.bucket_name, Key=key) + LOG.debug("Deleted s3 object with key %s successfully", key) + return True + + # Given s3 object key does not exist + LOG.debug("Could not find the S3 file with the key %s", key) + click.echo(f"\t- Could not find and delete the S3 object with the key {key}") + return False except botocore.exceptions.ClientError as ex: error_code = ex.response["Error"]["Code"] @@ -183,9 +189,9 @@ def delete_prefix_artifacts(self): LOG.error("Bucket not specified") raise BucketNotSpecifiedError() if self.prefix: - prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) - - for obj in prefix_files["Contents"]: + response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) + prefix_files = response.get("Contents", []) + for obj in prefix_files: self.delete_artifact(obj["Key"], True) def file_exists(self, remote_path: str) -> bool: diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index e6b9d14320..0fea4bf3cb 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -14,7 +14,7 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Destination from samcli.lib.package.utils import zip_folder, make_zip -from samcli.lib.utils.packagetype import ZIP +from samcli.lib.utils.packagetype import ZIP, IMAGE from tests.testing_utils import FileCreator from samcli.commands.package import exceptions from samcli.lib.package.artifact_exporter import ( @@ 
-1401,7 +1401,6 @@ def example_yaml_template(self): """ def test_template_delete(self): - template_str = self.example_yaml_template() resource_type1_class = Mock() resource_type1_class.RESOURCE_TYPE = "resource_type1" @@ -1451,3 +1450,52 @@ def test_template_delete(self): resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY) resource_type3_class.assert_not_called() resource_type3_instance.delete.assert_not_called() + + def test_template_get_s3_info(self): + + resource_type1_class = Mock() + resource_type1_class.RESOURCE_TYPE = "resource_type1" + resource_type1_class.ARTIFACT_TYPE = ZIP + resource_type1_class.PROPERTY_NAME = "CodeUri" + resource_type1_class.EXPORT_DESTINATION = Destination.S3 + resource_type1_instance = Mock() + resource_type1_class.return_value = resource_type1_instance + resource_type1_instance.get_property_value = Mock() + resource_type1_instance.get_property_value.return_value = {"Bucket": "bucket", "Key": "prefix/file"} + + resource_type2_class = Mock() + resource_type2_class.RESOURCE_TYPE = "resource_type2" + resource_type2_class.ARTIFACT_TYPE = ZIP + resource_type2_class.EXPORT_DESTINATION = Destination.S3 + resource_type2_instance = Mock() + resource_type2_class.return_value = resource_type2_instance + + resource_type3_class = Mock() + resource_type3_class.RESOURCE_TYPE = "resource_type3" + resource_type3_class.ARTIFACT_TYPE = IMAGE + resource_type3_class.EXPORT_DESTINATION = Destination.ECR + resource_type3_instance = Mock() + resource_type3_class.return_value = resource_type3_instance + + resources_to_export = [resource_type3_class, resource_type2_class, resource_type1_class] + + properties = {"foo": "bar", "CodeUri": "s3://bucket/prefix/file"} + template_dict = { + "Resources": { + "Resource1": {"Type": "resource_type1", "Properties": properties}, + } + } + template_str = json.dumps(template_dict, indent=4, ensure_ascii=False) + + template_exporter = Template( + template_path=None, + parent_dir=None, + 
uploaders=self.uploaders_mock, + code_signer=None, + resources_to_export=resources_to_export, + template_str=template_str, + ) + + s3_info = template_exporter.get_s3_info() + self.assertEqual(s3_info, {"s3_bucket": "bucket", "s3_prefix": "prefix"}) + resource_type1_instance.get_property_value.assert_called_once_with(properties) diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index f1765c3f8c..55b1bfbb2d 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -181,10 +181,26 @@ def test_s3_delete_artifact(self): force_upload=self.force_upload, no_progressbar=self.no_progressbar, ) - s3_uploader.artifact_metadata = {"a": "b"} + self.s3.delete_object = MagicMock() + self.s3.head_object = MagicMock() with self.assertRaises(BucketNotSpecifiedError) as ex: with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: - self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"}) + self.assertTrue(s3_uploader.delete_artifact(f.name)) + + def test_s3_delete_non_existant_artifact(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + self.s3.delete_object = MagicMock() + self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object")) + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertFalse(s3_uploader.delete_artifact(f.name)) def test_s3_delete_artifact_no_bucket(self): s3_uploader = S3Uploader( @@ -217,6 +233,35 @@ def test_s3_delete_artifact_bucket_not_found(self): with self.assertRaises(NoSuchBucketError): s3_uploader.delete_artifact(f.name) + def test_delete_prefix_artifacts_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + 
kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + with self.assertRaises(BucketNotSpecifiedError): + s3_uploader.delete_prefix_artifacts() + + def test_delete_prefix_artifacts_execute(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + + s3_uploader.s3.delete_object = MagicMock() + + s3_uploader.s3.list_objects_v2 = MagicMock(return_value={"Contents": [{"Key": "key"}]}) + + s3_uploader.delete_prefix_artifacts() + s3_uploader.s3.delete_object.assert_called_once_with(Bucket="mock-bucket", Key="key") + def test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3, From 57f42893aec538a09c1a35380e986823d81e8448 Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Tue, 20 Jul 2021 16:30:29 -0400 Subject: [PATCH 4/9] Sam delete integration testing (#3076) * Added methods for cf and s3 files and init UI * Added unit tests for utils methods and s3_uploader * Removed s3_bucket and s3_prefix click options * chore: Increase awareness of same file warning during package (#2946) * chore: increase awareness of same file warning during package * fix formatting & grammar Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> * fix: Allow the base64Encoded field in REST Api, skip validation of unknown fields and validate missing statusCode for Http Api (#2941) * fix API Gateway emulator: - skip validating the non allowed fields for Http Api Gateway, as it always skip the unknown fields - add base64Encoded as an allowed field for Rest Api gateway - base64 decoding will be always done for Http API gateway if the lambda response isBase64Encoded is true regardless the content-type - validate if statusCode is missing in case of Http API, and payload version 1.0 * - accept "true", "True", 
"false", "False" as valid isBase64Encoded values. - Validate on other isBase64Encoded Values - add more integration && unit test cases * fix lint && black issues * use smaller image to test Base64 response * Fixed lint errors and added few unit tests * Make black happy * Added methods for deleting template artifacts * Wait method added for delete cf api * Added LOG statements * Added and updated changes based on CR * Fixed the unit tests in artifact_exporter.py * Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn * Updated code based on Chris' comments * Added condition for resources that have deletionpolicy specified * Small changes and fixes based on the comments * Removed region prompt * Added unit tests for ecr delete method and typing for methods * Reformatted delete_context and added option to skip user prompts * Removed return type from artifact_exporter for delete method * Added unit tests for artifact_exporter and delete_context * Added more unit tests for delete_context and artifact_exporter * Added more unit tests for delete_context and artifact_exporter * Added docs and comments for artifact_exporter and ecr_uploader * Added log statements in delete_context and some updates in unit tests * Init integration tests methods for sam delete * Added more template files as a list for sam delete integration testing * Changed force to no-prompts and updated ecr delete method error handling * Created a separate function for parsing ecr url in ecr_uploader * Reformatted Template class init to pass template_str and init template_dict * Changed how s3 url is obtained for resource_zip edge-case: aws:glue:job * Fixed edge case where resource artifact points to a path style url * run Make black * Added 2 more integrations tests no_stack_deployed and delete for image type resources * Made the parse s3 url funcs protected and defined a parent method and modified delete method for ResourceImageDict * Added methods to extract s3 info from cf template * Added 
testing for get_s3_info method for artifact_exporter and s3_uploader methods * Added few more integration tests for delete * Merged delete-template-artifacts branch code and updated this branch code * Added tests for no prefix present for zip and image * Added a check to confirm input stack is deleted for all the integration tests * Removed commented code and updated method docstring * Better error handling for s3 delete artifacts and fixed bug for getting s3 resources information * Changed get_s3_info to get_property_value and changed output text for s3 delete method * Added integration test for deleting nested stacks Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Chris Rehn --- tests/integration/delete/__init__.py | 0 tests/integration/delete/delete_integ_base.py | 41 +++ .../integration/delete/test_delete_command.py | 334 ++++++++++++++++++ 3 files changed, 375 insertions(+) create mode 100644 tests/integration/delete/__init__.py create mode 100644 tests/integration/delete/delete_integ_base.py create mode 100644 tests/integration/delete/test_delete_command.py diff --git a/tests/integration/delete/__init__.py b/tests/integration/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/delete/delete_integ_base.py b/tests/integration/delete/delete_integ_base.py new file mode 100644 index 0000000000..1eb70ef174 --- /dev/null +++ b/tests/integration/delete/delete_integ_base.py @@ -0,0 +1,41 @@ +import os +from unittest import TestCase + + +class DeleteIntegBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + super().setUp() + + def tearDown(self): + super().tearDown() + + def base_command(self): + command = "sam" + if os.getenv("SAM_CLI_DEV"): + command = "samdev" + + return command + + def 
get_delete_command_list( + self, stack_name=None, region=None, config_file=None, config_env=None, profile=None, no_prompts=None + ): + command_list = [self.base_command(), "delete"] + + if stack_name: + command_list += ["--stack-name", str(stack_name)] + if region: + command_list += ["--region", str(region)] + if config_file: + command_list += ["--config-file", str(config_file)] + if config_env: + command_list += ["--config-env", str(config_env)] + if profile: + command_list += ["--profile", str(profile)] + if no_prompts: + command_list += ["--no-prompts"] + + return command_list diff --git a/tests/integration/delete/test_delete_command.py b/tests/integration/delete/test_delete_command.py new file mode 100644 index 0000000000..639152e746 --- /dev/null +++ b/tests/integration/delete/test_delete_command.py @@ -0,0 +1,334 @@ +import os +import shutil +import tempfile +import time +import uuid +from pathlib import Path +from unittest import skipIf +import logging +import boto3 +import docker +from botocore.config import Config +from parameterized import parameterized +from botocore.exceptions import ClientError + +from samcli.lib.bootstrap.bootstrap import SAM_CLI_STACK_NAME +from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME +from tests.integration.deploy.deploy_integ_base import DeployIntegBase +from tests.integration.delete.delete_integ_base import DeleteIntegBase +from tests.integration.package.package_integ_base import PackageIntegBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI, RUN_BY_CANARY +from tests.testing_utils import run_command, run_command_with_input + +# Delete tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. 
+# This is to restrict package tests to run outside of CI/CD, when the branch is not master or tests are not run by Canary +SKIP_DELETE_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY +CFN_SLEEP = 3 +TIMEOUT = 300 +CFN_PYTHON_VERSION_SUFFIX = os.environ.get("PYTHON_VERSION", "0.0.0").replace(".", "-") +LOG = logging.getLogger(__name__) + + +@skipIf(SKIP_DELETE_TESTS, "Skip delete tests in CI/CD only") +class TestDelete(PackageIntegBase, DeployIntegBase, DeleteIntegBase): + @classmethod + def setUpClass(cls): + cls.docker_client = docker.from_env() + cls.local_images = [ + ("alpine", "latest"), + ] + # setup some images locally by pulling them. + for repo, tag in cls.local_images: + cls.docker_client.api.pull(repository=repo, tag=tag) + + PackageIntegBase.setUpClass() + DeployIntegBase.setUpClass() + DeleteIntegBase.setUpClass() + + def setUp(self): + self.cf_client = boto3.client("cloudformation") + self.sns_arn = os.environ.get("AWS_SNS") + time.sleep(CFN_SLEEP) + super().setUp() + + def test_delete_command_no_stack_deployed(self): + + stack_name = self._method_to_stack_name(self.id()) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, no_prompts=True) + + delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 0) + self.assertIn( + f"Error: The input stack {stack_name} does not exist on Cloudformation", str(delete_process_execute.stdout) + ) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-statemachine.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-moduleversion.yaml", + "aws-cloudformation-resourceversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + 
"aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-stepfunctions-statemachine.yaml", + ] + ) + def test_delete_with_s3_prefix_present_zip(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + config_file_name = stack_name + ".toml" + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, guided=True, config_file=config_file_name + ) + + deploy_process_execute = run_command_with_input( + deploy_command_list, "{}\n\n\n\n\n\n\n\n\n".format(stack_name).encode() + ) + + config_file_path = self.test_data_path.joinpath(config_file_name) + delete_command_list = self.get_delete_command_list( + stack_name=stack_name, config_file=config_file_path, no_prompts=True + ) + + delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + # Remove the local config file created + if os.path.isfile(config_file_path): + os.remove(config_file_path) + + @parameterized.expand( + [ + "aws-serverless-function-image.yaml", + ] + ) + def test_delete_with_s3_prefix_present_image(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + config_file_name = stack_name + ".toml" + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, guided=True, config_file=config_file_name + ) + + deploy_process_execute = run_command_with_input( + deploy_command_list, f"{stack_name}\n\n{self.ecr_repo_name}\n\n\ny\n\n\n\n\n\n".encode() + ) + + config_file_path = self.test_data_path.joinpath(config_file_name) + delete_command_list = self.get_delete_command_list( + stack_name=stack_name, config_file=config_file_path, no_prompts=True + ) + + 
delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + # Remove the local config file created + if os.path.isfile(config_file_path): + os.remove(config_file_path) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + ] + ) + def test_delete_guided_prompts(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + config_file_name = stack_name + ".toml" + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, guided=True, config_file=config_file_name + ) + + deploy_process_execute = run_command_with_input( + deploy_command_list, "{}\n\n\n\n\n\n\n\n\n".format(stack_name).encode() + ) + + config_file_path = self.test_data_path.joinpath(config_file_name) + delete_command_list = self.get_delete_command_list(stack_name=stack_name, config_file=config_file_path) + + LOG.info(delete_command_list) + delete_process_execute = run_command_with_input(delete_command_list, "y\nn\ny\n".encode()) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + # Remove the local config file created + if os.path.isfile(config_file_path): + os.remove(config_file_path) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + ] + ) + def test_delete_no_config_file_zip(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = 
run_command_with_input( + deploy_command_list, "{}\n\n\n\n\nn\n\n\n".format(stack_name).encode() + ) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True) + + delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + ] + ) + def test_delete_no_s3_prefix_zip(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + region="us-east-1", + ) + + deploy_process_execute = run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True) + + delete_process_execute = run_command(delete_command_list) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + @parameterized.expand( + [ + "aws-serverless-function-image.yaml", + ] + ) + def test_delete_no_s3_prefix_image(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + # Try to deploy to another region. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + image_repository=self.ecr_repo_name, + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + region="us-east-1", + ) + + deploy_process_execute = run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True) + + delete_process_execute = run_command(delete_command_list) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + @parameterized.expand( + [os.path.join("deep-nested", "template.yaml"), os.path.join("deep-nested-image", "template.yaml")] + ) + def test_delete_nested_stacks(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + # Note(xinhol): --capabilities does not allow passing multiple, we need to fix it + # here we use samconfig-deep-nested.toml as a workaround + config_file=self.test_data_path.joinpath("samconfig-deep-nested.toml"), + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + image_repository=self.ecr_repo_name, + ) + + deploy_process_execute = run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True) + + delete_process_execute = run_command(delete_command_list) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + def _method_to_stack_name(self, method_name): + """Method expects method name which can be a full path. 
Eg: test.integration.test_deploy_command.method_name""" + method_name = method_name.split(".")[-1] + return f"{method_name.replace('_', '-')}-{CFN_PYTHON_VERSION_SUFFIX}" From dca984d097f764b67ca61d6df9ee8c17ff018401 Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Wed, 21 Jul 2021 18:17:52 -0400 Subject: [PATCH 5/9] Auto ECR Companion Stack Deletion (#3080) * Added ecr_bootstrap * Added companion_stack_manager * Added Companion Stack Manager * Added update_companion_stack * Updated companion_stack_builder File Name * Formatted with Black * Updated get_unreferenced_repos * Updated guided_context to Use Companion Stack * Added Delete Auto Create ECR Repo Prompt * Updated prompt_image_repository Flow * Added --resolve-image-repos * Addressed Some of Pylint Issues * Updated Helper Text * Updated Comments * Fixed Typing * Removed Unused Imports * Updated Unit Tests * Updated UX and Fixed Windows ANSI * Updated Unit Tests * Fixed Import Order * Added Ignore Import Check * Added Integration Tests * Updated help text. 
Co-authored-by: Chris Rehn * Added Comments for Name Generation * Updated Image Option Validator * Updated CompanionStackBuilder to Use Dict instead of String * Fixed Argument Ordering * Added Mapping Information to Help Text * Updated delete_unreferenced_repos Doc String * Updated sync_repos Doc String * Added Justification for ECR Repo Physical ID * Refactored to be Less Coupled * Refactored for prompt_specify_repos * Fixed Unit Test * Moved WaiterConfig Out of Methods * Updated Typing * Updated Managed S3 Template to be Dict * Fixed Typo * Added Comments for _save_image_repositories * Fixed Pylint Issue * Added Missing Check for unreferenced_repo_uris * Updated Variable Name * Fixed Typo * Updated Windows Check to Use platform.system() * Updated update_companion_stack Logic * Fixed Comment Typo * Fixed Typos * Fixed Test Name * Added methods for cf and s3 files and init UI * Added unit tests for utils methods and s3_uploader * Removed s3_bucket and s3_prefix click options * chore: Increase awareness of same file warning during package (#2946) * chore: increase awareness of same file warning during package * fix formatting & grammar Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> * fix: Allow the base64Encoded field in REST Api, skip validation of unknown fields and validate missing statusCode for Http Api (#2941) * fix API Gateway emulator: - skip validating the non allowed fields for Http Api Gateway, as it always skip the unknown fields - add base64Encoded as an allowed field for Rest Api gateway - base64 decoding will be always done for Http API gateway if the lambda response isBase64Encoded is true regardless the content-type - validate if statusCode is missing in case of Http API, and payload version 1.0 * - accept "true", "True", "false", "False" as valid isBase64Encoded values. 
- Validate on other isBase64Encoded Values - add more integration && unit test cases * fix lint && black issues * use smaller image to test Base64 response * Fixed lint errors and added few unit tests * Make black happy * Added methods for deleting template artifacts * Wait method added for delete cf api * fix: pass copy of environment variables for keeping cache valid (#2943) * fix: pass copy of environment variables for keeping cache valid * add integ tests * update docs * make black happy Co-authored-by: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> * Added LOG statements * Added and updated changes based on CR * Fixed the unit tests in artifact_exporter.py * Update HELP_TEXT in delete/command.py Co-authored-by: Chris Rehn * fix: Skip build of Docker image if ImageUri is a valid ECR URL (#2934) (#2935) * Updated code based on Chris' comments * Added condition for resources that have deletionpolicy specified * Small changes and fixes based on the comments * Add condition to managed bucket policy (#2999) * Removed region prompt * Update appveyor.yml to do docker login on both dockerhub and Public ECR (#3005) (#3006) Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> * chore: bump version to 1.25.0 (#3007) Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * temp: reduce python testing matrix (#3008) * temp: disable testing against python 3.8, and enabled 3.7 (#3009) * temp: disable testing against python 3.8, and enabled 3.7 * temp: disable testing against python 3.8, and enabled 3.7 & 3.6 * fix: enable all runtimes in python testing matrix (#3011) * revert: enable all runtimes in python testing matrix * fix indentation for yml * Added unit tests for ecr delete method and typing for methods * Reformatted delete_context and added option to skip user prompts * Removed return type from artifact_exporter for delete method * Added unit tests for artifact_exporter and delete_context * Added more unit 
tests for delete_context and artifact_exporter * chore: update to aws-sam-translator 1.37.0 (#3019) * chore: bump version to 1.26.0 (#3020) * Added more unit tests for delete_context and artifact_exporter * Added docs and comments for artifact_exporter and ecr_uploader * Added log statements in delete_context and some updates in unit tests * Changed force to no-prompts and updated ecr delete method error handling * chore: Improved --resolve-s3 option documentation and deployment without s3 error messages (#2983) * Improve documentation on --resolve-s3 option and improve s3 failure messages * Changed indentation for integration test on s3 error message * Fixed a typo in description * Improve spacing on help text for resolve-s3 option * Created a separate function for parsing ecr url in ecr_uploader * Reformatted Template class init to pass template_str and init template_dict * Changed how s3 url is obtained for resource_zip edge-case: aws:glue:job * Fixed edge case where resource artifact points to a path style url * run Make black * Made the parse s3 url funcs protected and defined a parent method and modified delete method for ResourceImageDict * Changed parse_ecr_url function name to parse_image_url * Defined UI for auto ecr deleton and method calls from companion_stack_manager * Added code for deleting repos from companion stack * Handle json templates deployed to cf * Changed the order of companion stack and ecr repos deletion * Handle delete_failed status for ecr companion stack and changed delete_stack to include retain_resources * Reformatted auto ecr deletion to handle deleting companion stack as input stack name * Fixed and added more unit tests for delete_context * When region is not provided, prompt user to enter profile and region * Removed region prompt and reading it from current session or assign a default instead * Added ECR resource in packageable_resources and refactored ecr companion stack deletion * Added log statements and unit tests for 
ECRResource * Better error handling for ecr delete_artifact * Revert "Merge remote-tracking branch 'wiltons-repo/feat/auto-ecr' into auto-ecr-delete" This reverts commit 0e159c2fa3630b874f13f19336802f6085a92de9, reversing changes made to 1675b7ed231b6472d38eeeeb25e39f6310bbb86f. * Added unit test for delete ecr repository * Fixed small string nits and added docstring for ECRResource * Added some unit tests for s3_uploader, ecr_uploader and delete_context * Updated to context refresh only when region and profile have non None values and removed unused class variable in delete_context * Added unit test for ResourceImageDict class methods Co-authored-by: Wilton Wang Co-authored-by: Chris Rehn Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Co-authored-by: Alexis Facques Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> --- samcli/commands/_utils/resources.py | 2 + samcli/commands/delete/delete_context.py | 174 ++++++++++++++--- samcli/commands/delete/exceptions.py | 10 + samcli/lib/delete/cf_utils.py | 20 +- samcli/lib/package/artifact_exporter.py | 29 ++- samcli/lib/package/ecr_uploader.py | 51 +++-- samcli/lib/package/packageable_resources.py | 29 +++ .../commands/delete/test_delete_context.py | 182 ++++++++++++++++-- tests/unit/lib/delete/test_cf_utils.py | 23 ++- .../lib/package/test_artifact_exporter.py | 78 +++++++- tests/unit/lib/package/test_ecr_uploader.py | 76 ++++++-- tests/unit/lib/package/test_s3_uploader.py | 31 ++- 12 files changed, 604 insertions(+), 101 deletions(-) diff --git a/samcli/commands/_utils/resources.py b/samcli/commands/_utils/resources.py index d3b2a18be3..ce448d6968 100644 
--- a/samcli/commands/_utils/resources.py +++ b/samcli/commands/_utils/resources.py @@ -23,6 +23,7 @@ AWS_GLUE_JOB = "AWS::Glue::Job" AWS_SERVERLESS_STATEMACHINE = "AWS::Serverless::StateMachine" AWS_STEPFUNCTIONS_STATEMACHINE = "AWS::StepFunctions::StateMachine" +AWS_ECR_REPOSITORY = "AWS::ECR::Repository" METADATA_WITH_LOCAL_PATHS = {AWS_SERVERLESSREPO_APPLICATION: ["LicenseUrl", "ReadmeUrl"]} @@ -50,6 +51,7 @@ RESOURCES_WITH_IMAGE_COMPONENT = { AWS_SERVERLESS_FUNCTION: ["ImageUri"], AWS_LAMBDA_FUNCTION: ["Code"], + AWS_ECR_REPOSITORY: ["RepositoryName"], } diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index dcfd9071b0..37014b74bd 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -2,18 +2,26 @@ Delete a SAM stack """ import logging + +import json import boto3 import click from click import confirm from click import prompt + +from samcli.lib.utils.hash import str_checksum from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent from samcli.lib.delete.cf_utils import CfUtils + from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name +from samcli.cli.context import Context + +from samcli.commands.delete.exceptions import CfDeleteFailedStatusError from samcli.lib.package.artifact_exporter import Template from samcli.lib.package.ecr_uploader import ECRUploader @@ -38,10 +46,12 @@ def __init__(self, stack_name: str, region: str, profile: str, config_file: str, self.s3_prefix = None self.cf_utils = None self.s3_uploader = None + self.ecr_uploader = None self.uploaders = None self.cf_template_file_name = None self.delete_artifacts_folder = None self.delete_cf_template_file = None + self.companion_stack_name = None def __enter__(self): self.parse_config_file() @@ -69,15 +79,13 @@ def parse_config_file(self): if not 
self.stack_name: self.stack_name = config_options.get("stack_name", None) # If the stack_name is same as the one present in samconfig file, - # get the information about parameters if not specified by customer. + # get the information about parameters if not specified by user. if self.stack_name and self.stack_name == config_options.get("stack_name", None): LOG.debug("Local config present and using the defined options") if not self.region: self.region = config_options.get("region", None) - click.get_current_context().region = self.region if not self.profile: self.profile = config_options.get("profile", None) - click.get_current_context().profile = self.profile self.s3_bucket = config_options.get("s3_bucket", None) self.s3_prefix = config_options.get("s3_prefix", None) @@ -85,6 +93,16 @@ def init_clients(self): """ Initialize all the clients being used by sam delete. """ + if not self.region: + session = boto3.Session() + region = session.region_name + self.region = region if region else "us-east-1" + + if self.profile: + Context.get_current_context().profile = self.profile + if self.region: + Context.get_current_context().region = self.region + boto_config = get_boto_config_with_user_agent() # Define cf_client based on the region as different regions can have same stack-names @@ -95,22 +113,21 @@ def init_clients(self): s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) ecr_client = boto3.client("ecr", region_name=self.region if self.region else None, config=boto_config) - self.region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) - ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) + self.ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) - self.uploaders = 
Uploaders(self.s3_uploader, ecr_uploader) + self.uploaders = Uploaders(self.s3_uploader, self.ecr_uploader) self.cf_utils = CfUtils(cloudformation_client) - def guided_prompts(self): + def s3_prompts(self): """ - Guided prompts asking customer to delete artifacts + Guided prompts asking user to delete s3 artifacts """ # Note: s3_bucket and s3_prefix information is only # available if a local toml file is present or if # this information is obtained from the template resources and so if this - # information is not found, warn the customer that S3 artifacts + # information is not found, warn the user that S3 artifacts # will need to be manually deleted. if not self.no_prompts and self.s3_bucket: @@ -118,7 +135,7 @@ def guided_prompts(self): self.delete_artifacts_folder = confirm( click.style( "\tAre you sure you want to delete the folder" - + f" {self.s3_prefix} in S3 which contains the artifacts?", + f" {self.s3_prefix} in S3 which contains the artifacts?", bold=True, ), default=False, @@ -137,6 +154,85 @@ def guided_prompts(self): else: self.delete_cf_template_file = True + def ecr_companion_stack_prompts(self): + """ + User prompt to delete the ECR companion stack. + """ + click.echo(f"\tFound ECR Companion Stack {self.companion_stack_name}") + if not self.no_prompts: + delete_ecr_companion_stack_prompt = confirm( + click.style( + "\tDo you you want to delete the ECR companion stack" + f" {self.companion_stack_name} in the region {self.region} ?", + bold=True, + ), + default=False, + ) + return delete_ecr_companion_stack_prompt + return True + + def ecr_repos_prompts(self, template: Template): + """ + User prompts to delete the ECR repositories for the given template. + + :param template: Template to get the ECR repositories. 
+ """ + retain_repos = [] + ecr_repos = template.get_ecr_repos() + + if not self.no_prompts: + for logical_id in ecr_repos: + # Get all the repos from the companion stack + repo = ecr_repos[logical_id] + repo_name = repo["Repository"] + + delete_repo = confirm( + click.style( + f"\tECR repository {repo_name}" + " may not be empty. Do you want to delete the repository and all the images in it ?", + bold=True, + ), + default=False, + ) + if not delete_repo: + retain_repos.append(logical_id) + return retain_repos + + def delete_ecr_companion_stack(self): + """ + Delete the ECR companion stack and ECR repositories based + on user input. + """ + delete_ecr_companion_stack_prompt = self.ecr_companion_stack_prompts() + if delete_ecr_companion_stack_prompt or self.no_prompts: + cf_ecr_companion_stack = self.cf_utils.get_stack_template(self.companion_stack_name, TEMPLATE_STAGE) + ecr_stack_template_str = cf_ecr_companion_stack.get("TemplateBody", None) + ecr_stack_template_str = json.dumps(ecr_stack_template_str, indent=4, ensure_ascii=False) + + ecr_companion_stack_template = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders, + code_signer=None, + template_str=ecr_stack_template_str, + ) + + retain_repos = self.ecr_repos_prompts(ecr_companion_stack_template) + + # Delete the repos created by ECR companion stack if not retained + ecr_companion_stack_template.delete(retain_resources=retain_repos) + + click.echo(f"\t- Deleting ECR Companion Stack {self.companion_stack_name}") + try: + # If delete_stack fails and its status changes to DELETE_FAILED, retain + # the user input repositories and delete the stack. 
+ self.cf_utils.delete_stack(stack_name=self.companion_stack_name) + self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) + LOG.debug("Deleted ECR Companion Stack: %s", self.companion_stack_name) + except CfDeleteFailedStatusError: + LOG.debug("delete_stack resulted failed and so re-try with retain_resources") + self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_resources=retain_repos) + def delete(self): """ Delete method calls for Cloudformation stacks and S3 and ECR artifacts @@ -145,12 +241,19 @@ def delete(self): cf_template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE) template_str = cf_template.get("TemplateBody", None) + if isinstance(template_str, dict): + template_str = json.dumps(template_str, indent=4, ensure_ascii=False) + # Get the cloudformation template name using template_str with mktempfile() as temp_file: self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") template = Template( - template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str + template_path=None, + parent_dir=None, + uploaders=self.uploaders, + code_signer=None, + template_str=template_str, ) # If s3 info is not available, try to obtain it from CF @@ -163,16 +266,21 @@ def delete(self): self.s3_prefix = s3_info["s3_prefix"] self.s3_uploader.prefix = self.s3_prefix - self.guided_prompts() + self.s3_prompts() + + retain_resources = self.ecr_repos_prompts(template) - # Delete the primary stack - click.echo(f"\n\t- Deleting Cloudformation stack {self.stack_name}") - self.cf_utils.delete_stack(stack_name=self.stack_name) - self.cf_utils.wait_for_delete(self.stack_name) - LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + # ECR companion stack delete prompts, if it exists + parent_stack_hash = str_checksum(self.stack_name) + possible_companion_stack_name = f"{self.stack_name[:104]}-{parent_stack_hash[:8]}-CompanionStack" + 
ecr_companion_stack_exists = self.cf_utils.has_stack(stack_name=possible_companion_stack_name) + if ecr_companion_stack_exists: + LOG.debug("ECR Companion stack found for the input stack") + self.companion_stack_name = possible_companion_stack_name + self.delete_ecr_companion_stack() - # Delete the artifacts - template.delete() + # Delete the artifacts and retain resources user selected not to delete + template.delete(retain_resources=retain_resources) # Delete the CF template file in S3 if self.delete_cf_template_file: @@ -182,24 +290,33 @@ def delete(self): elif self.delete_artifacts_folder: self.s3_uploader.delete_prefix_artifacts() - # If s3_bucket information is not available - elif not self.s3_bucket: + # Delete the primary input stack + try: + click.echo(f"\t- Deleting Cloudformation stack {self.stack_name}") + self.cf_utils.delete_stack(stack_name=self.stack_name) + self.cf_utils.wait_for_delete(self.stack_name) + LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + except CfDeleteFailedStatusError: + LOG.debug("delete_stack resulted failed and so re-try with retain_resources") + self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_resources) + + # If s3_bucket information is not available, warn the user + if not self.s3_bucket: LOG.debug("Cannot delete s3 files as no s3_bucket found") click.secho( - "\nWarning: s3_bucket and s3_prefix information cannot be obtained," - " delete the files manually if required", + "\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file" + " or cloudformation template, delete the s3 files manually if required", fg="yellow", ) def run(self): """ - Delete the stack based on the argument provided by customers and samconfig.toml. + Delete the stack based on the argument provided by user and samconfig.toml. 
""" if not self.no_prompts: delete_stack = confirm( click.style( - f"\tAre you sure you want to delete the stack {self.stack_name}" - + f" in the region {self.region} ?", + f"\tAre you sure you want to delete the stack {self.stack_name}" f" in the region {self.region} ?", bold=True, ), default=False, @@ -214,4 +331,7 @@ def run(self): click.echo("\nDeleted successfully") else: LOG.debug("Input stack does not exists on Cloudformation") - click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") + click.echo( + f"Error: The input stack {self.stack_name} does" + f" not exist on Cloudformation in the region {self.region}" + ) diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py index 7e2ba5105c..9a4b6a81cd 100644 --- a/samcli/commands/delete/exceptions.py +++ b/samcli/commands/delete/exceptions.py @@ -14,6 +14,16 @@ def __init__(self, stack_name, msg): super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) +class CfDeleteFailedStatusError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Stack could not be deleted as it encountered DELETE_FAILED status: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + class FetchTemplateFailedError(UserException): def __init__(self, stack_name, msg): self.stack_name = stack_name diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index 40d3b58183..d418306e00 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -5,9 +5,9 @@ import logging -from typing import Dict +from typing import Dict, List, Optional from botocore.exceptions import ClientError, BotoCoreError, WaiterError -from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError, 
CfDeleteFailedStatusError LOG = logging.getLogger(__name__) @@ -84,14 +84,17 @@ def get_stack_template(self, stack_name: str, stage: str) -> Dict: LOG.error("Unable to get stack details.", exc_info=e) raise e - def delete_stack(self, stack_name: str): + def delete_stack(self, stack_name: str, retain_resources: Optional[List] = None): """ Delete the Cloudformation stack with the given stack_name :param stack_name: Name or ID of the stack + :param retain_resources: List of repositories to retain if the stack has DELETE_FAILED status. """ + if not retain_resources: + retain_resources = [] try: - self._client.delete_stack(StackName=stack_name) + self._client.delete_stack(StackName=stack_name, RetainResources=retain_resources) except (ClientError, BotoCoreError) as e: # If there are credentials, environment errors, @@ -120,10 +123,7 @@ def wait_for_delete(self, stack_name): waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) except WaiterError as ex: - resp = ex.last_response - status = resp["Status"] - reason = resp["StatusReason"] + if "DELETE_FAILED" in str(ex): + raise CfDeleteFailedStatusError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex - raise DeleteFailedError( - stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason) - ) from ex + raise DeleteFailedError(stack_name=stack_name, msg="ex: {0}".format(ex)) from ex diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 99567558e8..1e0aa75b53 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -16,7 +16,7 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import os -from typing import Dict, Optional +from typing import Dict, Optional, List from botocore.utils import set_value_from_jmespath @@ -33,6 +33,7 @@ METADATA_EXPORT_LIST, GLOBAL_EXPORT_DICT, ResourceZip, + ECRResource, ) from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Uploaders, Destination @@ -239,7 +240,7 @@ def export(self) -> Dict: return self.template_dict - def delete(self): + def delete(self, retain_resources: List): """ Deletes all the artifacts referenced by the given Cloudformation template """ @@ -255,7 +256,7 @@ def delete(self): resource_deletion_policy = resource.get("DeletionPolicy", None) # If the deletion policy is set to Retain, # do not delete the artifact for the resource. - if resource_deletion_policy != "Retain": + if resource_deletion_policy != "Retain" and resource_id not in retain_resources: for exporter_class in self.resources_to_export: if exporter_class.RESOURCE_TYPE != resource_type: continue @@ -265,6 +266,28 @@ def delete(self): exporter = exporter_class(self.uploaders, None) exporter.delete(resource_id, resource_dict) + def get_ecr_repos(self): + """ + Get all the ecr repos from the template + """ + ecr_repos = {} + if "Resources" not in self.template_dict: + return ecr_repos + + self._apply_global_values() + for resource_id, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", {}) + resource_deletion_policy = resource.get("DeletionPolicy", None) + if resource_deletion_policy == "Retain" or resource_type != "AWS::ECR::Repository": + continue + + ecr_resource = ECRResource(self.uploaders, None) + ecr_repos[resource_id] = {"Repository": ecr_resource.get_property_value(resource_dict)} + + return ecr_repos + def get_s3_info(self): """ Iterates the template_dict resources with S3 EXPORT_DESTINATION to get the diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py 
index e899bab7c8..c9546aa93e 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -16,7 +16,6 @@ DockerPushFailedError, DockerLoginFailedError, ECRAuthorizationError, - ImageNotFoundError, DeleteArtifactFailedError, ) from samcli.lib.package.image_utils import tag_translation @@ -114,35 +113,47 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): # Image not found image_details = resp["failures"][0] if image_details["failureCode"] == "ImageNotFound": - LOG.error("ImageNotFound Exception") - message_fmt = ( - "Could not delete image for {property_name}" - " parameter of {resource_id} resource as it does not exist. \n" + LOG.debug( + "Could not delete image for %s parameter of %s resource as it does not exist. \n", + property_name, + resource_id, ) - raise ImageNotFoundError(resource_id, property_name, message_fmt=message_fmt) - - LOG.error( - "Could not delete the image for the resource %s. FailureCode: %s, FailureReason: %s", - property_name, - image_details["failureCode"], - image_details["failureReason"], - ) - raise DeleteArtifactFailedError( - resource_id=resource_id, property_name=property_name, ex=image_details["failureReason"] - ) - - LOG.debug("Deleting ECR image with tag %s", image_tag) - click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") + click.echo(f"\t- Could not find image with tag {image_tag} in repository {repository}") + else: + LOG.debug( + "Could not delete the image for the resource %s. 
FailureCode: %s, FailureReason: %s", + property_name, + image_details["failureCode"], + image_details["failureReason"], + ) + click.echo(f"\t- Could not delete image with tag {image_tag} in repository {repository}") + else: + LOG.debug("Deleting ECR image with tag %s", image_tag) + click.echo(f"\t- Deleting ECR image {image_tag} in repository {repository}") except botocore.exceptions.ClientError as ex: # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException LOG.error("DeleteArtifactFailedError Exception : %s", str(ex)) raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + def delete_ecr_repository(self, physical_id: str): + """ + Delete ECR repository using the physical_id + + :param: physical_id of the repository to be deleted + """ + try: + click.echo(f"\t- Deleting ECR repository {physical_id}") + self.ecr_client.delete_repository(repositoryName=physical_id, force=True) + except self.ecr_client.exceptions.RepositoryNotFoundException: + # If the repository is empty, cloudformation automatically deletes + # the repository when cf_client.delete_stack is called. + LOG.debug("Could not find repository %s", physical_id) + @staticmethod def parse_image_url(image_uri: str) -> Dict: result = {} - registry_repo_tag = image_uri.split("/") + registry_repo_tag = image_uri.split("/", 1) repo_colon_image_tag = None if len(registry_repo_tag) == 1: # If there is no registry specified, e.g. 
repo:tag diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 49f0e47653..808468a670 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -46,6 +46,7 @@ METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS, RESOURCES_WITH_IMAGE_COMPONENT, + AWS_ECR_REPOSITORY, ) from samcli.lib.utils.packagetype import IMAGE, ZIP @@ -517,6 +518,33 @@ class CloudFormationResourceVersionSchemaHandlerPackage(ResourceZip): PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[AWS_CLOUDFORMATION_RESOURCEVERSION][0] +class ECRResource(Resource): + """ + Represents CloudFormation resources ECR for deleting the ECR + repository with the property name RepositoryName. This class is used + only for deleting the repository and not exporting anything. + """ + + RESOURCE_TYPE = AWS_ECR_REPOSITORY + PROPERTY_NAME = RESOURCES_WITH_IMAGE_COMPONENT[RESOURCE_TYPE][0] + ARTIFACT_TYPE = ZIP + EXPORT_DESTINATION = Destination.ECR + + def delete(self, resource_id, resource_dict): + if resource_dict is None: + return + + repository_name = self.get_property_value(resource_dict) + if repository_name: + self.uploader.delete_ecr_repository(physical_id=repository_name) + + def get_property_value(self, resource_dict): + if resource_dict is None: + return None + + return jmespath.search(self.PROPERTY_NAME, resource_dict) + + RESOURCES_EXPORT_LIST = [ ServerlessFunctionResource, ServerlessFunctionImageResource, @@ -538,6 +566,7 @@ class CloudFormationResourceVersionSchemaHandlerPackage(ResourceZip): GlueJobCommandScriptLocationResource, CloudFormationModuleVersionModulePackage, CloudFormationResourceVersionSchemaHandlerPackage, + ECRResource, ] METADATA_EXPORT_LIST = [ServerlessRepoApplicationReadme, ServerlessRepoApplicationLicense] diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index f0975f144e..087f552bc4 100644 --- 
a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -4,15 +4,18 @@ import click from samcli.commands.delete.delete_context import DeleteContext +from samcli.lib.package.artifact_exporter import Template from samcli.cli.cli_config_file import TomlProvider from samcli.lib.delete.cf_utils import CfUtils from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.package.ecr_uploader import ECRUploader class TestDeleteContext(TestCase): @patch("samcli.commands.delete.delete_context.click.echo") + @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) - def test_delete_context_stack_does_not_exist(self, patched_click_echo): + def test_delete_context_stack_does_not_exist(self, patched_click_get_current_context, patched_click_echo): with DeleteContext( stack_name="test", region="us-east-1", @@ -24,7 +27,7 @@ def test_delete_context_stack_does_not_exist(self, patched_click_echo): delete_context.run() expected_click_echo_calls = [ - call(f"Error: The input stack test does not exist on Cloudformation"), + call(f"Error: The input stack test does" + f" not exist on Cloudformation in the region us-east-1"), ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) @@ -57,7 +60,7 @@ def test_delete_context_enter(self): ) ), ) - @patch("samcli.commands.deploy.guided_context.click.get_current_context") + @patch("samcli.commands.delete.delete_context.click.get_current_context") def test_delete_context_parse_config_file(self, patched_click_get_current_context): patched_click_get_current_context = MagicMock() with DeleteContext( @@ -74,6 +77,30 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex self.assertEqual(delete_context.s3_bucket, "s3-bucket") self.assertEqual(delete_context.s3_prefix, "s3-prefix") + @patch("samcli.commands.delete.delete_context.prompt") + 
@patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) + def test_delete_no_user_input(self, patched_click_get_current_context, patched_prompt): + patched_click_get_current_context = MagicMock() + with DeleteContext( + stack_name=None, + region=None, + config_file=None, + config_env=None, + profile=None, + no_prompts=True, + ) as delete_context: + delete_context.run() + + patched_prompt.side_effect = ["sam-app"] + + expected_prompt_calls = [ + call(click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING), + ] + + self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) + self.assertEqual(delete_context.region, "us-east-1") + @patch.object( TomlProvider, "__call__", @@ -93,8 +120,9 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) - @patch("samcli.commands.deploy.guided_context.click.get_current_context") + @patch("samcli.commands.delete.delete_context.click.get_current_context") def test_delete_context_valid_execute_run(self, patched_click_get_current_context): patched_click_get_current_context = MagicMock() with DeleteContext( @@ -107,19 +135,23 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex ) as delete_context: delete_context.run() - self.assertEqual(CfUtils.has_stack.call_count, 1) - self.assertEqual(CfUtils.get_stack_template.call_count, 1) - self.assertEqual(CfUtils.delete_stack.call_count, 1) - self.assertEqual(CfUtils.wait_for_delete.call_count, 1) + 
self.assertEqual(CfUtils.has_stack.call_count, 2) + self.assertEqual(CfUtils.get_stack_template.call_count, 2) + self.assertEqual(CfUtils.delete_stack.call_count, 2) + self.assertEqual(CfUtils.wait_for_delete.call_count, 2) self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) + self.assertEqual(Template.get_ecr_repos.call_count, 2) @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.deploy.guided_context.click.secho") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) - def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_echo): + def test_delete_context_no_s3_bucket( + self, patched_click_get_current_context, patched_click_secho, patched_click_echo + ): with DeleteContext( stack_name="test", region="us-east-1", @@ -132,27 +164,30 @@ def test_delete_context_no_s3_bucket(self, patched_click_secho, patched_click_ec delete_context.run() expected_click_secho_calls = [ call( - "\nWarning: s3_bucket and s3_prefix information cannot be obtained," - " delete the files manually if required", + "\nWarning: s3_bucket and s3_prefix information could not be obtained from local config file" + " or cloudformation template, delete the s3 files manually if required", fg="yellow", ), ] self.assertEqual(expected_click_secho_calls, patched_click_secho.call_args_list) expected_click_echo_calls = [ - call("\n\t- Deleting Cloudformation stack test"), + call("\t- Deleting Cloudformation stack test"), call("\nDeleted successfully"), ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) 
@patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) - def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confirm, patched_get_cf_template_name): + def test_guided_prompts_s3_bucket_prefix_present_execute_run( + self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name + ): patched_get_cf_template_name.return_value = "hello.template" with DeleteContext( @@ -164,7 +199,6 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi no_prompts=None, ) as delete_context: patched_confirm.side_effect = [True, False, True] - delete_context.cf_template_file_name = "hello.template" delete_context.s3_bucket = "s3_bucket" delete_context.s3_prefix = "s3_prefix" @@ -201,13 +235,15 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run(self, patched_confi @patch("samcli.commands.delete.delete_context.get_cf_template_name") @patch("samcli.commands.delete.delete_context.confirm") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) @patch.object(CfUtils, "delete_stack", MagicMock()) @patch.object(CfUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, 
"delete_artifact", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( - self, patched_confirm, patched_get_cf_template_name + self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name ): patched_get_cf_template_name.return_value = "hello.template" @@ -220,7 +256,6 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( no_prompts=None, ) as delete_context: patched_confirm.side_effect = [True, True] - delete_context.cf_template_file_name = "hello.template" delete_context.s3_bucket = "s3_bucket" delete_context.run() @@ -244,3 +279,112 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) self.assertTrue(delete_context.delete_cf_template_file) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.confirm") + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_artifact", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) + @patch.object(Template, "get_ecr_repos", MagicMock(side_effect=({}, {"logical_id": {"Repository": "test_id"}}))) + def test_guided_prompts_ecr_companion_stack_present_execute_run( + self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name + ): + + patched_get_cf_template_name.return_value = "hello.template" + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + 
no_prompts=None, + ) as delete_context: + patched_confirm.side_effect = [True, False, True, True, True] + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + # Now to check for all the defaults on confirmations. + expected_confirmation_calls = [ + call( + click.style( + f"\tAre you sure you want to delete the stack test in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tAre you sure you want to delete the folder" + + " s3_prefix in S3 which contains the artifacts?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you want to delete the template file hello.template in S3?", + bold=True, + ), + default=False, + ), + call( + click.style( + "\tDo you you want to delete the ECR companion stack" + + " test-098f6bcd-CompanionStack in the region us-east-1 ?", + bold=True, + ), + default=False, + ), + call( + click.style( + f"\tECR repository test_id" + + " may not be empty. 
Do you want to delete the repository and all the images in it ?", + bold=True, + ), + default=False, + ), + ] + + self.assertEqual(expected_confirmation_calls, patched_confirm.call_args_list) + self.assertFalse(delete_context.delete_artifacts_folder) + self.assertTrue(delete_context.delete_cf_template_file) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.click.echo") + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) + @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) + def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( + self, patched_click_get_current_context, patched_click_echo, patched_get_cf_template_name + ): + CfUtils.get_stack_template.return_value = { + "TemplateBody": {"Metadata": {"CompanionStackname": "test-098f6bcd-CompanionStack"}} + } + patched_get_cf_template_name.return_value = "hello.template" + with DeleteContext( + stack_name="test-098f6bcd-CompanionStack", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=True, + ) as delete_context: + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + expected_click_echo_calls = [ + call("\t- Deleting Cloudformation stack test-098f6bcd-CompanionStack"), + call("\nDeleted successfully"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) diff --git 
a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 90d764a5c4..61c1c186e1 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -2,7 +2,7 @@ from unittest import TestCase -from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError, CfDeleteFailedStatusError from botocore.exceptions import ClientError, BotoCoreError, WaiterError from samcli.lib.delete.cf_utils import CfUtils @@ -87,6 +87,12 @@ def test_cf_utils_get_stack_template_exception(self): with self.assertRaises(Exception): self.cf_utils.get_stack_template("test", "Original") + def test_cf_utils_get_stack_template_success(self): + self.cf_utils._client.get_template = MagicMock(return_value=({"TemplateBody": "Hello World"})) + + response = self.cf_utils.get_stack_template("test", "Original") + self.assertEqual(response, {"TemplateBody": "Hello World"}) + def test_cf_utils_delete_stack_exception_botocore(self): self.cf_utils._client.delete_stack = MagicMock(side_effect=BotoCoreError()) with self.assertRaises(DeleteFailedError): @@ -95,7 +101,7 @@ def test_cf_utils_delete_stack_exception_botocore(self): def test_cf_utils_delete_stack_exception(self): self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) with self.assertRaises(Exception): - self.cf_utils.delete_stack("test") + self.cf_utils.delete_stack("test", ["retain_logical_id"]) def test_cf_utils_wait_for_delete_exception(self): self.cf_utils._client.get_waiter = MagicMock( @@ -109,3 +115,16 @@ def test_cf_utils_wait_for_delete_exception(self): ) with self.assertRaises(DeleteFailedError): self.cf_utils.wait_for_delete("test") + + def test_cf_utils_wait_for_delete_failed_status(self): + self.cf_utils._client.get_waiter = MagicMock( + return_value=MockDeleteWaiter( + ex=WaiterError( + name="wait_for_delete", + reason="DELETE_FAILED ", + 
last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(CfDeleteFailedStatusError): + self.cf_utils.wait_for_delete("test") diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 0fea4bf3cb..339340097e 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -52,6 +52,8 @@ CloudFormationResourceVersionSchemaHandlerPackage, ResourceZip, ResourceImage, + ResourceImageDict, + ECRResource, ) @@ -454,6 +456,35 @@ class MockResource(ResourceImage): resource.delete(resource_id, resource_dict) self.assertEqual(self.ecr_uploader_mock.delete_artifact.call_count, 1) + @patch("samcli.lib.package.packageable_resources.upload_local_image_artifacts") + def test_resource_image_dict(self, upload_local_image_artifacts_mock): + # Property value is a path to an image + + class MockResource(ResourceImageDict): + PROPERTY_NAME = "foo" + + resource = MockResource(self.uploaders_mock, None) + + resource_id = "id" + resource_dict = {} + resource_dict[resource.PROPERTY_NAME] = "image:latest" + parent_dir = "dir" + ecr_url = "123456789.dkr.ecr.us-east-1.amazonaws.com/sam-cli" + + upload_local_image_artifacts_mock.return_value = ecr_url + + resource.export(resource_id, resource_dict, parent_dir) + + upload_local_image_artifacts_mock.assert_called_once_with( + resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.ecr_uploader_mock + ) + + self.assertEqual(resource_dict[resource.PROPERTY_NAME][resource.EXPORT_PROPERTY_CODE_KEY], ecr_url) + + self.ecr_uploader_mock.delete_artifact = MagicMock() + resource.delete(resource_id, resource_dict) + self.assertEqual(self.ecr_uploader_mock.delete_artifact.call_count, 1) + def test_lambda_image_resource_package_success(self): # Property value is set to an image @@ -768,6 +799,25 @@ class MockResource(ResourceWithS3UrlDict): resource.delete(resource_id, 
resource_dict) self.s3_uploader_mock.delete_artifact.assert_called_once_with(remote_path="key1/key2", is_key=True) + def test_ecr_resource_delete(self): + # Property value is set to an image + + class MockResource(ECRResource): + PROPERTY_NAME = "foo" + + resource = MockResource(self.uploaders_mock, None) + + resource_id = "id" + resource_dict = {} + repository = "repository" + resource_dict[resource.PROPERTY_NAME] = repository + + self.ecr_uploader_mock.delete_ecr_repository = Mock() + + resource.delete(resource_id, resource_dict) + + self.ecr_uploader_mock.delete_ecr_repository.assert_called_once_with(physical_id="repository") + @patch("samcli.lib.package.packageable_resources.upload_local_artifacts") def test_resource_with_signing_configuration(self, upload_local_artifacts_mock): class MockResource(ResourceZip): @@ -1442,7 +1492,7 @@ def test_template_delete(self): template_str=template_str, ) - template_exporter.delete() + template_exporter.delete(retain_resources=[]) resource_type1_class.assert_called_once_with(self.uploaders_mock, None) resource_type1_instance.delete.assert_called_once_with("Resource1", mock.ANY) @@ -1451,6 +1501,32 @@ def test_template_delete(self): resource_type3_class.assert_not_called() resource_type3_instance.delete.assert_not_called() + def test_get_ecr_repos(self): + resources_to_export = [ECRResource] + + properties = {"RepositoryName": "test_repo"} + template_dict = { + "Resources": { + "Resource1": {"Type": "AWS::ECR::Repository", "Properties": properties}, + "Resource2": {"Type": "resource_type1", "Properties": properties}, + "Resource3": {"Type": "AWS::ECR::Repository", "Properties": properties, "DeletionPolicy": "Retain"}, + } + } + + template_str = json.dumps(template_dict, indent=4, ensure_ascii=False) + + template_exporter = Template( + template_path=None, + parent_dir=None, + uploaders=self.uploaders_mock, + code_signer=None, + resources_to_export=resources_to_export, + template_str=template_str, + ) + + repos = 
template_exporter.get_ecr_repos() + self.assertEqual(repos, {"Resource1": {"Repository": "test_repo"}}) + def test_template_get_s3_info(self): resource_type1_class = Mock() diff --git a/tests/unit/lib/package/test_ecr_uploader.py b/tests/unit/lib/package/test_ecr_uploader.py index 25b2c4a047..7a0a2ca540 100644 --- a/tests/unit/lib/package/test_ecr_uploader.py +++ b/tests/unit/lib/package/test_ecr_uploader.py @@ -1,10 +1,11 @@ from unittest import TestCase -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, patch, call from botocore.exceptions import ClientError from docker.errors import APIError, BuildError from parameterized import parameterized +# import click from samcli.commands.package.exceptions import ( DockerLoginFailedError, DockerPushFailedError, @@ -176,7 +177,33 @@ def test_upload_failure_while_streaming(self): with self.assertRaises(DockerPushFailedError): ecr_uploader.upload(image, resource_name="HelloWorldFunction") - def test_delete_artifact_no_image_error(self): + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_artifact_successful(self, patched_click_echo): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.batch_delete_image.return_value = { + "imageIds": [ + {"imageTag": self.tag}, + ], + "failures": [], + } + + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + expected_click_echo_calls = [ + call(f"\t- Deleting ECR image {self.tag} in repository {self.ecr_repo}"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_artifact_no_image_found(self, patched_click_echo): ecr_uploader = ECRUploader( docker_client=self.docker_client, ecr_client=self.ecr_client, @@ 
-188,12 +215,17 @@ def test_delete_artifact_no_image_error(self): "failures": [{"imageId": {"imageTag": self.tag}, "failureCode": "ImageNotFound"}] } - with self.assertRaises(ImageNotFoundError): - ecr_uploader.delete_artifact( - image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name - ) + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + expected_click_echo_calls = [ + call(f"\t- Could not find image with tag {self.tag} in repository mock-image-repo"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) - def test_delete_artifact_resp_failure(self): + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_artifact_resp_failure(self, patched_click_echo): ecr_uploader = ECRUploader( docker_client=self.docker_client, ecr_client=self.ecr_client, @@ -211,10 +243,14 @@ def test_delete_artifact_resp_failure(self): ] } - with self.assertRaises(DeleteArtifactFailedError): - ecr_uploader.delete_artifact( - image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name - ) + ecr_uploader.delete_artifact( + image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name + ) + + expected_click_echo_calls = [ + call(f"\t- Could not delete image with tag {self.tag} in repository mock-image-repo"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) def test_delete_artifact_client_error(self): ecr_uploader = ECRUploader( @@ -235,6 +271,24 @@ def test_delete_artifact_client_error(self): image_uri=self.image_uri, resource_id=self.resource_id, property_name=self.property_name ) + @patch("samcli.lib.package.ecr_uploader.click.echo") + def test_delete_ecr_repository(self, patched_click_echo): + ecr_uploader = ECRUploader( + docker_client=self.docker_client, + ecr_client=self.ecr_client, + ecr_repo=self.ecr_repo, + 
ecr_repo_multi=self.ecr_repo_multi, + tag=self.tag, + ) + ecr_uploader.ecr_client.delete_repository = MagicMock() + + ecr_uploader.delete_ecr_repository(physical_id=self.ecr_repo) + + expected_click_echo_calls = [ + call(f"\t- Deleting ECR repository {self.ecr_repo}"), + ] + self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + def test_parse_image_url(self): valid = [ diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 55b1bfbb2d..a78a7dcef4 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -172,10 +172,10 @@ def test_s3_upload_no_bucket(self): s3_uploader.upload(f.name, remote_path) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) - def test_s3_delete_artifact(self): + def test_s3_delete_artifact_successfull(self): s3_uploader = S3Uploader( s3_client=self.s3, - bucket_name=None, + bucket_name=self.bucket_name, prefix=self.prefix, kms_key_id=self.kms_key_id, force_upload=self.force_upload, @@ -183,14 +183,14 @@ def test_s3_delete_artifact(self): ) self.s3.delete_object = MagicMock() self.s3.head_object = MagicMock() - with self.assertRaises(BucketNotSpecifiedError) as ex: - with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: - self.assertTrue(s3_uploader.delete_artifact(f.name)) + + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertTrue(s3_uploader.delete_artifact(f.name)) def test_s3_delete_non_existant_artifact(self): s3_uploader = S3Uploader( s3_client=self.s3, - bucket_name=None, + bucket_name=self.bucket_name, prefix=self.prefix, kms_key_id=self.kms_key_id, force_upload=self.force_upload, @@ -198,9 +198,24 @@ def test_s3_delete_non_existant_artifact(self): ) self.s3.delete_object = MagicMock() self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object")) - with self.assertRaises(BucketNotSpecifiedError) as ex: + with 
tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertFalse(s3_uploader.delete_artifact(f.name)) + + def test_s3_delete_artifact_client_error(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + self.s3.delete_object = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "ClientError"}}, operation_name="delete_object") + ) + with self.assertRaises(ClientError): with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: - self.assertFalse(s3_uploader.delete_artifact(f.name)) + s3_uploader.delete_artifact(f.name) def test_s3_delete_artifact_no_bucket(self): s3_uploader = S3Uploader( From fd4f771b74c281abbe43518c942395ff0e2f77ef Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Fri, 30 Jul 2021 13:03:34 -0400 Subject: [PATCH 6/9] Sam delete bug fixes (#3122) * Fixed small bugs for no_prompts and for deleting artifacts * Updated integration tests and added test for termination protection for sam delete * Added few more integration tests for guided and non-guided delete * Updated handling termination protection and changed the order of deleting artifacts and stack * Added comments for delete artifacts to handle intrinsic ref functions and handled this for image resources * Added integration test for retaining s3 artifact * Changed option_name to the correct values in delete_context * Small UX fix and updated delete_prefix_artifacts method prefix to fetch S3 files * Added a note in s3_uploader.py about using the api list_objects_v2 --- samcli/commands/delete/delete_context.py | 30 ++- samcli/lib/delete/cf_utils.py | 9 +- samcli/lib/package/ecr_uploader.py | 6 +- samcli/lib/package/packageable_resources.py | 35 ++- samcli/lib/package/s3_uploader.py | 4 +- tests/integration/delete/delete_integ_base.py | 3 +- 
.../integration/delete/test_delete_command.py | 225 +++++++++++++++++- .../testdata/delete/aws-ecr-repository.yaml | 7 + .../aws-serverless-function-retain.yaml | 19 ++ .../commands/delete/test_delete_context.py | 50 +++- tests/unit/lib/delete/test_cf_utils.py | 10 +- 11 files changed, 353 insertions(+), 45 deletions(-) create mode 100644 tests/integration/testdata/delete/aws-ecr-repository.yaml create mode 100644 tests/integration/testdata/delete/aws-serverless-function-retain.yaml diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 37014b74bd..2ce33336a7 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -35,6 +35,7 @@ class DeleteContext: + # TODO: Separate this context into 2 separate contexts guided and non-guided, just like deploy. def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str, no_prompts: bool): self.stack_name = stack_name self.region = region @@ -57,9 +58,15 @@ def __enter__(self): self.parse_config_file() if not self.stack_name: LOG.debug("No stack-name input found") - self.stack_name = prompt( - click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING - ) + if not self.no_prompts: + self.stack_name = prompt( + click.style("\tEnter stack name you want to delete", bold=True), type=click.STRING + ) + else: + raise click.BadOptionUsage( + option_name="--stack-name", + message="Missing option '--stack-name', provide a stack name that needs to be deleted.", + ) self.init_clients() return self @@ -94,9 +101,15 @@ def init_clients(self): Initialize all the clients being used by sam delete. 
""" if not self.region: - session = boto3.Session() - region = session.region_name - self.region = region if region else "us-east-1" + if not self.no_prompts: + session = boto3.Session() + region = session.region_name + self.region = region if region else "us-east-1" + else: + raise click.BadOptionUsage( + option_name="--region", + message="Missing option '--region', region is required to run the non guided delete command.", + ) if self.profile: Context.get_current_context().profile = self.profile @@ -218,7 +231,6 @@ def delete_ecr_companion_stack(self): ) retain_repos = self.ecr_repos_prompts(ecr_companion_stack_template) - # Delete the repos created by ECR companion stack if not retained ecr_companion_stack_template.delete(retain_resources=retain_repos) @@ -229,9 +241,11 @@ def delete_ecr_companion_stack(self): self.cf_utils.delete_stack(stack_name=self.companion_stack_name) self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) LOG.debug("Deleted ECR Companion Stack: %s", self.companion_stack_name) + except CfDeleteFailedStatusError: LOG.debug("delete_stack resulted failed and so re-try with retain_resources") self.cf_utils.delete_stack(stack_name=self.companion_stack_name, retain_resources=retain_repos) + self.cf_utils.wait_for_delete(stack_name=self.companion_stack_name) def delete(self): """ @@ -296,9 +310,11 @@ def delete(self): self.cf_utils.delete_stack(stack_name=self.stack_name) self.cf_utils.wait_for_delete(self.stack_name) LOG.debug("Deleted Cloudformation stack: %s", self.stack_name) + except CfDeleteFailedStatusError: LOG.debug("delete_stack resulted failed and so re-try with retain_resources") self.cf_utils.delete_stack(stack_name=self.stack_name, retain_resources=retain_resources) + self.cf_utils.wait_for_delete(self.stack_name) # If s3_bucket information is not available, warn the user if not self.s3_bucket: diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py index d418306e00..b2f6acae7b 100644 --- 
a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cf_utils.py @@ -30,6 +30,10 @@ def has_stack(self, stack_name: str) -> bool: return False stack = resp["Stacks"][0] + if stack["EnableTerminationProtection"]: + message = "Stack cannot be deleted while TerminationProtection is enabled." + raise DeleteFailedError(stack_name=stack_name, msg=message) + # Note: Stacks with REVIEW_IN_PROGRESS can be deleted # using delete_stack but get_template does not return # the template_str for this stack restricting deletion of @@ -53,11 +57,6 @@ def has_stack(self, stack_name: str) -> bool: LOG.error("Botocore Exception : %s", str(e)) raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e - except Exception as e: - # We don't know anything about this exception. Don't handle - LOG.error("Unable to get stack details.", exc_info=e) - raise e - def get_stack_template(self, stack_name: str, stage: str) -> Dict: """ Return the Cloudformation template of the given stack_name diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index c9546aa93e..4d5d8c18e6 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -133,8 +133,10 @@ def delete_artifact(self, image_uri: str, resource_id: str, property_name: str): except botocore.exceptions.ClientError as ex: # Handle Client errors such as RepositoryNotFoundException or InvalidParameterException - LOG.error("DeleteArtifactFailedError Exception : %s", str(ex)) - raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + if "RepositoryNotFoundException" not in str(ex): + LOG.debug("DeleteArtifactFailedError Exception : %s", str(ex)) + raise DeleteArtifactFailedError(resource_id=resource_id, property_name=property_name, ex=ex) from ex + LOG.debug("RepositoryNotFoundException : %s", str(ex)) def delete_ecr_repository(self, physical_id: str): """ diff --git a/samcli/lib/package/packageable_resources.py 
b/samcli/lib/package/packageable_resources.py index 808468a670..140aeaedf2 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -178,7 +178,11 @@ def get_property_value(self, resource_dict): return {"Bucket": None, "Key": None} resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) - if resource_path: + # In the case where resource_path is pointing to an intrinsic + # ref function, sam delete will delete the stack but skip the deletion of this + # artifact, as deletion of intrinsic ref function artifacts is not supported yet. + # TODO: Allow deletion of S3 artifacts with intrinsic ref functions. + if resource_path and isinstance(resource_path, str): return self.uploader.parse_s3_url(resource_path) return {"Bucket": None, "Key": None} @@ -233,12 +237,14 @@ def delete(self, resource_id, resource_dict): return remote_path = resource_dict.get(self.PROPERTY_NAME, {}).get(self.EXPORT_PROPERTY_CODE_KEY) - if is_ecr_url(remote_path): + # In the case where remote_path is pointing to an intrinsic + # ref function, sam delete will delete the stack but skip the deletion of this + # artifact, as deletion of intrinsic ref function artifacts is not supported yet. + # TODO: Allow deletion of ECR artifacts with intrinsic ref functions.
+ if isinstance(remote_path, str) and is_ecr_url(remote_path): self.uploader.delete_artifact( image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME ) - else: - raise ValueError("URL given to the parse method is not a valid ECR url {0}".format(remote_path)) class ResourceImage(Resource): @@ -288,13 +294,15 @@ def delete(self, resource_id, resource_dict): if resource_dict is None: return - remote_path = resource_dict[self.PROPERTY_NAME] - if is_ecr_url(remote_path): + remote_path = resource_dict.get(self.PROPERTY_NAME) + # In the case where remote_path is pointing to an intrinsic + # ref function, sam delete will delete the stack but skip the deletion of this + # artifact, as deletion of intrinsic ref function artifacts is not supported yet. + # TODO: Allow deletion of ECR artifacts with intrinsic ref functions. + if isinstance(remote_path, str) and is_ecr_url(remote_path): self.uploader.delete_artifact( image_uri=remote_path, resource_id=resource_id, property_name=self.PROPERTY_NAME ) - else: - raise ValueError("URL given to the parse method is not a valid ECR url {0}".format(remote_path)) class ResourceWithS3UrlDict(ResourceZip): @@ -350,7 +358,13 @@ def get_property_value(self, resource_dict): s3_bucket = resource_path.get(self.BUCKET_NAME_PROPERTY, None) key = resource_path.get(self.OBJECT_KEY_PROPERTY, None) - return {"Bucket": s3_bucket, "Key": key} + # In the case where resource_path is pointing to an intrinsic + # ref function, sam delete will delete the stack but skip the deletion of this + # artifact, as deletion of intrinsic ref function artifacts is not supported yet. + # TODO: Allow deletion of S3 artifacts with intrinsic ref functions.
+ if isinstance(s3_bucket, str) and isinstance(key, str): + return {"Bucket": s3_bucket, "Key": key} + return {"Bucket": None, "Key": None} class ServerlessFunctionResource(ResourceZip): @@ -535,7 +549,8 @@ def delete(self, resource_id, resource_dict): return repository_name = self.get_property_value(resource_dict) - if repository_name: + # TODO: Allow deletion of ECR Repositories with intrinsic ref functions. + if repository_name and isinstance(repository_name, str): self.uploader.delete_ecr_repository(physical_id=repository_name) def get_property_value(self, resource_dict): diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index a7f1a9a8b9..0e0932f546 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -189,7 +189,9 @@ def delete_prefix_artifacts(self): LOG.error("Bucket not specified") raise BucketNotSpecifiedError() if self.prefix: - response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) + # Note: list_objects_v2 api uses prefix to fetch the keys that begin with the prefix + # To restrict fetching files with exact prefix self.prefix, "/" is used below. 
+ response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix + "/") prefix_files = response.get("Contents", []) for obj in prefix_files: self.delete_artifact(obj["Key"], True) diff --git a/tests/integration/delete/delete_integ_base.py b/tests/integration/delete/delete_integ_base.py index 1eb70ef174..5eb15810b3 100644 --- a/tests/integration/delete/delete_integ_base.py +++ b/tests/integration/delete/delete_integ_base.py @@ -1,11 +1,12 @@ import os +from pathlib import Path from unittest import TestCase class DeleteIntegBase(TestCase): @classmethod def setUpClass(cls): - pass + cls.delete_test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "delete") def setUp(self): super().setUp() diff --git a/tests/integration/delete/test_delete_command.py b/tests/integration/delete/test_delete_command.py index 639152e746..9261245d55 100644 --- a/tests/integration/delete/test_delete_command.py +++ b/tests/integration/delete/test_delete_command.py @@ -5,7 +5,6 @@ import uuid from pathlib import Path from unittest import skipIf -import logging import boto3 import docker from botocore.config import Config @@ -26,7 +25,6 @@ CFN_SLEEP = 3 TIMEOUT = 300 CFN_PYTHON_VERSION_SUFFIX = os.environ.get("PYTHON_VERSION", "0.0.0").replace(".", "-") -LOG = logging.getLogger(__name__) @skipIf(SKIP_DELETE_TESTS, "Skip delete tests in CI/CD only") @@ -55,7 +53,7 @@ def test_delete_command_no_stack_deployed(self): stack_name = self._method_to_stack_name(self.id()) - delete_command_list = self.get_delete_command_list(stack_name=stack_name, no_prompts=True) + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True) delete_process_execute = run_command(delete_command_list) self.assertEqual(delete_process_execute.process.returncode, 0) @@ -82,7 +80,7 @@ def test_delete_command_no_stack_deployed(self): "aws-stepfunctions-statemachine.yaml", ] ) - def test_delete_with_s3_prefix_present_zip(self, template_file): + 
def test_delete_no_prompts_with_s3_prefix_present_zip(self, template_file): template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) @@ -98,7 +96,7 @@ def test_delete_with_s3_prefix_present_zip(self, template_file): config_file_path = self.test_data_path.joinpath(config_file_name) delete_command_list = self.get_delete_command_list( - stack_name=stack_name, config_file=config_file_path, no_prompts=True + stack_name=stack_name, config_file=config_file_path, region="us-east-1", no_prompts=True ) delete_process_execute = run_command(delete_command_list) @@ -118,14 +116,14 @@ def test_delete_with_s3_prefix_present_zip(self, template_file): "aws-serverless-function-image.yaml", ] ) - def test_delete_with_s3_prefix_present_image(self, template_file): + def test_delete_no_prompts_with_s3_prefix_present_image(self, template_file): template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) config_file_name = stack_name + ".toml" deploy_command_list = self.get_deploy_command_list( - template_file=template_path, guided=True, config_file=config_file_name + template_file=template_path, guided=True, config_file=config_file_name, image_repository=self.ecr_repo_name ) deploy_process_execute = run_command_with_input( @@ -134,7 +132,7 @@ def test_delete_with_s3_prefix_present_image(self, template_file): config_file_path = self.test_data_path.joinpath(config_file_name) delete_command_list = self.get_delete_command_list( - stack_name=stack_name, config_file=config_file_path, no_prompts=True + stack_name=stack_name, config_file=config_file_path, region="us-east-1", no_prompts=True ) delete_process_execute = run_command(delete_command_list) @@ -154,7 +152,7 @@ def test_delete_with_s3_prefix_present_image(self, template_file): "aws-serverless-function.yaml", ] ) - def test_delete_guided_prompts(self, template_file): + def test_delete_guided_config_file_present(self, template_file): 
template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) @@ -171,7 +169,6 @@ def test_delete_guided_prompts(self, template_file): config_file_path = self.test_data_path.joinpath(config_file_name) delete_command_list = self.get_delete_command_list(stack_name=stack_name, config_file=config_file_path) - LOG.info(delete_command_list) delete_process_execute = run_command_with_input(delete_command_list, "y\nn\ny\n".encode()) self.assertEqual(delete_process_execute.process.returncode, 0) @@ -216,7 +213,7 @@ def test_delete_no_config_file_zip(self, template_file): "aws-serverless-function.yaml", ] ) - def test_delete_no_s3_prefix_zip(self, template_file): + def test_delete_no_prompts_no_s3_prefix_zip(self, template_file): template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) @@ -254,7 +251,7 @@ def test_delete_no_s3_prefix_zip(self, template_file): "aws-serverless-function-image.yaml", ] ) - def test_delete_no_s3_prefix_image(self, template_file): + def test_delete_no_prompts_no_s3_prefix_image(self, template_file): template_path = self.test_data_path.joinpath(template_file) stack_name = self._method_to_stack_name(self.id()) @@ -328,6 +325,210 @@ def test_delete_nested_stacks(self, template_file): except ClientError as ex: self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + def test_delete_stack_termination_protection_enabled(self): + template_str = """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Stack for testing termination protection enabled stacks. 
+ Resources: + MyRepository: + Type: AWS::ECR::Repository + Properties: + RepositoryName: "test-termination-protection-repository" + """ + + stack_name = self._method_to_stack_name(self.id()) + + self.cf_client.create_stack(StackName=stack_name, TemplateBody=template_str, EnableTerminationProtection=True) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1", no_prompts=True) + + delete_process_execute = run_command(delete_command_list) + + self.assertEqual(delete_process_execute.process.returncode, 1) + self.assertIn( + bytes( + "TerminationProtection is enabled", + encoding="utf-8", + ), + delete_process_execute.stderr, + ) + + self.cf_client.update_termination_protection(StackName=stack_name, EnableTerminationProtection=False) + + delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + def test_no_prompts_no_stack_name(self): + + delete_command_list = self.get_delete_command_list(no_prompts=True) + delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 2) + + def test_no_prompts_no_region(self): + stack_name = self._method_to_stack_name(self.id()) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, no_prompts=True) + delete_process_execute = run_command(delete_command_list) + self.assertEqual(delete_process_execute.process.returncode, 2) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + ] + ) + def test_delete_guided_no_stack_name_no_region(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + 
stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + region="us-east-1", + ) + deploy_process_execute = run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list() + delete_process_execute = run_command_with_input(delete_command_list, "{}\ny\ny\n".format(stack_name).encode()) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + @parameterized.expand( + [ + "aws-ecr-repository.yaml", + ] + ) + def test_delete_guided_ecr_repository_present(self, template_file): + template_path = self.delete_test_data_path.joinpath(template_file) + stack_name = self._method_to_stack_name(self.id()) + + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + region="us-east-1", + ) + deploy_process_execute = run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1") + delete_process_execute = run_command_with_input(delete_command_list, "y\ny\ny\n".encode()) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + 
@parameterized.expand( + [ + "aws-serverless-function-image.yaml", + ] + ) + def test_delete_guided_no_s3_prefix_image(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = self._method_to_stack_name(self.id()) + + # Try to deploy to another region. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + image_repository=self.ecr_repo_name, + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + region="us-east-1", + ) + + deploy_process_execute = run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1") + + delete_process_execute = run_command_with_input(delete_command_list, "y\n".encode()) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + @parameterized.expand( + [ + "aws-serverless-function-retain.yaml", + ] + ) + def test_delete_guided_retain_s3_artifact(self, template_file): + template_path = self.delete_test_data_path.joinpath(template_file) + stack_name = self._method_to_stack_name(self.id()) + + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes foo_bar=baz", + confirm_changeset=False, + region="us-east-1", + ) + deploy_process_execute = 
run_command(deploy_command_list) + + delete_command_list = self.get_delete_command_list(stack_name=stack_name, region="us-east-1") + delete_process_execute = run_command_with_input(delete_command_list, "y\nn\nn\n".encode()) + + self.assertEqual(delete_process_execute.process.returncode, 0) + + try: + resp = self.cf_client.describe_stacks(StackName=stack_name) + except ClientError as ex: + self.assertIn(f"Stack with id {stack_name} does not exist", str(ex)) + + # TODO: Add 3 more tests after Auto ECR is merged to develop + # 1. Create a stack using guided deploy of type image and delete + # 2. Delete the ECR Companion Stack as input stack. + # 3. Retain ECR Repository that contains at least 1 image. + # - Create a stack using guided deploy of type image + # - Select no for deleting ECR repository and this will retain the non-empty repository + def _method_to_stack_name(self, method_name): """Method expects method name which can be a full path. Eg: test.integration.test_deploy_command.method_name""" method_name = method_name.split(".")[-1] diff --git a/tests/integration/testdata/delete/aws-ecr-repository.yaml b/tests/integration/testdata/delete/aws-ecr-repository.yaml new file mode 100644 index 0000000000..af6d63c336 --- /dev/null +++ b/tests/integration/testdata/delete/aws-ecr-repository.yaml @@ -0,0 +1,7 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: Stack for creating ECR repository for testing +Resources: + MyRepository: + Type: AWS::ECR::Repository + Properties: + RepositoryName: "test-stack-with-ecr-repository" \ No newline at end of file diff --git a/tests/integration/testdata/delete/aws-serverless-function-retain.yaml b/tests/integration/testdata/delete/aws-serverless-function-retain.yaml new file mode 100644 index 0000000000..ae857aa196 --- /dev/null +++ b/tests/integration/testdata/delete/aws-serverless-function-retain.yaml @@ -0,0 +1,19 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: A hello world
application. + +Parameters: + Parameter: + Type: String + Default: Sample + Description: A custom parameter + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: main.handler + Runtime: python3.7 + CodeUri: . + Timeout: 600 + DeletionPolicy: Retain diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 087f552bc4..25c98810c5 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -10,6 +10,8 @@ from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.ecr_uploader import ECRUploader +from samcli.commands.delete.exceptions import CfDeleteFailedStatusError + class TestDeleteContext(TestCase): @patch("samcli.commands.delete.delete_context.click.echo") @@ -78,9 +80,10 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex self.assertEqual(delete_context.s3_prefix, "s3-prefix") @patch("samcli.commands.delete.delete_context.prompt") + @patch("samcli.commands.delete.delete_context.confirm") @patch("samcli.commands.delete.delete_context.click.get_current_context") @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) - def test_delete_no_user_input(self, patched_click_get_current_context, patched_prompt): + def test_delete_no_user_input(self, patched_click_get_current_context, patched_confirm, patched_prompt): patched_click_get_current_context = MagicMock() with DeleteContext( stack_name=None, @@ -88,14 +91,15 @@ def test_delete_no_user_input(self, patched_click_get_current_context, patched_p config_file=None, config_env=None, profile=None, - no_prompts=True, + no_prompts=None, ) as delete_context: delete_context.run() patched_prompt.side_effect = ["sam-app"] + patched_confirm.side_effect = [True] expected_prompt_calls = [ - call(click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING), + 
call(click.style("\tEnter stack name you want to delete", bold=True), type=click.STRING), ] self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) @@ -388,3 +392,43 @@ def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( call("\nDeleted successfully"), ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) + + @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.click.get_current_context") + @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object( + CfUtils, + "wait_for_delete", + MagicMock( + side_effect=( + CfDeleteFailedStatusError("test-098f6bcd-CompanionStack", "Mock WaitError"), + {}, + CfDeleteFailedStatusError("test", "Mock WaitError"), + {}, + ) + ), + ) + @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) + @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) + @patch.object(Template, "get_ecr_repos", MagicMock(side_effect=({}, {"logical_id": {"Repository": "test_id"}}))) + def test_retain_resources_delete_stack(self, patched_click_get_current_context, patched_get_cf_template_name): + patched_get_cf_template_name.return_value = "hello.template" + with DeleteContext( + stack_name="test", + region="us-east-1", + config_file="samconfig.toml", + config_env="default", + profile="test", + no_prompts=True, + ) as delete_context: + delete_context.s3_bucket = "s3_bucket" + delete_context.s3_prefix = "s3_prefix" + + delete_context.run() + + self.assertEqual(CfUtils.has_stack.call_count, 2) + self.assertEqual(CfUtils.get_stack_template.call_count, 2) + self.assertEqual(CfUtils.delete_stack.call_count, 4) + self.assertEqual(CfUtils.wait_for_delete.call_count, 4) diff --git a/tests/unit/lib/delete/test_cf_utils.py 
b/tests/unit/lib/delete/test_cf_utils.py index 61c1c186e1..8a18c782ce 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -51,14 +51,16 @@ def test_cf_utils_has_stack_exception_client_error(self): with self.assertRaises(DeleteFailedError): self.cf_utils.has_stack("test") - def test_cf_utils_has_stack_exception(self): - self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) - with self.assertRaises(Exception): + def test_cf_utils_has_stack_termination_protection_enabled(self): + self.cf_utils._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "CREATE_COMPLETE", "EnableTerminationProtection": True}]} + ) + with self.assertRaises(DeleteFailedError): self.cf_utils.has_stack("test") def test_cf_utils_has_stack_in_review(self): self.cf_utils._client.describe_stacks = MagicMock( - return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS", "EnableTerminationProtection": False}]} ) self.assertEqual(self.cf_utils.has_stack("test"), False) From 830f46592733b78bb1328ca5b96827a39326a952 Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar Date: Tue, 17 Aug 2021 11:49:39 -0700 Subject: [PATCH 7/9] add sam delete to pyinstaller hooks file --- installer/pyinstaller/hook-samcli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index 72a939a9b2..0b799cf885 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -13,6 +13,7 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + "ssamcli.commands.delete", "samcli.commands.pipeline.pipeline", "samcli.commands.pipeline.init", "samcli.commands.pipeline.bootstrap", From 86bc4ca28c71ee83d5fa21e1a79a6d235df502ba Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar Date: Tue, 17 Aug 2021 11:52:57 -0700 Subject: [PATCH 8/9] fix typo --- 
installer/pyinstaller/hook-samcli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index 0b799cf885..5101b96d79 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -13,7 +13,7 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", - "ssamcli.commands.delete", + "samcli.commands.delete", "samcli.commands.pipeline.pipeline", "samcli.commands.pipeline.init", "samcli.commands.pipeline.bootstrap", From 936744cbbf650bbc5230cc732a0d0aff1b36ef31 Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar Date: Tue, 17 Aug 2021 15:10:36 -0700 Subject: [PATCH 9/9] resolve pr comments --- samcli/commands/delete/delete_context.py | 61 +++++++------ .../lib/delete/{cf_utils.py => cfn_utils.py} | 2 +- samcli/lib/deploy/deployer.py | 9 +- samcli/lib/package/artifact_exporter.py | 12 ++- samcli/lib/package/local_files_utils.py | 66 ++++++++++++++ samcli/lib/package/s3_uploader.py | 15 ++-- samcli/lib/package/utils.py | 33 +------ .../commands/delete/test_delete_context.py | 90 +++++++++---------- tests/unit/lib/delete/test_cf_utils.py | 4 +- 9 files changed, 165 insertions(+), 127 deletions(-) rename samcli/lib/delete/{cf_utils.py => cfn_utils.py} (99%) create mode 100644 samcli/lib/package/local_files_utils.py diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 2ce33336a7..cacfbf87bb 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -14,10 +14,10 @@ from samcli.lib.utils.hash import str_checksum from samcli.cli.cli_config_file import TomlProvider from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent -from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.delete.cfn_utils import CfnUtils from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.package.artifact_exporter import mktempfile, 
get_cf_template_name +from samcli.lib.package.local_files_utils import get_uploaded_s3_object_name from samcli.cli.context import Context @@ -82,19 +82,21 @@ def parse_config_file(self): config_options = toml_provider( config_path=self.config_file, config_env=self.config_env, cmd_names=[CONFIG_COMMAND] ) - if config_options: - if not self.stack_name: - self.stack_name = config_options.get("stack_name", None) - # If the stack_name is same as the one present in samconfig file, - # get the information about parameters if not specified by user. - if self.stack_name and self.stack_name == config_options.get("stack_name", None): - LOG.debug("Local config present and using the defined options") - if not self.region: - self.region = config_options.get("region", None) - if not self.profile: - self.profile = config_options.get("profile", None) - self.s3_bucket = config_options.get("s3_bucket", None) - self.s3_prefix = config_options.get("s3_prefix", None) + if not config_options: + return + + if not self.stack_name: + self.stack_name = config_options.get("stack_name", None) + # If the stack_name is same as the one present in samconfig file, + # get the information about parameters if not specified by user. + if self.stack_name and self.stack_name == config_options.get("stack_name", None): + LOG.debug("Local config present and using the defined options") + if not self.region: + self.region = config_options.get("region", None) + if not self.profile: + self.profile = config_options.get("profile", None) + self.s3_bucket = config_options.get("s3_bucket", None) + self.s3_prefix = config_options.get("s3_prefix", None) def init_clients(self): """ @@ -106,6 +108,8 @@ def init_clients(self): region = session.region_name self.region = region if region else "us-east-1" else: + # TODO: as part of the guided and non-guided context separation, we need also to move the options + # validations to a validator similar to samcli/lib/cli_validation/image_repository_validation.py. 
raise click.BadOptionUsage( option_name="--region", message="Missing option '--region', region is required to run the non guided delete command.", @@ -131,7 +135,7 @@ def init_clients(self): self.ecr_uploader = ECRUploader(docker_client=None, ecr_client=ecr_client, ecr_repo=None, ecr_repo_multi=None) self.uploaders = Uploaders(self.s3_uploader, self.ecr_uploader) - self.cf_utils = CfUtils(cloudformation_client) + self.cf_utils = CfnUtils(cloudformation_client) def s3_prompts(self): """ @@ -172,17 +176,17 @@ def ecr_companion_stack_prompts(self): User prompt to delete the ECR companion stack. """ click.echo(f"\tFound ECR Companion Stack {self.companion_stack_name}") - if not self.no_prompts: - delete_ecr_companion_stack_prompt = confirm( - click.style( - "\tDo you you want to delete the ECR companion stack" - f" {self.companion_stack_name} in the region {self.region} ?", - bold=True, - ), - default=False, - ) - return delete_ecr_companion_stack_prompt - return True + if self.no_prompts: + return True + + return confirm( + click.style( + "\tDo you you want to delete the ECR companion stack" + f" {self.companion_stack_name} in the region {self.region} ?", + bold=True, + ), + default=False, + ) def ecr_repos_prompts(self, template: Template): """ @@ -259,8 +263,7 @@ def delete(self): template_str = json.dumps(template_str, indent=4, ensure_ascii=False) # Get the cloudformation template name using template_str - with mktempfile() as temp_file: - self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template") + self.cf_template_file_name = get_uploaded_s3_object_name(file_content=template_str, extension="template") template = Template( template_path=None, diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cfn_utils.py similarity index 99% rename from samcli/lib/delete/cf_utils.py rename to samcli/lib/delete/cfn_utils.py index b2f6acae7b..854e86a26f 100644 --- a/samcli/lib/delete/cf_utils.py +++ b/samcli/lib/delete/cfn_utils.py @@ -13,7 
+13,7 @@ LOG = logging.getLogger(__name__) -class CfUtils: +class CfnUtils: def __init__(self, cloudformation_client): self._client = cloudformation_client diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 4cf3c45b04..8c1326698f 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -34,7 +34,7 @@ ) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns, newline_per_item, MIN_OFFSET from samcli.commands.deploy import exceptions as deploy_exceptions -from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name +from samcli.lib.package.local_files_utils import mktempfile, get_uploaded_s3_object_name from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp @@ -176,10 +176,9 @@ def create_changeset( # TemplateBody. This is required for large templates. if s3_uploader: with mktempfile() as temporary_file: - - remote_path = get_cf_template_name( - temp_file=temporary_file, template_str=kwargs.pop("TemplateBody"), extension="template" - ) + temporary_file.write(kwargs.pop("TemplateBody")) + temporary_file.flush() + remote_path = get_uploaded_s3_object_name(file_path=temporary_file.name, extension="template") # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 1e0aa75b53..bfa4ad9bf8 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -41,10 +41,9 @@ is_local_folder, make_abs_path, is_local_file, - mktempfile, is_s3_url, - get_cf_template_name, ) +from samcli.lib.package.local_files_utils import mktempfile, get_uploaded_s3_object_name from samcli.lib.utils.packagetype import ZIP from samcli.yamlhelper import yaml_parse, yaml_dump @@ -84,10 +83,9 @@ 
def do_export(self, resource_id, resource_dict, parent_dir): exported_template_str = yaml_dump(exported_template_dict) with mktempfile() as temporary_file: - - remote_path = get_cf_template_name( - temp_file=temporary_file, template_str=exported_template_str, extension="template" - ) + temporary_file.write(exported_template_str) + temporary_file.flush() + remote_path = get_uploaded_s3_object_name(file_path=temporary_file.name, extension="template") url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format @@ -135,7 +133,7 @@ def __init__( """ if not template_str: if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): - raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) + raise ValueError("parent_dir parameter must be an absolute path to a folder {0}".format(parent_dir)) abs_template_path = make_abs_path(parent_dir, template_path) template_dir = os.path.dirname(abs_template_path) diff --git a/samcli/lib/package/local_files_utils.py b/samcli/lib/package/local_files_utils.py new file mode 100644 index 0000000000..3e10b258b1 --- /dev/null +++ b/samcli/lib/package/local_files_utils.py @@ -0,0 +1,66 @@ +""" +Utilities for local files handling. 
+""" +import os +import tempfile +import uuid +from contextlib import contextmanager +from typing import Optional + +from samcli.lib.utils.hash import file_checksum + + +@contextmanager +def mktempfile(): + directory = tempfile.gettempdir() + filename = os.path.join(directory, uuid.uuid4().hex) + + try: + with open(filename, "w+") as handle: + yield handle + finally: + if os.path.exists(filename): + os.remove(filename) + + +def get_uploaded_s3_object_name( + precomputed_md5: Optional[str] = None, + file_content: Optional[str] = None, + file_path: Optional[str] = None, + extension: Optional[str] = None, +) -> str: + """ + Generate the file name that will be used while creating the S3 Object based on the file hash value. + This method expect either the precomuted hash value of the file, or the file content, or the file path + + Parameters + ---------- + precomputed_md5: str + the precomputed hash value of the file. + file_content : str + The file content to be uploaded to S3. + file_path : str + The file path to be uploaded to S3 + extension : str + The file extension in S3 + Returns + ------- + str + The generated S3 Object name + """ + if precomputed_md5: + filemd5 = precomputed_md5 + elif file_content: + with mktempfile() as temp_file: + temp_file.write(file_content) + temp_file.flush() + filemd5 = file_checksum(temp_file.name) + elif file_path: + filemd5 = file_checksum(file_path) + else: + raise Exception("Either File Content, File Path, or Precomputed Hash should has a value") + + if extension: + remote_path = filemd5 + "." 
+ extension + + return remote_path diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 0e0932f546..8a56b7ee4f 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -22,7 +22,6 @@ from collections import abc from typing import Optional, Dict, Any, cast from urllib.parse import urlparse, parse_qs -import click import botocore import botocore.exceptions @@ -30,7 +29,7 @@ from boto3.s3 import transfer from samcli.commands.package.exceptions import NoSuchBucketError, BucketNotSpecifiedError -from samcli.lib.utils.hash import file_checksum +from samcli.lib.package.local_files_utils import get_uploaded_s3_object_name LOG = logging.getLogger(__name__) @@ -138,11 +137,9 @@ def upload_with_dedup( # uploads of same object. Uploader will check if the file exists in S3 # and re-upload only if necessary. So the template points to same file # in multiple places, this will upload only once - filemd5 = precomputed_md5 or file_checksum(file_name) - remote_path = filemd5 - if extension: - remote_path = remote_path + "." 
+ extension - + remote_path = get_uploaded_s3_object_name( + precomputed_md5=precomputed_md5, file_path=file_name, extension=extension + ) return self.upload(file_name, remote_path) def delete_artifact(self, remote_path: str, is_key: bool = False) -> bool: @@ -164,14 +161,14 @@ def delete_artifact(self, remote_path: str, is_key: bool = False) -> bool: # Deleting Specific file with key if self.file_exists(remote_path=key): - click.echo(f"\t- Deleting S3 object with key {key}") + LOG.info("\t- Deleting S3 object with key %s", key) self.s3.delete_object(Bucket=self.bucket_name, Key=key) LOG.debug("Deleted s3 object with key %s successfully", key) return True # Given s3 object key does not exist LOG.debug("Could not find the S3 file with the key %s", key) - click.echo(f"\t- Could not find and delete the S3 object with the key {key}") + LOG.info("\t- Could not find and delete the S3 object with the key %s", key) return False except botocore.exceptions.ClientError as ex: diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index c152b37aa0..8cfc2f3157 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -7,19 +7,17 @@ import re import shutil import tempfile -import uuid import zipfile import contextlib from contextlib import contextmanager -from typing import Dict, Optional, cast, TextIO +from typing import Dict, Optional, cast import jmespath -from samcli.commands.package import exceptions -from samcli.commands.package.exceptions import ImageNotFoundError +from samcli.commands.package.exceptions import ImageNotFoundError, InvalidLocalPathError from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.utils.hash import dir_checksum, file_checksum +from samcli.lib.utils.hash import dir_checksum LOG = logging.getLogger(__name__) @@ -176,7 +174,7 @@ def upload_local_artifacts( if is_local_file(local_path): return uploader.upload_with_dedup(local_path) - raise 
exceptions.InvalidLocalPathError(resource_id=resource_id, property_name=property_name, local_path=local_path) + raise InvalidLocalPathError(resource_id=resource_id, property_name=property_name, local_path=local_path) def resource_not_packageable(resource_dict): @@ -267,31 +265,8 @@ def make_zip(file_name, source_root): return zipfile_name -@contextmanager -def mktempfile(): - directory = tempfile.gettempdir() - filename = os.path.join(directory, uuid.uuid4().hex) - - try: - with open(filename, "w+") as handle: - yield handle - finally: - if os.path.exists(filename): - os.remove(filename) - - def copy_to_temp_dir(filepath): tmp_dir = tempfile.mkdtemp() dst = os.path.join(tmp_dir, os.path.basename(filepath)) shutil.copyfile(filepath, dst) return tmp_dir - - -def get_cf_template_name(temp_file: TextIO, template_str: str, extension: str) -> str: - temp_file.write(template_str) - temp_file.flush() - - filemd5 = file_checksum(temp_file.name) - remote_path = filemd5 + "." + extension - - return remote_path diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index 25c98810c5..249b3f4afa 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -6,7 +6,7 @@ from samcli.commands.delete.delete_context import DeleteContext from samcli.lib.package.artifact_exporter import Template from samcli.cli.cli_config_file import TomlProvider -from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.delete.cfn_utils import CfnUtils from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.ecr_uploader import ECRUploader @@ -16,7 +16,7 @@ class TestDeleteContext(TestCase): @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) + @patch.object(CfnUtils, "has_stack", 
MagicMock(return_value=(False))) def test_delete_context_stack_does_not_exist(self, patched_click_get_current_context, patched_click_echo): with DeleteContext( stack_name="test", @@ -82,7 +82,7 @@ def test_delete_context_parse_config_file(self, patched_click_get_current_contex @patch("samcli.commands.delete.delete_context.prompt") @patch("samcli.commands.delete.delete_context.confirm") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(False))) + @patch.object(CfnUtils, "has_stack", MagicMock(return_value=(False))) def test_delete_no_user_input(self, patched_click_get_current_context, patched_confirm, patched_prompt): patched_click_get_current_context = MagicMock() with DeleteContext( @@ -120,10 +120,10 @@ def test_delete_no_user_input(self, patched_click_get_current_context, patched_c ) ), ) - @patch.object(CfUtils, "has_stack", MagicMock(return_value=(True))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, "delete_stack", MagicMock()) - @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfnUtils, "has_stack", MagicMock(return_value=(True))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "wait_for_delete", MagicMock()) @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) @patch("samcli.commands.delete.delete_context.click.get_current_context") @@ -139,20 +139,20 @@ def test_delete_context_valid_execute_run(self, patched_click_get_current_contex ) as delete_context: delete_context.run() - self.assertEqual(CfUtils.has_stack.call_count, 2) - self.assertEqual(CfUtils.get_stack_template.call_count, 2) - 
self.assertEqual(CfUtils.delete_stack.call_count, 2) - self.assertEqual(CfUtils.wait_for_delete.call_count, 2) + self.assertEqual(CfnUtils.has_stack.call_count, 2) + self.assertEqual(CfnUtils.get_stack_template.call_count, 2) + self.assertEqual(CfnUtils.delete_stack.call_count, 2) + self.assertEqual(CfnUtils.wait_for_delete.call_count, 2) self.assertEqual(S3Uploader.delete_prefix_artifacts.call_count, 1) self.assertEqual(Template.get_ecr_repos.call_count, 2) @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.deploy.guided_context.click.secho") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, "delete_stack", MagicMock()) - @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfnUtils, "has_stack", MagicMock(side_effect=(True, False))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "wait_for_delete", MagicMock()) def test_delete_context_no_s3_bucket( self, patched_click_get_current_context, patched_click_secho, patched_click_echo ): @@ -181,13 +181,13 @@ def test_delete_context_no_s3_bucket( ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) - @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.get_uploaded_s3_object_name") @patch("samcli.commands.delete.delete_context.confirm") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, 
"delete_stack", MagicMock()) - @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfnUtils, "has_stack", MagicMock(side_effect=(True, False))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) def test_guided_prompts_s3_bucket_prefix_present_execute_run( self, patched_click_get_current_context, patched_confirm, patched_get_cf_template_name @@ -237,13 +237,13 @@ def test_guided_prompts_s3_bucket_prefix_present_execute_run( self.assertFalse(delete_context.delete_artifacts_folder) self.assertTrue(delete_context.delete_cf_template_file) - @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.get_uploaded_s3_object_name") @patch("samcli.commands.delete.delete_context.confirm") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, "delete_stack", MagicMock()) - @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfnUtils, "has_stack", MagicMock(side_effect=(True, False))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( @@ -284,13 +284,13 @@ def test_guided_prompts_s3_bucket_present_no_prefix_execute_run( self.assertEqual(expected_confirmation_calls, 
patched_confirm.call_args_list) self.assertTrue(delete_context.delete_cf_template_file) - @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.get_uploaded_s3_object_name") @patch("samcli.commands.delete.delete_context.confirm") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, "delete_stack", MagicMock()) - @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfnUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_artifact", MagicMock()) @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) @patch.object(Template, "get_ecr_repos", MagicMock(side_effect=({}, {"logical_id": {"Repository": "test_id"}}))) @@ -358,20 +358,20 @@ def test_guided_prompts_ecr_companion_stack_present_execute_run( self.assertFalse(delete_context.delete_artifacts_folder) self.assertTrue(delete_context.delete_cf_template_file) - @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.get_uploaded_s3_object_name") @patch("samcli.commands.delete.delete_context.click.echo") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, False))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, "delete_stack", MagicMock()) - @patch.object(CfUtils, "wait_for_delete", MagicMock()) + @patch.object(CfnUtils, 
"has_stack", MagicMock(side_effect=(True, False))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "wait_for_delete", MagicMock()) @patch.object(S3Uploader, "delete_prefix_artifacts", MagicMock()) @patch.object(ECRUploader, "delete_ecr_repository", MagicMock()) @patch.object(Template, "get_ecr_repos", MagicMock(return_value=({"logical_id": {"Repository": "test_id"}}))) def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( self, patched_click_get_current_context, patched_click_echo, patched_get_cf_template_name ): - CfUtils.get_stack_template.return_value = { + CfnUtils.get_stack_template.return_value = { "TemplateBody": {"Metadata": {"CompanionStackname": "test-098f6bcd-CompanionStack"}} } patched_get_cf_template_name.return_value = "hello.template" @@ -393,13 +393,13 @@ def test_no_prompts_input_is_ecr_companion_stack_present_execute_run( ] self.assertEqual(expected_click_echo_calls, patched_click_echo.call_args_list) - @patch("samcli.commands.delete.delete_context.get_cf_template_name") + @patch("samcli.commands.delete.delete_context.get_uploaded_s3_object_name") @patch("samcli.commands.delete.delete_context.click.get_current_context") - @patch.object(CfUtils, "has_stack", MagicMock(side_effect=(True, True))) - @patch.object(CfUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) - @patch.object(CfUtils, "delete_stack", MagicMock()) + @patch.object(CfnUtils, "has_stack", MagicMock(side_effect=(True, True))) + @patch.object(CfnUtils, "get_stack_template", MagicMock(return_value=({"TemplateBody": "Hello World"}))) + @patch.object(CfnUtils, "delete_stack", MagicMock()) @patch.object( - CfUtils, + CfnUtils, "wait_for_delete", MagicMock( side_effect=( @@ -428,7 +428,7 @@ def test_retain_resources_delete_stack(self, patched_click_get_current_context, delete_context.run() - 
self.assertEqual(CfUtils.has_stack.call_count, 2) - self.assertEqual(CfUtils.get_stack_template.call_count, 2) - self.assertEqual(CfUtils.delete_stack.call_count, 4) - self.assertEqual(CfUtils.wait_for_delete.call_count, 4) + self.assertEqual(CfnUtils.has_stack.call_count, 2) + self.assertEqual(CfnUtils.get_stack_template.call_count, 2) + self.assertEqual(CfnUtils.delete_stack.call_count, 4) + self.assertEqual(CfnUtils.wait_for_delete.call_count, 4) diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py index 8a18c782ce..c34d8170bf 100644 --- a/tests/unit/lib/delete/test_cf_utils.py +++ b/tests/unit/lib/delete/test_cf_utils.py @@ -5,7 +5,7 @@ from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError, CfDeleteFailedStatusError from botocore.exceptions import ClientError, BotoCoreError, WaiterError -from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.delete.cfn_utils import CfnUtils class MockDeleteWaiter: @@ -23,7 +23,7 @@ def setUp(self): self.session = MagicMock() self.cloudformation_client = self.session.client("cloudformation") self.s3_client = self.session.client("s3") - self.cf_utils = CfUtils(self.cloudformation_client) + self.cf_utils = CfnUtils(self.cloudformation_client) def test_cf_utils_init(self): self.assertEqual(self.cf_utils._client, self.cloudformation_client)