diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py
index 6491307247..dcfd9071b0 100644
--- a/samcli/commands/delete/delete_context.py
+++ b/samcli/commands/delete/delete_context.py
@@ -149,6 +149,20 @@ def delete(self):
         with mktempfile() as temp_file:
             self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template")
 
+        template = Template(
+            template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str
+        )
+
+        # If s3 info is not available, try to obtain it from CF
+        # template resources.
+        if not self.s3_bucket:
+            s3_info = template.get_s3_info()
+            self.s3_bucket = s3_info["s3_bucket"]
+            self.s3_uploader.bucket_name = self.s3_bucket
+
+            self.s3_prefix = s3_info["s3_prefix"]
+            self.s3_uploader.prefix = self.s3_prefix
+
         self.guided_prompts()
 
         # Delete the primary stack
@@ -158,9 +172,6 @@ def delete(self):
         LOG.debug("Deleted Cloudformation stack: %s", self.stack_name)
 
         # Delete the artifacts
-        template = Template(
-            template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str
-        )
         template.delete()
 
         # Delete the CF template file in S3
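Note: the hunk above constructs Template before guided_prompts() so that, when self.s3_bucket was not supplied, the bucket and prefix can be recovered from the template's own resources. A minimal sketch of that fallback logic; the helper name resolve_s3_location is hypothetical, not part of the change:

    # Illustrative only: CLI-supplied values win; otherwise fall back to
    # Template.get_s3_info(), which returns {"s3_bucket": ..., "s3_prefix": ...}.
    def resolve_s3_location(cli_bucket, cli_prefix, template):
        if cli_bucket:
            return cli_bucket, cli_prefix
        s3_info = template.get_s3_info()
        return s3_info["s3_bucket"], s3_info["s3_prefix"]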
+ """ + result = {"s3_bucket": None, "s3_prefix": None} + if "Resources" not in self.template_dict: + return result + + self._apply_global_values() + + for _, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", {}) + + for exporter_class in self.resources_to_export: + # Skip the resources which don't give s3 information + if exporter_class.EXPORT_DESTINATION != Destination.S3: + continue + if exporter_class.RESOURCE_TYPE != resource_type: + continue + if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE: + continue + + exporter = exporter_class(self.uploaders, None) + s3_info = exporter.get_property_value(resource_dict) + + result["s3_bucket"] = s3_info["Bucket"] + s3_key = s3_info["Key"] + + # Extract the prefix from the key + if s3_key: + key_split = s3_key.rsplit("/", 1) + if len(key_split) > 1: + result["s3_prefix"] = key_split[0] + break + if result["s3_bucket"]: + break + + return result diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 39d9b73867..49f0e47653 100644 --- a/samcli/lib/package/packageable_resources.py +++ b/samcli/lib/package/packageable_resources.py @@ -164,11 +164,22 @@ def delete(self, resource_id, resource_dict): """ if resource_dict is None: return + + s3_info = self.get_property_value(resource_dict) + if s3_info["Key"]: + self.uploader.delete_artifact(s3_info["Key"], True) + + def get_property_value(self, resource_dict): + """ + Get the s3 property value for this resource + """ + if resource_dict is None: + return {"Bucket": None, "Key": None} + resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict) - parsed_s3_url = self.uploader.parse_s3_url(resource_path) - if not self.uploader.bucket_name: - self.uploader.bucket_name = parsed_s3_url["Bucket"] - self.uploader.delete_artifact(parsed_s3_url["Key"], True) + if resource_path: + return self.uploader.parse_s3_url(resource_path) + return {"Bucket": None, "Key": None} class ResourceImageDict(Resource): @@ -322,13 +333,23 @@ def delete(self, resource_id, resource_dict): """ if resource_dict is None: return - resource_path = resource_dict[self.PROPERTY_NAME] - s3_bucket = resource_path[self.BUCKET_NAME_PROPERTY] - key = resource_path[self.OBJECT_KEY_PROPERTY] - if not self.uploader.bucket_name: - self.uploader.bucket_name = s3_bucket - self.uploader.delete_artifact(remote_path=key, is_key=True) + s3_info = self.get_property_value(resource_dict) + if s3_info["Key"]: + self.uploader.delete_artifact(remote_path=s3_info["Key"], is_key=True) + + def get_property_value(self, resource_dict): + """ + Get the s3 property value for this resource + """ + if resource_dict is None: + return {"Bucket": None, "Key": None} + + resource_path = resource_dict.get(self.PROPERTY_NAME, {}) + s3_bucket = resource_path.get(self.BUCKET_NAME_PROPERTY, None) + + key = resource_path.get(self.OBJECT_KEY_PROPERTY, None) + return {"Bucket": s3_bucket, "Key": key} class ServerlessFunctionResource(ResourceZip): diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index b3fbe53c7d..a7f1a9a8b9 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -145,7 +145,7 @@ def upload_with_dedup( return self.upload(file_name, remote_path) - def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: + def delete_artifact(self, remote_path: str, is_key: bool = False) -> bool: """ Deletes a given file from 
diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py
index b3fbe53c7d..a7f1a9a8b9 100644
--- a/samcli/lib/package/s3_uploader.py
+++ b/samcli/lib/package/s3_uploader.py
@@ -145,7 +145,7 @@ def upload_with_dedup(
 
         return self.upload(file_name, remote_path)
 
-    def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict:
+    def delete_artifact(self, remote_path: str, is_key: bool = False) -> bool:
         """
         Deletes a given file from S3
 
         :param remote_path: Path to the file that will be deleted
@@ -163,10 +163,16 @@ def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict:
                 key = "{0}/{1}".format(self.prefix, remote_path)
 
             # Deleting Specific file with key
-            click.echo(f"\t- Deleting S3 file {key}")
-            resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key)
-            LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"])
-            return dict(resp["ResponseMetadata"])
+            if self.file_exists(remote_path=key):
+                click.echo(f"\t- Deleting S3 object with key {key}")
+                self.s3.delete_object(Bucket=self.bucket_name, Key=key)
+                LOG.debug("Deleted s3 object with key %s successfully", key)
+                return True
+
+            # Given s3 object key does not exist
+            LOG.debug("Could not find the S3 file with the key %s", key)
+            click.echo(f"\t- Could not find and delete the S3 object with the key {key}")
+            return False
 
         except botocore.exceptions.ClientError as ex:
             error_code = ex.response["Error"]["Code"]
@@ -183,9 +189,9 @@
             LOG.error("Bucket not specified")
             raise BucketNotSpecifiedError()
         if self.prefix:
-            prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix)
-
-            for obj in prefix_files["Contents"]:
+            response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix)
+            prefix_files = response.get("Contents", [])
+            for obj in prefix_files:
                 self.delete_artifact(obj["Key"], True)
 
     def file_exists(self, remote_path: str) -> bool:
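Note: the response.get("Contents", []) guard above matters because list_objects_v2 omits the "Contents" key entirely when no object matches the prefix, so the old prefix_files["Contents"] raised KeyError on an already-empty prefix. An illustrative boto3 snippet; the bucket and prefix names are placeholders:

    import boto3

    s3 = boto3.client("s3")
    # With no matching objects, the response carries no "Contents" key;
    # .get(..., []) turns that case into a harmless empty loop.
    response = s3.list_objects_v2(Bucket="some-bucket", Prefix="unused-prefix/")
    for obj in response.get("Contents", []):
        print(obj["Key"])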
diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py
index e6b9d14320..0fea4bf3cb 100644
--- a/tests/unit/lib/package/test_artifact_exporter.py
+++ b/tests/unit/lib/package/test_artifact_exporter.py
@@ -14,7 +14,7 @@
 from samcli.lib.package.s3_uploader import S3Uploader
 from samcli.lib.package.uploaders import Destination
 from samcli.lib.package.utils import zip_folder, make_zip
-from samcli.lib.utils.packagetype import ZIP
+from samcli.lib.utils.packagetype import ZIP, IMAGE
 from tests.testing_utils import FileCreator
 from samcli.commands.package import exceptions
 from samcli.lib.package.artifact_exporter import (
@@ -1401,7 +1401,6 @@ def example_yaml_template(self):
         """
 
     def test_template_delete(self):
-        template_str = self.example_yaml_template()
 
         resource_type1_class = Mock()
         resource_type1_class.RESOURCE_TYPE = "resource_type1"
@@ -1451,3 +1450,52 @@ def test_template_delete(self):
         resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY)
         resource_type3_class.assert_not_called()
         resource_type3_instance.delete.assert_not_called()
+
+    def test_template_get_s3_info(self):
+
+        resource_type1_class = Mock()
+        resource_type1_class.RESOURCE_TYPE = "resource_type1"
+        resource_type1_class.ARTIFACT_TYPE = ZIP
+        resource_type1_class.PROPERTY_NAME = "CodeUri"
+        resource_type1_class.EXPORT_DESTINATION = Destination.S3
+        resource_type1_instance = Mock()
+        resource_type1_class.return_value = resource_type1_instance
+        resource_type1_instance.get_property_value = Mock()
+        resource_type1_instance.get_property_value.return_value = {"Bucket": "bucket", "Key": "prefix/file"}
+
+        resource_type2_class = Mock()
+        resource_type2_class.RESOURCE_TYPE = "resource_type2"
+        resource_type2_class.ARTIFACT_TYPE = ZIP
+        resource_type2_class.EXPORT_DESTINATION = Destination.S3
+        resource_type2_instance = Mock()
+        resource_type2_class.return_value = resource_type2_instance
+
+        resource_type3_class = Mock()
+        resource_type3_class.RESOURCE_TYPE = "resource_type3"
+        resource_type3_class.ARTIFACT_TYPE = IMAGE
+        resource_type3_class.EXPORT_DESTINATION = Destination.ECR
+        resource_type3_instance = Mock()
+        resource_type3_class.return_value = resource_type3_instance
+
+        resources_to_export = [resource_type3_class, resource_type2_class, resource_type1_class]
+
+        properties = {"foo": "bar", "CodeUri": "s3://bucket/prefix/file"}
+        template_dict = {
+            "Resources": {
+                "Resource1": {"Type": "resource_type1", "Properties": properties},
+            }
+        }
+        template_str = json.dumps(template_dict, indent=4, ensure_ascii=False)
+
+        template_exporter = Template(
+            template_path=None,
+            parent_dir=None,
+            uploaders=self.uploaders_mock,
+            code_signer=None,
+            resources_to_export=resources_to_export,
+            template_str=template_str,
+        )
+
+        s3_info = template_exporter.get_s3_info()
+        self.assertEqual(s3_info, {"s3_bucket": "bucket", "s3_prefix": "prefix"})
+        resource_type1_instance.get_property_value.assert_called_once_with(properties)
diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py
index f1765c3f8c..55b1bfbb2d 100644
--- a/tests/unit/lib/package/test_s3_uploader.py
+++ b/tests/unit/lib/package/test_s3_uploader.py
@@ -181,10 +181,26 @@ def test_s3_delete_artifact(self):
             force_upload=self.force_upload,
             no_progressbar=self.no_progressbar,
         )
-        s3_uploader.artifact_metadata = {"a": "b"}
+        self.s3.delete_object = MagicMock()
+        self.s3.head_object = MagicMock()
         with self.assertRaises(BucketNotSpecifiedError) as ex:
             with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
-                self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"})
+                self.assertTrue(s3_uploader.delete_artifact(f.name))
+
+    def test_s3_delete_non_existent_artifact(self):
+        s3_uploader = S3Uploader(
+            s3_client=self.s3,
+            bucket_name=None,
+            prefix=self.prefix,
+            kms_key_id=self.kms_key_id,
+            force_upload=self.force_upload,
+            no_progressbar=self.no_progressbar,
+        )
+        self.s3.delete_object = MagicMock()
+        self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object"))
+        with self.assertRaises(BucketNotSpecifiedError) as ex:
+            with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
+                self.assertFalse(s3_uploader.delete_artifact(f.name))
 
     def test_s3_delete_artifact_no_bucket(self):
         s3_uploader = S3Uploader(
@@ -217,6 +233,35 @@ def test_s3_delete_artifact_bucket_not_found(self):
             with self.assertRaises(NoSuchBucketError):
                 s3_uploader.delete_artifact(f.name)
 
+    def test_delete_prefix_artifacts_no_bucket(self):
+        s3_uploader = S3Uploader(
+            s3_client=self.s3,
+            bucket_name=None,
+            prefix=self.prefix,
+            kms_key_id=self.kms_key_id,
+            force_upload=self.force_upload,
+            no_progressbar=self.no_progressbar,
+        )
+        with self.assertRaises(BucketNotSpecifiedError):
+            s3_uploader.delete_prefix_artifacts()
+
+    def test_delete_prefix_artifacts_execute(self):
+        s3_uploader = S3Uploader(
+            s3_client=self.s3,
+            bucket_name=self.bucket_name,
+            prefix=self.prefix,
+            kms_key_id=self.kms_key_id,
+            force_upload=self.force_upload,
+            no_progressbar=self.no_progressbar,
+        )
+
+        s3_uploader.s3.delete_object = MagicMock()
+
+        s3_uploader.s3.list_objects_v2 = MagicMock(return_value={"Contents": [{"Key": "key"}]})
+
+        s3_uploader.delete_prefix_artifacts()
+        s3_uploader.s3.delete_object.assert_called_once_with(Bucket="mock-bucket", Key="key")
+
     def test_s3_upload_with_dedup(self):
         s3_uploader = S3Uploader(
             s3_client=self.s3,
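Note: as a mock-free sanity check of what test_template_get_s3_info asserts, the expected bucket/prefix split for the sample CodeUri can be reproduced with plain string handling (a simplified stand-in for S3Uploader.parse_s3_url, not the library code):

    url = "s3://bucket/prefix/file"
    bucket, key = url[len("s3://"):].split("/", 1)  # ("bucket", "prefix/file")
    prefix = key.rsplit("/", 1)[0]                  # "prefix"
    assert (bucket, prefix) == ("bucket", "prefix")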