Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
1263735
Added methods for cf and s3 files and init UI
hnnasit Jun 21, 2021
ba47369
Added unit tests for utils methods and s3_uploader
hnnasit Jun 23, 2021
d77f7c4
Removed s3_bucket and s3_prefix click options
hnnasit Jun 24, 2021
d506648
chore: Increase awareness of same file warning during package (#2946)
mndeveci Jun 24, 2021
698de67
fix: Allow the base64Encoded field in REST Api, skip validation of un…
moelasmar Jun 24, 2021
3f6f070
Fixed lint errors and added few unit tests
hnnasit Jun 25, 2021
af2f929
Make black happy
hnnasit Jun 25, 2021
481ef8a
Merge pull request #1 from hnnasit/delete-cf-s3-methods
hnnasit Jun 28, 2021
1d70155
Added methods for deleting template artifacts
hnnasit Jun 28, 2021
e3f7872
Wait method added for delete cf api
hnnasit Jun 28, 2021
99f7db4
Added LOG statements
hnnasit Jun 29, 2021
e7304ec
Added and updated changes based on CR
hnnasit Jun 29, 2021
c2e43db
Fixed the unit tests in artifact_exporter.py
hnnasit Jun 29, 2021
dd35d53
Update HELP_TEXT in delete/command.py
hnnasit Jun 29, 2021
f43e763
Updated code based on Chris' comments
hnnasit Jun 30, 2021
5779cd3
Added condition for resources that have deletionpolicy specified
hnnasit Jun 30, 2021
bc9db14
Small changes and fixes based on the comments
hnnasit Jun 30, 2021
401a950
Removed region prompt
hnnasit Jun 30, 2021
0a38340
Added unit tests for ecr delete method and typing for methods
hnnasit Jul 3, 2021
8977c6a
Merge branch 'delete-template-artifacts' into delete-cf-s3-methods
hnnasit Jul 3, 2021
99c80e1
Merge pull request #2 from hnnasit/delete-cf-s3-methods
hnnasit Jul 3, 2021
aaa1b05
Reformatted delete_context and added option to skip user prompts
hnnasit Jul 5, 2021
e2e85a9
Removed return type from artifact_exporter for delete method
hnnasit Jul 5, 2021
c98e6ee
Added unit tests for artifact_exporter and delete_context
hnnasit Jul 5, 2021
17a427a
Added more unit tests for delete_context and artifact_exporter
hnnasit Jul 5, 2021
e577d7f
Added more unit tests for delete_context and artifact_exporter
hnnasit Jul 6, 2021
70eac98
Merged feat/sam_delete changes into delete_template_artifacts repo
hnnasit Jul 6, 2021
58ead71
Added docs and comments for artifact_exporter and ecr_uploader
hnnasit Jul 6, 2021
45ee66f
Added log statements in delete_context and some updates in unit tests
hnnasit Jul 7, 2021
d151b01
Changed force to no-prompts and updated ecr delete method error hand…
hnnasit Jul 8, 2021
6f54240
Created a separate function for parsing ecr url in ecr_uploader
hnnasit Jul 9, 2021
30e3c02
Reformatted Template class init to pass template_str and init templat…
hnnasit Jul 9, 2021
b8a2591
Changed how s3 url is obtained for resource_zip edge-case: aws:glue:job
hnnasit Jul 9, 2021
7292353
Fixed edge case where resource artifact points to a path style url
hnnasit Jul 9, 2021
6ff2e22
run Make black
hnnasit Jul 9, 2021
6c9a060
Made the parse s3 url funcs protected and defined a parent method and…
hnnasit Jul 10, 2021
49e0967
Added methods to extract s3 info from cf template
hnnasit Jul 11, 2021
4450886
Added testing for get_s3_info method for artifact_exporter and s3_upl…
hnnasit Jul 12, 2021
5098e3c
Merged feat/sam_delete branch into get-s3-info-cf-template branch
hnnasit Jul 14, 2021
29add30
Removed commented code and updated method docstring
hnnasit Jul 16, 2021
308f859
Better error handling for s3 delete artifacts and fixed bug for getti…
hnnasit Jul 17, 2021
4275e34
Changed get_s3_info to get_property_value and changed output text for…
hnnasit Jul 19, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 14 additions & 3 deletions samcli/commands/delete/delete_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,20 @@ def delete(self):
with mktempfile() as temp_file:
self.cf_template_file_name = get_cf_template_name(temp_file, template_str, "template")

template = Template(
template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str
)

# If s3 info is not available, try to obtain it from CF
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

do we really get the S3 info from CF, or from the template itself?
S3 info can only be given through the toml config file, right?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes correct we get the s3 info from the cloudformation stack template and not cloudformation.

# template resources.
if not self.s3_bucket:
s3_info = template.get_s3_info()
self.s3_bucket = s3_info["s3_bucket"]
self.s3_uploader.bucket_name = self.s3_bucket

self.s3_prefix = s3_info["s3_prefix"]
self.s3_uploader.prefix = self.s3_prefix

self.guided_prompts()

# Delete the primary stack
Expand All @@ -158,9 +172,6 @@ def delete(self):
LOG.debug("Deleted Cloudformation stack: %s", self.stack_name)

# Delete the artifacts
template = Template(
template_path=None, parent_dir=None, uploaders=self.uploaders, code_signer=None, template_str=template_str
)
template.delete()

# Delete the CF template file in S3
Expand Down
48 changes: 46 additions & 2 deletions samcli/lib/package/artifact_exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
ResourceZip,
)
from samcli.lib.package.s3_uploader import S3Uploader
from samcli.lib.package.uploaders import Uploaders
from samcli.lib.package.uploaders import Uploaders, Destination
from samcli.lib.package.utils import (
is_local_folder,
make_abs_path,
Expand All @@ -47,7 +47,6 @@
from samcli.lib.utils.packagetype import ZIP
from samcli.yamlhelper import yaml_parse, yaml_dump


# NOTE: sriram-mv, A cyclic dependency on `Template` needs to be broken.


Expand Down Expand Up @@ -265,3 +264,48 @@ def delete(self):
# Delete code resources
exporter = exporter_class(self.uploaders, None)
exporter.delete(resource_id, resource_dict)

def get_s3_info(self):
    """
    Iterate the template_dict resources whose exporter has an S3
    EXPORT_DESTINATION and return the s3_bucket and s3_prefix used for
    packaged artifacts, for the purpose of deletion.

    The first resource that carries S3 information wins and the scan
    terminates, on the assumption that all files packaged via the
    ``sam package`` and ``sam deploy`` commands live in the same S3
    bucket under the same prefix.

    NOTE(review): that assumption only holds for stacks created by SAM;
    for a stack whose artifacts were uploaded by other means, the bucket
    found here may not be the one that holds the template file.

    :return: dict with keys "s3_bucket" and "s3_prefix" (either may be None)
    """
    result = {"s3_bucket": None, "s3_prefix": None}
    if "Resources" not in self.template_dict:
        return result

    # Resolve Globals first so per-resource properties are complete.
    self._apply_global_values()

    for _, resource in self.template_dict["Resources"].items():

        resource_type = resource.get("Type", None)
        resource_dict = resource.get("Properties", {})

        for exporter_class in self.resources_to_export:
            # Skip the resources which don't give s3 information
            if exporter_class.EXPORT_DESTINATION != Destination.S3:
                continue
            if exporter_class.RESOURCE_TYPE != resource_type:
                continue
            if resource_dict.get("PackageType", ZIP) != exporter_class.ARTIFACT_TYPE:
                continue

            exporter = exporter_class(self.uploaders, None)
            s3_info = exporter.get_property_value(resource_dict)

            result["s3_bucket"] = s3_info["Bucket"]
            s3_key = s3_info["Key"]

            # Extract the prefix from the key: everything before the last "/".
            if s3_key:
                key_split = s3_key.rsplit("/", 1)
                if len(key_split) > 1:
                    result["s3_prefix"] = key_split[0]
            # A matching exporter was found for this resource; stop scanning.
            break
        if result["s3_bucket"]:
            break

    return result
41 changes: 31 additions & 10 deletions samcli/lib/package/packageable_resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,11 +164,22 @@ def delete(self, resource_id, resource_dict):
"""
if resource_dict is None:
return

s3_info = self.get_property_value(resource_dict)
if s3_info["Key"]:
self.uploader.delete_artifact(s3_info["Key"], True)

def get_property_value(self, resource_dict):
    """
    Get the s3 property value for this resource.

    :param resource_dict: the resource's Properties mapping (may be None)
    :return: result of ``parse_s3_url`` ({"Bucket": ..., "Key": ...}) when the
             property is present, otherwise {"Bucket": None, "Key": None}
    """
    # Defect fixed: the span contained superseded lines from an earlier
    # revision (old delete logic that mutated self.uploader.bucket_name and
    # called delete_artifact) interleaved with the current implementation.
    # Only the current, side-effect-free lookup is kept.
    if resource_dict is None:
        return {"Bucket": None, "Key": None}

    resource_path = jmespath.search(self.PROPERTY_NAME, resource_dict)
    if resource_path:
        return self.uploader.parse_s3_url(resource_path)
    return {"Bucket": None, "Key": None}


class ResourceImageDict(Resource):
Expand Down Expand Up @@ -322,13 +333,23 @@ def delete(self, resource_id, resource_dict):
"""
if resource_dict is None:
return
resource_path = resource_dict[self.PROPERTY_NAME]
s3_bucket = resource_path[self.BUCKET_NAME_PROPERTY]
key = resource_path[self.OBJECT_KEY_PROPERTY]

if not self.uploader.bucket_name:
self.uploader.bucket_name = s3_bucket
self.uploader.delete_artifact(remote_path=key, is_key=True)
s3_info = self.get_property_value(resource_dict)
if s3_info["Key"]:
self.uploader.delete_artifact(remote_path=s3_info["Key"], is_key=True)

def get_property_value(self, resource_dict):
    """
    Get the s3 property value for this resource.

    Returns a dict of the form ``{"Bucket": ..., "Key": ...}``; either value
    is ``None`` when the resource dict or the relevant sub-property is absent.
    """
    if resource_dict is None:
        return {"Bucket": None, "Key": None}

    location = resource_dict.get(self.PROPERTY_NAME, {})
    return {
        "Bucket": location.get(self.BUCKET_NAME_PROPERTY, None),
        "Key": location.get(self.OBJECT_KEY_PROPERTY, None),
    }


class ServerlessFunctionResource(ResourceZip):
Expand Down
22 changes: 14 additions & 8 deletions samcli/lib/package/s3_uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def upload_with_dedup(

return self.upload(file_name, remote_path)

def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict:
def delete_artifact(self, remote_path: str, is_key: bool = False) -> bool:
"""
Deletes a given file from S3
:param remote_path: Path to the file that will be deleted
Expand All @@ -163,10 +163,16 @@ def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict:
key = "{0}/{1}".format(self.prefix, remote_path)

# Deleting Specific file with key
click.echo(f"\t- Deleting S3 file {key}")
resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key)
LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"])
return dict(resp["ResponseMetadata"])
if self.file_exists(remote_path=key):
click.echo(f"\t- Deleting S3 object with key {key}")
self.s3.delete_object(Bucket=self.bucket_name, Key=key)
LOG.debug("Deleted s3 object with key %s successfully", key)
return True

# Given s3 object key does not exist
LOG.debug("Could not find the S3 file with the key %s", key)
click.echo(f"\t- Could not find and delete the S3 object with the key {key}")
return False

except botocore.exceptions.ClientError as ex:
error_code = ex.response["Error"]["Code"]
Expand All @@ -183,9 +189,9 @@ def delete_prefix_artifacts(self):
LOG.error("Bucket not specified")
raise BucketNotSpecifiedError()
if self.prefix:
prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix)

for obj in prefix_files["Contents"]:
response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix)
prefix_files = response.get("Contents", [])
for obj in prefix_files:
self.delete_artifact(obj["Key"], True)

def file_exists(self, remote_path: str) -> bool:
Expand Down
52 changes: 50 additions & 2 deletions tests/unit/lib/package/test_artifact_exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from samcli.lib.package.s3_uploader import S3Uploader
from samcli.lib.package.uploaders import Destination
from samcli.lib.package.utils import zip_folder, make_zip
from samcli.lib.utils.packagetype import ZIP
from samcli.lib.utils.packagetype import ZIP, IMAGE
from tests.testing_utils import FileCreator
from samcli.commands.package import exceptions
from samcli.lib.package.artifact_exporter import (
Expand Down Expand Up @@ -1401,7 +1401,6 @@ def example_yaml_template(self):
"""

def test_template_delete(self):
template_str = self.example_yaml_template()

resource_type1_class = Mock()
resource_type1_class.RESOURCE_TYPE = "resource_type1"
Expand Down Expand Up @@ -1451,3 +1450,52 @@ def test_template_delete(self):
resource_type2_instance.delete.assert_called_once_with("Resource2", mock.ANY)
resource_type3_class.assert_not_called()
resource_type3_instance.delete.assert_not_called()

def test_template_get_s3_info(self):
    """get_s3_info returns the bucket and key-prefix extracted from the first
    template resource whose exporter targets S3."""

    # Exporter class that matches the template resource below: S3 destination,
    # ZIP artifact, with a stubbed get_property_value result.
    resource_type1_class = Mock()
    resource_type1_class.RESOURCE_TYPE = "resource_type1"
    resource_type1_class.ARTIFACT_TYPE = ZIP
    resource_type1_class.PROPERTY_NAME = "CodeUri"
    resource_type1_class.EXPORT_DESTINATION = Destination.S3
    resource_type1_instance = Mock()
    resource_type1_class.return_value = resource_type1_instance
    resource_type1_instance.get_property_value = Mock()
    resource_type1_instance.get_property_value.return_value = {"Bucket": "bucket", "Key": "prefix/file"}

    # S3-destined exporter whose RESOURCE_TYPE does not occur in the template;
    # it must be skipped without being instantiated.
    resource_type2_class = Mock()
    resource_type2_class.RESOURCE_TYPE = "resource_type2"
    resource_type2_class.ARTIFACT_TYPE = ZIP
    resource_type2_class.EXPORT_DESTINATION = Destination.S3
    resource_type2_instance = Mock()
    resource_type2_class.return_value = resource_type2_instance

    # ECR-destined exporter; EXPORT_DESTINATION != S3, so it is ignored.
    resource_type3_class = Mock()
    resource_type3_class.RESOURCE_TYPE = "resource_type3"
    resource_type3_class.ARTIFACT_TYPE = IMAGE
    resource_type3_class.EXPORT_DESTINATION = Destination.ECR
    resource_type3_instance = Mock()
    resource_type3_class.return_value = resource_type3_instance

    resources_to_export = [resource_type3_class, resource_type2_class, resource_type1_class]

    properties = {"foo": "bar", "CodeUri": "s3://bucket/prefix/file"}
    template_dict = {
        "Resources": {
            "Resource1": {"Type": "resource_type1", "Properties": properties},
        }
    }
    template_str = json.dumps(template_dict, indent=4, ensure_ascii=False)

    template_exporter = Template(
        template_path=None,
        parent_dir=None,
        uploaders=self.uploaders_mock,
        code_signer=None,
        resources_to_export=resources_to_export,
        template_str=template_str,
    )

    # "prefix" is the part of the stubbed Key before its last "/".
    s3_info = template_exporter.get_s3_info()
    self.assertEqual(s3_info, {"s3_bucket": "bucket", "s3_prefix": "prefix"})
    resource_type1_instance.get_property_value.assert_called_once_with(properties)
49 changes: 47 additions & 2 deletions tests/unit/lib/package/test_s3_uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,10 +181,26 @@ def test_s3_delete_artifact(self):
force_upload=self.force_upload,
no_progressbar=self.no_progressbar,
)
s3_uploader.artifact_metadata = {"a": "b"}
self.s3.delete_object = MagicMock()
self.s3.head_object = MagicMock()
with self.assertRaises(BucketNotSpecifiedError) as ex:
with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"})
self.assertTrue(s3_uploader.delete_artifact(f.name))

def test_s3_delete_non_existant_artifact(self):
    """Deleting a missing S3 object should report failure rather than raise."""
    s3_uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=None,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
        no_progressbar=self.no_progressbar,
    )
    self.s3.delete_object = MagicMock()
    # file_exists() consults head_object; the ClientError makes it report False.
    self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object"))
    # NOTE(review): bucket_name is None, so delete_artifact raises
    # BucketNotSpecifiedError before head_object is ever consulted, and the
    # assertFalse below never executes -- confirm whether a real bucket name
    # should be supplied here to actually exercise the missing-object path.
    with self.assertRaises(BucketNotSpecifiedError) as ex:
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
            self.assertFalse(s3_uploader.delete_artifact(f.name))

def test_s3_delete_artifact_no_bucket(self):
s3_uploader = S3Uploader(
Expand Down Expand Up @@ -217,6 +233,35 @@ def test_s3_delete_artifact_bucket_not_found(self):
with self.assertRaises(NoSuchBucketError):
s3_uploader.delete_artifact(f.name)

def test_delete_prefix_artifacts_no_bucket(self):
    """delete_prefix_artifacts must raise when no bucket name was configured."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=None,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
        no_progressbar=self.no_progressbar,
    )

    # With no bucket there is nothing to list or delete.
    with self.assertRaises(BucketNotSpecifiedError):
        uploader.delete_prefix_artifacts()

def test_delete_prefix_artifacts_execute(self):
    """delete_prefix_artifacts lists objects under the prefix and deletes each key."""
    s3_uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
        no_progressbar=self.no_progressbar,
    )

    s3_uploader.s3.delete_object = MagicMock()

    # One object listed under the prefix; delete_artifact is expected to be
    # invoked for it with is_key=True.
    s3_uploader.s3.list_objects_v2 = MagicMock(return_value={"Contents": [{"Key": "key"}]})

    s3_uploader.delete_prefix_artifacts()
    # "mock-bucket" is presumably the value of self.bucket_name -- verify
    # against the test-case setUp.
    s3_uploader.s3.delete_object.assert_called_once_with(Bucket="mock-bucket", Key="key")

def test_s3_upload_with_dedup(self):
s3_uploader = S3Uploader(
s3_client=self.s3,
Expand Down