
Commit 313e481

feat: S3 delete (#86)
* feat: add s3 delete methods
* feat: add test case
1 parent 14904da commit 313e481

2 files changed (+110, -0 lines)

mdps_ds_lib/lib/aws/aws_s3.py

Lines changed: 52 additions & 0 deletions
@@ -222,3 +222,55 @@ def read_small_txt_file(self):
        """
        bytestream = BytesIO(self.get_stream().read())  # get the bytes stream of zipped file
        return bytestream.read().decode('UTF-8')

    def delete_one(self):
        response = self.__s3_client.delete_object(
            Bucket=self.__target_bucket,
            Key=self.__target_key,
            # MFA='string',
            # VersionId='string',
            # RequestPayer='requester',
            # BypassGovernanceRetention=True | False,
            # ExpectedBucketOwner='string',
            # IfMatch='string',
            # IfMatchLastModifiedTime=datetime(2015, 1, 1),
            # IfMatchSize=123
        )
        return response

    def delete_multiple(self, s3_urls: list=[], s3_bucket: str='', s3_paths: list=[]):
        if len(s3_urls) < 1 and len(s3_paths) < 1:
            raise ValueError(f'unable to delete empty list of URLs or Paths')
        if len(s3_urls) < 1:
            if s3_bucket == '':
                raise ValueError(f'empty s3 bucket for paths')
        else:
            s3_splits = [self.split_s3_url(k) for k in s3_urls]
            s3_bucket = list(set([k[0] for k in s3_splits]))
            if len(s3_bucket) > 1:
                raise ValueError(f'unable to delete multiple s3 buckets: {s3_bucket}')
            s3_bucket = s3_bucket[0]
            s3_paths = list(set([k[1] for k in s3_splits]))
        s3_paths = [{'Key': k,
                     # 'VersionId': 'string',
                     # 'ETag': 'string',
                     # 'LastModifiedTime': datetime(2015, 1, 1),
                     # 'Size': 123
                     } for k in s3_paths]
        response = self.__s3_client.delete_objects(
            Bucket=s3_bucket,
            Delete={
                'Objects': s3_paths,
                'Quiet': True,  # True | False
            },
            # MFA='string',
            # VersionId='string',
            # RequestPayer='requester',
            # BypassGovernanceRetention=True | False,
            # ExpectedBucketOwner='string',
            # IfMatch='string',
            # IfMatchLastModifiedTime=datetime(2015, 1, 1),
            # IfMatchSize=123
        )
        return response
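A minimal usage sketch of the two new methods (not part of the commit), modeled on the test case added below; the bucket name and keys are placeholders, and set_s3_url / upload_bytes are existing AwsS3 helpers that those tests rely on:

from mdps_ds_lib.lib.aws.aws_s3 import AwsS3

# Sketch only: hypothetical bucket and keys, assuming valid AWS credentials are configured.
s3 = AwsS3()
s3.set_s3_url('s3://some-bucket/tmp/example.txt').upload_bytes(b'payload')
print(s3.delete_one())  # boto3 delete_object response for the current target key

urls = [f's3://some-bucket/tmp/example-{i}.txt' for i in range(3)]
for u in urls:
    s3.set_s3_url(u).upload_bytes(b'payload')
print(s3.delete_multiple(s3_urls=urls))  # boto3 delete_objects response covering all three keys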
New test file for AwsS3 (path not shown in this view)

Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
from unittest import TestCase

from mdps_ds_lib.lib.aws.aws_s3 import AwsS3


class TestAwsS3(TestCase):
    def test_delete_one_01(self):
        bucket = 'uds-sbx-cumulus-staging'
        s3 = AwsS3()
        single_path = 'tmp/unit-test/file1.txt'
        s3.set_s3_url(f's3://{bucket}/{single_path}').upload_bytes('This is a test'.encode())
        d = s3.delete_one()
        print(d)
        return

    def test_delete_multiple_01(self):
        bucket = 'uds-sbx-cumulus-staging'
        s3 = AwsS3()
        deleting_s3_urls = []
        for i in range(10):
            single_path = f'tmp/unit-test/file{i}.txt'
            deleting_s3_urls.append(f's3://{bucket}/{single_path}')
            s3.set_s3_url(deleting_s3_urls[-1]).upload_bytes(f'This is a test - {i}'.encode())
        d = s3.delete_multiple(s3_urls=deleting_s3_urls)
        print(d)
        return

    def test_delete_multiple_02(self):
        bucket = 'uds-sbx-cumulus-staging'
        s3 = AwsS3()
        deleting_s3_paths = []
        for i in range(10):
            single_path = f'tmp/unit-test/file{i}.txt'
            deleting_s3_paths.append(single_path)
            s3.set_s3_url(f's3://{bucket}/{single_path}').upload_bytes(f'This is a test - {i}'.encode())
        d = s3.delete_multiple(s3_bucket=bucket, s3_paths=deleting_s3_paths)
        print(d)
        return

    def test_delete_multiple_03(self):
        bucket = 'uds-sbx-cumulus-staging'
        s3 = AwsS3()
        with self.assertRaises(ValueError) as context:
            s3.delete_multiple(s3_bucket=bucket, s3_paths=[])
        self.assertTrue(str(context.exception).startswith('unable to delete empty list of URLs or Paths'))

        with self.assertRaises(ValueError) as context:
            s3.delete_multiple(s3_urls=[])
        self.assertTrue(str(context.exception).startswith('unable to delete empty list of URLs or Paths'))

        with self.assertRaises(ValueError) as context:
            s3.delete_multiple(s3_bucket='', s3_paths=['a', 'b', 'c'])
        self.assertTrue(str(context.exception).startswith('empty s3 bucket for paths'))

        with self.assertRaises(ValueError) as context:
            s3.delete_multiple(s3_urls=['s3://a/b', 's3://b/c'])
        self.assertTrue(str(context.exception).startswith('unable to delete multiple s3 buckets'))
        return
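Since delete_multiple passes Quiet=True to boto3's delete_objects, the response is expected to list only failed deletions, so an empty Errors list can be read as success. A minimal sketch under that assumption, with placeholder URLs:

from mdps_ds_lib.lib.aws.aws_s3 import AwsS3

# Sketch only: in quiet mode, delete_objects reports only the keys it failed to delete.
s3 = AwsS3()
response = s3.delete_multiple(s3_urls=['s3://some-bucket/tmp/file0.txt', 's3://some-bucket/tmp/file1.txt'])
errors = response.get('Errors', [])
assert not errors, f'some objects failed to delete: {errors}'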
