From efff9b54e00677d6d827b61704bf6a9e32445374 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 10:39:25 +0100
Subject: [PATCH 01/14] Enhanced AwsAccess and added integration test

---
 .../ds/sandbox/lib/aws_access/aws_access.py   | 24 ++++++++-
 .../aws/test_s3_transfer_multipart.py         | 50 +++++++++++++++++++
 2 files changed, 73 insertions(+), 1 deletion(-)
 create mode 100644 test/integration/aws/test_s3_transfer_multipart.py

diff --git a/exasol/ds/sandbox/lib/aws_access/aws_access.py b/exasol/ds/sandbox/lib/aws_access/aws_access.py
index a022c40c..76059a0a 100644
--- a/exasol/ds/sandbox/lib/aws_access/aws_access.py
+++ b/exasol/ds/sandbox/lib/aws_access/aws_access.py
@@ -1,5 +1,5 @@
 from functools import wraps
-from typing import Optional, Any, List, Dict, Tuple
+from typing import Any, Callable, Dict, List, Optional, Tuple
 
 import boto3
 import botocore
@@ -419,6 +419,28 @@ def copy_s3_object(self, bucket: str, source: str, dest: str):
         copy_source = {'Bucket': bucket, 'Key': source}
         cloud_client.copy_object(Bucket=bucket, CopySource=copy_source, Key=dest)
 
+    @_log_function_start
+    def transfer_to_s3(
+            self,
+            bucket: str,
+            source: str,
+            dest: str,
+            callback: Callable[[int], None] = None,
+    ):
+        """
+        Transfers a file to an AWS bucket using an AWS transfer object.
+        The transfer object will perform a multi-part upload which supports
+        transferring even large files bigger than 5 GB. Please note the minimum part size is 5 MB.
+
+        Optional parameter :callback: is a method taking the number of bytes
+        transferred; it is called periodically during the upload.
+        """
+        cloud_client = self._get_aws_client("s3")
+        config = boto3.s3.transfer.TransferConfig()
+        # transfer = boto3.s3.transfer.S3Transfer(client=cloud_client, config=config)
+        # transfer.upload_file(source, bucket, dest)
+        cloud_client.upload_file(source, bucket, dest, Config=config, Callback=callback)
+
     @_log_function_start
     def delete_s3_object(self, bucket: str, source: str):
         """
diff --git a/test/integration/aws/test_s3_transfer_multipart.py b/test/integration/aws/test_s3_transfer_multipart.py
new file mode 100644
index 00000000..c0420ad6
--- /dev/null
+++ b/test/integration/aws/test_s3_transfer_multipart.py
@@ -0,0 +1,50 @@
+import os
+import pytest
+
+from exasol.ds.sandbox.lib.asset_id import AssetId
+from exasol.ds.sandbox.lib.aws_access.aws_access import AwsAccess
+from exasol.ds.sandbox.lib.vm_bucket.vm_dss_bucket import find_vm_bucket
+
+from dataclasses import dataclass
+
+@dataclass
+class Progress:
+    bytes: int = 0
+
+    def report(self, bytes: int):
+        self.bytes += bytes
+        display = round(self.bytes / 1024 / 1024)
+        print(f'\rTransferred {display} MB ...', flush=True, end="")
+
+
+@pytest.fixture
+def sample_file(tmp_path):
+    """
+    Create a sample file for transfer to the S3 bucket.
+    """
+    file = tmp_path / "sample-file.txt"
+    one_kb = "123456789 " * 102 + "1234"
+    file.write_text(one_kb * 1024 * 6)
+    yield file
+    file.unlink()
+
+
+@pytest.mark.skipif(
+    os.environ.get('DSS_RUN_S3_TEST') != 'true',
+    reason="Tests accessing real S3 buckets need to be activated by env variable DSS_RUN_S3_TEST")
+def test_s3_transfer_multipart(sample_file):
+    aws = AwsAccess(aws_profile="ci4_mfa")
+    source = sample_file
+    bucket = find_vm_bucket(aws)
+    s3_key = f"{AssetId.BUCKET_PREFIX}-itest-sample-file"
+    progress = Progress()
+    print("")
+    try:
+        aws.transfer_to_s3(
+            bucket=bucket,
+            source=source,
+            dest=s3_key,
+            callback=progress.report,
+        )
+    finally:
+        aws.delete_s3_object(bucket, s3_key)
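For reference, the managed-transfer behavior that `transfer_to_s3` relies on can be reproduced standalone. The sketch below spells out boto3's multipart settings explicitly; the 8 MB values are boto3's documented defaults at the time of writing, and the profile, bucket, key, and file names are placeholders:

```python
import boto3
from boto3.s3.transfer import TransferConfig

# Files above multipart_threshold are uploaded in parts of multipart_chunksize.
config = TransferConfig(
    multipart_threshold=8 * 1024 * 1024,
    multipart_chunksize=8 * 1024 * 1024,
)

def on_bytes_transferred(n: int) -> None:
    # boto3 invokes the callback repeatedly with the byte count of each chunk.
    print(f"{n} bytes transferred")

# Placeholder profile, bucket, and key names.
s3 = boto3.session.Session(profile_name="ci4_mfa").client("s3")
s3.upload_file("sample-file.txt", "my-bucket", "my-key",
               Config=config, Callback=on_bytes_transferred)
```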
From b7f51195924fe17073b12de7e10b0c5089167091 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 10:46:53 +0100
Subject: [PATCH 02/14] Updated changes file

---
 doc/changes/changes_0.1.0.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/changes/changes_0.1.0.md b/doc/changes/changes_0.1.0.md
index 686e143c..a659dc5d 100644
--- a/doc/changes/changes_0.1.0.md
+++ b/doc/changes/changes_0.1.0.md
@@ -32,6 +32,7 @@ Version: 0.1.0
 * #75: Changed default port of Jupyter server to 49494
 * #145: Add Docket Test Library to prepare Notebook tests
 * #255: Renamed data science sandbox to exasol-ai-lab
+* #150: Used multipart upload for VM images
 
 ## Bug Fixes

From 230e1539bc02538aa723f751eb7e233b1875b327 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 11:13:05 +0100
Subject: [PATCH 03/14] Moved test to directory test/codebuild

Used the same environment variable as for the other codebuild tests and
renamed the file to include test execution with the codebuild tests.

---
 .../test_ci_s3_transfer_multipart.py}         | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)
 rename test/{integration/aws/test_s3_transfer_multipart.py => codebuild/test_ci_s3_transfer_multipart.py} (84%)

diff --git a/test/integration/aws/test_s3_transfer_multipart.py b/test/codebuild/test_ci_s3_transfer_multipart.py
similarity index 84%
rename from test/integration/aws/test_s3_transfer_multipart.py
rename to test/codebuild/test_ci_s3_transfer_multipart.py
index c0420ad6..1314cdd4 100644
--- a/test/integration/aws/test_s3_transfer_multipart.py
+++ b/test/codebuild/test_ci_s3_transfer_multipart.py
@@ -29,11 +29,10 @@ def sample_file(tmp_path):
     file.unlink()
 
 
-@pytest.mark.skipif(
-    os.environ.get('DSS_RUN_S3_TEST') != 'true',
-    reason="Tests accessing real S3 buckets need to be activated by env variable DSS_RUN_S3_TEST")
+@pytest.mark.skipif(os.environ.get('DSS_RUN_CI_TEST') != 'true',
+                    reason="CI tests need to be activated by env variable DSS_RUN_CI_TEST")
 def test_s3_transfer_multipart(sample_file):
-    aws = AwsAccess(aws_profile="ci4_mfa")
+    aws = AwsAccess(None)
     source = sample_file
     bucket = find_vm_bucket(aws)
     s3_key = f"{AssetId.BUCKET_PREFIX}-itest-sample-file"
From 1cfbcd00687c7cafef5ff881cafb2b9aef125076 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 11:29:20 +0100
Subject: [PATCH 04/14] Commented out progress reporting

---
 .../test_ci_s3_transfer_multipart.py          | 26 +++++++++----------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/test/codebuild/test_ci_s3_transfer_multipart.py b/test/codebuild/test_ci_s3_transfer_multipart.py
index 1314cdd4..6398eee0 100644
--- a/test/codebuild/test_ci_s3_transfer_multipart.py
+++ b/test/codebuild/test_ci_s3_transfer_multipart.py
@@ -5,16 +5,16 @@
 from exasol.ds.sandbox.lib.aws_access.aws_access import AwsAccess
 from exasol.ds.sandbox.lib.vm_bucket.vm_dss_bucket import find_vm_bucket
 
-from dataclasses import dataclass
-
-@dataclass
-class Progress:
-    bytes: int = 0
-
-    def report(self, bytes: int):
-        self.bytes += bytes
-        display = round(self.bytes / 1024 / 1024)
-        print(f'\rTransferred {display} MB ...', flush=True, end="")
+# from dataclasses import dataclass
+#
+# @dataclass
+# class Progress:
+#     bytes: int = 0
+#
+#     def report(self, bytes: int):
+#         self.bytes += bytes
+#         display = round(self.bytes / 1024 / 1024)
+#         print(f'\rTransferred {display} MB ...', flush=True, end="")
 
 
 @pytest.fixture
@@ -36,14 +36,14 @@ def test_s3_transfer_multipart(sample_file):
     source = sample_file
     bucket = find_vm_bucket(aws)
     s3_key = f"{AssetId.BUCKET_PREFIX}-itest-sample-file"
-    progress = Progress()
-    print("")
+    # progress = Progress()
+    # print("")
     try:
         aws.transfer_to_s3(
             bucket=bucket,
             source=source,
             dest=s3_key,
-            callback=progress.report,
+            # callback=progress.report,
         )
     finally:
         aws.delete_s3_object(bucket, s3_key)

From bbf31133b5697f87c9825271d03ced6831bfa535 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 11:32:44 +0100
Subject: [PATCH 05/14] Updated method comment in AwsAccess

---
 exasol/ds/sandbox/lib/aws_access/aws_access.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/exasol/ds/sandbox/lib/aws_access/aws_access.py b/exasol/ds/sandbox/lib/aws_access/aws_access.py
index 76059a0a..b0a40067 100644
--- a/exasol/ds/sandbox/lib/aws_access/aws_access.py
+++ b/exasol/ds/sandbox/lib/aws_access/aws_access.py
@@ -430,15 +430,13 @@ def transfer_to_s3(
         """
         Transfers a file to an AWS bucket using an AWS transfer object.
         The transfer object will perform a multi-part upload which supports
-        transferring even large files bigger than 5 GB. Please note the minimum part size is 5 MB.
+        transferring even files larger than 5 GB.
 
         Optional parameter :callback: is a method taking the number of bytes
         transferred; it is called periodically during the upload.
""" cloud_client = self._get_aws_client("s3") config = boto3.s3.transfer.TransferConfig() - # transfer = boto3.s3.transfer.S3Transfer(client=cloud_client, config=config) - # transfer.upload_file(source, bucket, dest) cloud_client.upload_file(source, bucket, dest, Config=config, Callback=callback) @_log_function_start From bb629dfddde2467e7b55fb2295a859afea61d709 Mon Sep 17 00:00:00 2001 From: ckunki Date: Wed, 31 Jan 2024 11:34:32 +0100 Subject: [PATCH 06/14] Replaced productive method by new method --- exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py b/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py index 4c1ebd3c..0ea659c5 100644 --- a/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py +++ b/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py @@ -39,5 +39,5 @@ def rename_image_in_s3(aws_access: AwsAccess, export_image_task: ExportImageTask vm_image_format=vm_image_format) dest = build_image_destination(prefix=export_image_task.s3_prefix, asset_id=asset_id, vm_image_format=vm_image_format) - aws_access.copy_s3_object(bucket=export_image_task.s3_bucket, source=source, dest=dest) + aws_access.transfer_to_s3(bucket=export_image_task.s3_bucket, source=source, dest=dest) aws_access.delete_s3_object(bucket=export_image_task.s3_bucket, source=source) From 1af4c557d55a71978f75b8c489fa3fbe4f2d00d6 Mon Sep 17 00:00:00 2001 From: ckunki Date: Wed, 31 Jan 2024 14:49:03 +0100 Subject: [PATCH 07/14] Fixed test --- test/integration/aws/test_export_vm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/aws/test_export_vm.py b/test/integration/aws/test_export_vm.py index 01e00fc1..2e4f37ce 100644 --- a/test/integration/aws/test_export_vm.py +++ b/test/integration/aws/test_export_vm.py @@ -112,5 +112,5 @@ def test_export_vm(aws_vm_export_mock, default_asset_id, vm_formats_to_test, tes expected_calls_copy.append(call(bucket=TEST_BUCKET_ID, source=source, dest=dest)) expected_calls_delete.append(call(bucket=TEST_BUCKET_ID, source=source)) - assert mock_cast(aws_vm_export_mock.copy_s3_object).call_args_list == expected_calls_copy + assert mock_cast(aws_vm_export_mock.transfer_to_s3).call_args_list == expected_calls_copy assert mock_cast(aws_vm_export_mock.delete_s3_object).call_args_list == expected_calls_delete From 648ba36f5e2e9f5168a4f0bec608bee4032bfb0e Mon Sep 17 00:00:00 2001 From: ckunki Date: Wed, 31 Jan 2024 14:50:04 +0100 Subject: [PATCH 08/14] Updated test to verify transfer_to_s3 with 16 MB to ensure multiple chunks --- test/codebuild/test_ci_s3_transfer_multipart.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/codebuild/test_ci_s3_transfer_multipart.py b/test/codebuild/test_ci_s3_transfer_multipart.py index 6398eee0..f8991f42 100644 --- a/test/codebuild/test_ci_s3_transfer_multipart.py +++ b/test/codebuild/test_ci_s3_transfer_multipart.py @@ -6,11 +6,11 @@ from exasol.ds.sandbox.lib.vm_bucket.vm_dss_bucket import find_vm_bucket # from dataclasses import dataclass -# +# # @dataclass # class Progress: # bytes: int = 0 -# +# # def report(self, bytes: int): # self.bytes += bytes # display = round(self.bytes / 1024 / 1024) @@ -24,7 +24,7 @@ def sample_file(tmp_path): """ file = tmp_path / "sample-file.txt" one_kb = "123456789 " * 102 + "1234" - file.write_text(one_kb * 1024 * 6) + file.write_text(one_kb * 1024 * 16) yield file file.unlink() From 19739083448743b3ccda6d71caa083329ace6b2e Mon Sep 17 00:00:00 
From 19739083448743b3ccda6d71caa083329ace6b2e Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 15:21:50 +0100
Subject: [PATCH 09/14] Added progress report for every 50 MB

---
 .../ds/sandbox/lib/aws_access/aws_access.py   | 21 +++++++++++++
 .../test_ci_s3_transfer_multipart.py          | 30 ++++++++-----------
 2 files changed, 34 insertions(+), 17 deletions(-)

diff --git a/exasol/ds/sandbox/lib/aws_access/aws_access.py b/exasol/ds/sandbox/lib/aws_access/aws_access.py
index b0a40067..465ab15b 100644
--- a/exasol/ds/sandbox/lib/aws_access/aws_access.py
+++ b/exasol/ds/sandbox/lib/aws_access/aws_access.py
@@ -3,6 +3,7 @@
 
 import boto3
 import botocore
+import humanfriendly
 
 from exasol.ds.sandbox.lib.aws_access.ami import Ami
 from exasol.ds.sandbox.lib.aws_access.cloudformation_stack import CloudformationStack
@@ -36,6 +37,22 @@ def wrapper(self, *args, **kwargs):
         return wrapper
 
 
+class Progress:
+    def __init__(self, report_every: str = "50 MB"):
+        self.report_every = humanfriendly.parse_size(report_every)
+        self.processed: int = 0
+        self.unreported: int = 0
+
+    def report(self, chunk: int):
+        self.unreported += chunk
+        if self.unreported < self.report_every:
+            return
+        self.processed += self.unreported
+        self.unreported = 0
+        display = round(self.processed / 1024 / 1024)
+        LOG.info(f'Transferred {display} MB ...')
+
+
 class AwsAccess(object):
     def __init__(self, aws_profile: Optional[str], region: Optional[str] = None):
         self._aws_profile = aws_profile
@@ -419,6 +436,7 @@ def copy_s3_object(self, bucket: str, source: str, dest: str):
         copy_source = {'Bucket': bucket, 'Key': source}
         cloud_client.copy_object(Bucket=bucket, CopySource=copy_source, Key=dest)
 
+
     @_log_function_start
     def transfer_to_s3(
         self,
@@ -437,6 +455,9 @@ def transfer_to_s3(
         """
         cloud_client = self._get_aws_client("s3")
         config = boto3.s3.transfer.TransferConfig()
+        if callback is None:
+            progress = Progress("50 MB")
+            callback = progress.report
         cloud_client.upload_file(source, bucket, dest, Config=config, Callback=callback)
diff --git a/test/codebuild/test_ci_s3_transfer_multipart.py b/test/codebuild/test_ci_s3_transfer_multipart.py
index f8991f42..17b89e01 100644
--- a/test/codebuild/test_ci_s3_transfer_multipart.py
+++ b/test/codebuild/test_ci_s3_transfer_multipart.py
@@ -2,48 +2,44 @@
 import pytest
 
 from exasol.ds.sandbox.lib.asset_id import AssetId
-from exasol.ds.sandbox.lib.aws_access.aws_access import AwsAccess
+from exasol.ds.sandbox.lib.aws_access.aws_access import (
+    AwsAccess,
+    Progress,
+)
 from exasol.ds.sandbox.lib.vm_bucket.vm_dss_bucket import find_vm_bucket
 
-# from dataclasses import dataclass
-#
-# @dataclass
-# class Progress:
-#     bytes: int = 0
-#
-#     def report(self, bytes: int):
-#         self.bytes += bytes
-#         display = round(self.bytes / 1024 / 1024)
-#         print(f'\rTransferred {display} MB ...', flush=True, end="")
+
+@pytest.fixture
+def sample_size_kb():
+    return 1024 * 16
 
 
 @pytest.fixture
-def sample_file(tmp_path):
+def sample_file(tmp_path, sample_size_kb):
     """
     Create a sample file for transfer to the S3 bucket.
     """
     file = tmp_path / "sample-file.txt"
     one_kb = "123456789 " * 102 + "1234"
-    file.write_text(one_kb * 1024 * 16)
+    file.write_text(one_kb * sample_size_kb)
     yield file
     file.unlink()
 
 
 @pytest.mark.skipif(os.environ.get('DSS_RUN_CI_TEST') != 'true',
                     reason="CI tests need to be activated by env variable DSS_RUN_CI_TEST")
-def test_s3_transfer_multipart(sample_file):
+def test_s3_transfer_multipart(sample_file, sample_size_kb):
     aws = AwsAccess(None)
     source = sample_file
     bucket = find_vm_bucket(aws)
     s3_key = f"{AssetId.BUCKET_PREFIX}-itest-sample-file"
-    # progress = Progress()
-    # print("")
+    progress = Progress("1 MB")
     try:
         aws.transfer_to_s3(
             bucket=bucket,
             source=source,
             dest=s3_key,
-            # callback=progress.report,
+            callback=progress.report,
         )
     finally:
         aws.delete_s3_object(bucket, s3_key)
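To illustrate how the new Progress class batches its output, here is a self-contained simulation; it is a simplified stand-in for demonstration only, with print replacing LOG.info and an explicit byte count replacing the humanfriendly parse:

```python
class Progress:
    """Simplified stand-in for the class added in patch 09."""
    def __init__(self, report_every_bytes: int):
        self.report_every = report_every_bytes
        self.processed = 0
        self.unreported = 0

    def report(self, chunk: int):
        self.unreported += chunk
        if self.unreported < self.report_every:
            return
        self.processed += self.unreported
        self.unreported = 0
        print(f"Transferred {round(self.processed / 1024 / 1024)} MB ...")

p = Progress(50 * 1024 * 1024)
for _ in range(20):              # twenty 8 MB chunks, 160 MB in total
    p.report(8 * 1024 * 1024)
# Prints at 56 MB and 112 MB. The final 48 MB stays below the 50 MB
# threshold and is never reported; the tail only becomes visible once
# further chunks push the batch over the threshold.
```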
""" file = tmp_path / "sample-file.txt" one_kb = "123456789 " * 102 + "1234" - file.write_text(one_kb * 1024 * 16) + file.write_text(one_kb * sample_size_kb) yield file file.unlink() @pytest.mark.skipif(os.environ.get('DSS_RUN_CI_TEST') != 'true', reason="CI test need to be activated by env variable DSS_RUN_CI_TEST") -def test_s3_transfer_multipart(sample_file): +def test_s3_transfer_multipart(sample_file, sample_size_kb): aws = AwsAccess(None) source = sample_file bucket = find_vm_bucket(aws) s3_key = f"{AssetId.BUCKET_PREFIX}-itest-sample-file" - # progress = Progress() - # print("") + progress = Progress("1 MB") try: aws.transfer_to_s3( bucket=bucket, source=source, dest=s3_key, - # callback=progress.report, + callback=progress.report, ) finally: aws.delete_s3_object(bucket, s3_key) From b18e77a78b9daf4ae86fe8ac7f3b1883e2b6cc1e Mon Sep 17 00:00:00 2001 From: ckunki Date: Wed, 31 Jan 2024 17:48:44 +0100 Subject: [PATCH 10/14] Changed implementation of transfer_to_s3 Download to temp file in local filesystem before using upload. Renamed method to upload_large_s3_object() --- .../ds/sandbox/lib/aws_access/aws_access.py | 76 ++++++++++++++----- .../test_ci_s3_transfer_multipart.py | 23 ++++-- 2 files changed, 72 insertions(+), 27 deletions(-) diff --git a/exasol/ds/sandbox/lib/aws_access/aws_access.py b/exasol/ds/sandbox/lib/aws_access/aws_access.py index 465ab15b..a5ade7b1 100644 --- a/exasol/ds/sandbox/lib/aws_access/aws_access.py +++ b/exasol/ds/sandbox/lib/aws_access/aws_access.py @@ -3,8 +3,11 @@ import boto3 import botocore +import os import humanfriendly +from tempfile import NamedTemporaryFile + from exasol.ds.sandbox.lib.aws_access.ami import Ami from exasol.ds.sandbox.lib.aws_access.cloudformation_stack import CloudformationStack from exasol.ds.sandbox.lib.aws_access.deployer import Deployer @@ -39,18 +42,22 @@ def wrapper(self, *args, **kwargs): class Progress: def __init__(self, report_every: str = "50 MB"): - self.report_every = humanfriendly.parse_size(report_every) - self.processed: int = 0 - self.unreported: int = 0 + self._report_every = humanfriendly.parse_size(report_every) + self._processed: int = 0 + self._unreported: int = 0 def report(self, chunk: int): - self.unreported += chunk - if self.unreported < self.report_every: + self._unreported += chunk + if self._unreported < self._report_every: return - self.processed += self.unreported - self.unreported = 0 - display = round(self.processed / 1024 / 1024) - LOG.info(f'Transferred {display} MB ...') + self._processed += self._unreported + self._unreported = 0 + display = round(self._processed / 1024 / 1024) + LOG.info(f'Transferred {display} MB') + + def reset(self): + self._processed = 0 + self._unreported = 0 class AwsAccess(object): @@ -436,29 +443,60 @@ def copy_s3_object(self, bucket: str, source: str, dest: str): copy_source = {'Bucket': bucket, 'Key': source} cloud_client.copy_object(Bucket=bucket, CopySource=copy_source, Key=dest) + @_log_function_start + def upload_large_s3_object( + self, + bucket: str, + source: str, + dest: str, + progress: Progress = None, + ): + """ + :param source: path in the local filesystem. 
From 0bdefc4840c8dac150c7e014157d0ebad02668b3 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 17:49:59 +0100
Subject: [PATCH 11/14] Updated method rename_image_in_s3()

---
 exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py b/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py
index 0ea659c5..9d11d030 100644
--- a/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py
+++ b/exasol/ds/sandbox/lib/export_vm/rename_s3_objects.py
@@ -39,5 +39,5 @@ def rename_image_in_s3(aws_access: AwsAccess, export_image_task: ExportImageTask
                            vm_image_format=vm_image_format)
     dest = build_image_destination(prefix=export_image_task.s3_prefix, asset_id=asset_id,
                                    vm_image_format=vm_image_format)
-    aws_access.transfer_to_s3(bucket=export_image_task.s3_bucket, source=source, dest=dest)
+    aws_access.copy_large_s3_object(bucket=export_image_task.s3_bucket, source=source, dest=dest)
     aws_access.delete_s3_object(bucket=export_image_task.s3_bucket, source=source)
From 57ebbf21318af6a5ec9b783eded1b3937ea81cb8 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 18:42:34 +0100
Subject: [PATCH 12/14] Fixed test_export_vm.py

---
 test/integration/aws/test_export_vm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/integration/aws/test_export_vm.py b/test/integration/aws/test_export_vm.py
index 2e4f37ce..88e24ce0 100644
--- a/test/integration/aws/test_export_vm.py
+++ b/test/integration/aws/test_export_vm.py
@@ -112,5 +112,5 @@ def test_export_vm(aws_vm_export_mock, default_asset_id, vm_formats_to_test, tes
         expected_calls_copy.append(call(bucket=TEST_BUCKET_ID, source=source, dest=dest))
         expected_calls_delete.append(call(bucket=TEST_BUCKET_ID, source=source))
 
-    assert mock_cast(aws_vm_export_mock.transfer_to_s3).call_args_list == expected_calls_copy
+    assert mock_cast(aws_vm_export_mock.copy_large_s3_object).call_args_list == expected_calls_copy
     assert mock_cast(aws_vm_export_mock.delete_s3_object).call_args_list == expected_calls_delete

From 511028e0ac0aa0423e45de075f4cf4abb9a12f51 Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 20:37:12 +0100
Subject: [PATCH 13/14] Fixed run_update_release

---
 exasol/ds/sandbox/lib/update_release/run_update_release.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/exasol/ds/sandbox/lib/update_release/run_update_release.py b/exasol/ds/sandbox/lib/update_release/run_update_release.py
index 84823506..0d6027dc 100644
--- a/exasol/ds/sandbox/lib/update_release/run_update_release.py
+++ b/exasol/ds/sandbox/lib/update_release/run_update_release.py
@@ -15,11 +15,11 @@ def run_update_release(aws_access: AwsAccess, gh_access: GithubReleaseAccess,
     additional_release_notes = render_template("additional_release_notes.jinja")
     with tempfile.TemporaryDirectory() as temp_dir:
         artifacts_file = f"{temp_dir}/artifacts.md"
-        asset_types = tuple(
+        asset_types = tuple((
             AssetTypes.DOCKER,
             AssetTypes.AMI,
             AssetTypes.VM_S3,
-        )
+        ))
         with open(artifacts_file, "w") as file:
             print_assets(aws_access, asset_id, file, asset_types)
         content = additional_release_notes + open(artifacts_file, "r").read()
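For context on patches 13 and 14: `tuple(a, b, c)` raises a TypeError because `tuple()` accepts at most one argument, `tuple((a, b, c))` converts an existing tuple and works, and a plain parenthesized literal is the simplest form:

```python
# tuple(1, 2, 3)       # TypeError: tuple expected at most 1 argument, got 3
t1 = tuple((1, 2, 3))  # patch 13: tuple() applied to an inner tuple literal
t2 = (1, 2, 3)         # patch 14: the literal alone is enough
assert t1 == t2
```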
From 1388236f5957e5d5f686e4a92fbe950557d2d5af Mon Sep 17 00:00:00 2001
From: ckunki
Date: Wed, 31 Jan 2024 20:40:52 +0100
Subject: [PATCH 14/14] Simplified run_update_release

---
 exasol/ds/sandbox/lib/update_release/run_update_release.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/exasol/ds/sandbox/lib/update_release/run_update_release.py b/exasol/ds/sandbox/lib/update_release/run_update_release.py
index 0d6027dc..da2a280b 100644
--- a/exasol/ds/sandbox/lib/update_release/run_update_release.py
+++ b/exasol/ds/sandbox/lib/update_release/run_update_release.py
@@ -15,11 +15,11 @@ def run_update_release(aws_access: AwsAccess, gh_access: GithubReleaseAccess,
     additional_release_notes = render_template("additional_release_notes.jinja")
     with tempfile.TemporaryDirectory() as temp_dir:
         artifacts_file = f"{temp_dir}/artifacts.md"
-        asset_types = tuple((
+        asset_types = (
             AssetTypes.DOCKER,
             AssetTypes.AMI,
             AssetTypes.VM_S3,
-        ))
+        )
         with open(artifacts_file, "w") as file:
             print_assets(aws_access, asset_id, file, asset_types)
         content = additional_release_notes + open(artifacts_file, "r").read()
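After the full series, the upload half of the reworked API can be exercised roughly as follows (bucket name, file path, and destination key are placeholders; the 100 MB reporting granularity is an arbitrary choice):

```python
from exasol.ds.sandbox.lib.aws_access.aws_access import AwsAccess, Progress

aws = AwsAccess(None)                    # default AWS profile, as in the CI test
aws.upload_large_s3_object(
    bucket="my-vm-bucket",
    source="/tmp/exasol-ai-lab.vmdk",
    dest="vm-images/exasol-ai-lab.vmdk",
    progress=Progress("100 MB"),         # log a line roughly every 100 MB
)
```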