From 7834eefb96173d95d4cbe4ae85f02f1913c3e626 Mon Sep 17 00:00:00 2001
From: Haithem SOUALA <75131226+haithem-souala@users.noreply.github.com>
Date: Wed, 24 Aug 2022 11:46:29 +0200
Subject: [PATCH] :fix: Destination S3: connector check ignores the s3_bucket_path (#15207)

* fix: use correct var for s3 bucket path
* typo
* fix - use bucket path instead of bucket name
* fix: use correct var for s3 bucket path
* typo
* fix - use bucket path instead of bucket name
* add fix to changelog
* solve conflict
* solve conflict
* fix - use bucket path instead of bucket name
* solve md file conflict
* solve dockerfile conflict
* auto-bump connector version [ci skip]
* add eof

Co-authored-by: Marcos Marx
Co-authored-by: marcosmarxm
Co-authored-by: Octavia Squidington III
---
 .../src/main/resources/seed/destination_definitions.yaml | 2 +-
 .../init/src/main/resources/seed/destination_specs.yaml  | 2 +-
 .../connectors/destination-s3/Dockerfile                 | 2 +-
 .../integrations/destination/s3/S3Destination.java       | 9 +++++----
 docs/integrations/destinations/s3.md                     | 1 +
 5 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index ce69188600df..a498f6336fc2 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -250,7 +250,7 @@
 - name: S3
   destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
   dockerRepository: airbyte/destination-s3
-  dockerImageTag: 0.3.13
+  dockerImageTag: 0.3.14
   documentationUrl: https://docs.airbyte.io/integrations/destinations/s3
   icon: s3.svg
   resourceRequirements:
diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
index 9ea1ab07fa66..0d6c1b8c2d3f 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
@@ -4118,7 +4118,7 @@
     supported_destination_sync_modes:
     - "append"
     - "overwrite"
-- dockerImage: "airbyte/destination-s3:0.3.13"
+- dockerImage: "airbyte/destination-s3:0.3.14"
   spec:
     documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
     connectionSpecification:
diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile
index d0a8e9d0a05b..7dfc0dda81f5 100644
--- a/airbyte-integrations/connectors/destination-s3/Dockerfile
+++ b/airbyte-integrations/connectors/destination-s3/Dockerfile
@@ -41,5 +41,5 @@ RUN /bin/bash -c 'set -e && \
     echo "unknown arch" ;\
   fi'
 
-LABEL io.airbyte.version=0.3.13
+LABEL io.airbyte.version=0.3.14
 LABEL io.airbyte.name=airbyte/destination-s3
diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java
index 338ba132cdc4..5569ab7fc5a1 100644
--- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java
+++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java
@@ -59,7 +59,7 @@ public AirbyteConnectionStatus check(final JsonNode config) {
       final S3StorageOperations storageOperations = new S3StorageOperations(nameTransformer, s3Client, destinationConfig);
 
       // Test for writing, list and delete
-      S3Destination.attemptS3WriteAndDelete(storageOperations, destinationConfig, destinationConfig.getBucketName());
+      S3Destination.attemptS3WriteAndDelete(storageOperations, destinationConfig, destinationConfig.getBucketPath());
 
       // Test single upload (for small files) permissions
       testSingleUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath());
@@ -136,16 +136,17 @@ static void attemptS3WriteAndDelete(final S3StorageOperations storageOperations,
                                       final AmazonS3 s3) {
     final var prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/");
     final String outputTableName = prefix + "_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "");
-    attemptWriteAndDeleteS3Object(storageOperations, s3Config, outputTableName, s3);
+    attemptWriteAndDeleteS3Object(storageOperations, s3Config, outputTableName, s3, bucketPath);
   }
 
   private static void attemptWriteAndDeleteS3Object(final S3StorageOperations storageOperations,
                                                     final S3DestinationConfig s3Config,
                                                     final String outputTableName,
-                                                    final AmazonS3 s3) {
+                                                    final AmazonS3 s3,
+                                                    final String bucketPath) {
     final var s3Bucket = s3Config.getBucketName();
 
-    storageOperations.createBucketObjectIfNotExists(s3Bucket);
+    storageOperations.createBucketObjectIfNotExists(bucketPath);
     s3.putObject(s3Bucket, outputTableName, "check-content");
     testIAMUserHasListObjectPermission(s3, s3Bucket);
     s3.deleteObject(s3Bucket, outputTableName);
diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md
index 6d8613c00401..ae777a492e9a 100644
--- a/docs/integrations/destinations/s3.md
+++ b/docs/integrations/destinations/s3.md
@@ -320,6 +320,7 @@ In order for everything to work correctly, it is also necessary that the user wh
 
 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------|
+| 0.3.14 | 2022-08-24 | [\#15207](https://github.com/airbytehq/airbyte/pull/15207) | Fix S3 bucket path to be used for check. |
 | 0.3.13 | 2022-08-09 | [\#15394](https://github.com/airbytehq/airbyte/pull/15394) | Added LZO compression support to Parquet format |
 | 0.3.12 | 2022-08-05 | [\#14801](https://github.com/airbytehq/airbyte/pull/14801) | Fix multiple log bindings |
 | 0.3.11 | 2022-07-15 | [\#14494](https://github.com/airbytehq/airbyte/pull/14494) | Make S3 output filename configurable. |
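
Note on the behavior change (illustrative sketch, not part of the patch): with this fix, the connection check writes and deletes its probe object under the configured s3_bucket_path instead of the bucket root, and createBucketObjectIfNotExists is now called with that path rather than the bucket name. The standalone Java sketch below shows how the probe key is composed under an assumed bucket path; only the prefix rule mirrors attemptS3WriteAndDelete, while the class name, connectionTestKey helper, and sample path "data/airbyte" are hypothetical stand-ins.

// Illustrative sketch (not part of the patch): how the fixed connection check
// builds its probe-object key under the configured bucket path.
import java.util.UUID;

public final class S3CheckKeySketch {

  static String connectionTestKey(final String bucketPath) {
    // Same prefix rule as attemptS3WriteAndDelete: an empty path means no prefix;
    // otherwise ensure exactly one trailing slash before the randomized probe name.
    final String prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/");
    return prefix + "_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "");
  }

  public static void main(final String[] args) {
    // Before the fix the check effectively probed the bucket root; after it,
    // a bucket path such as "data/airbyte" yields a key like
    // "data/airbyte/_airbyte_connection_test_<uuid>".
    System.out.println(connectionTestKey("data/airbyte"));
    System.out.println(connectionTestKey(""));
  }
}

The practical effect is that check now validates write, list, and delete permissions on the same prefix that actual syncs will use, so a credential scoped to only the bucket path no longer passes or fails the check incorrectly.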