Skip to content

Commit eb04048

Browse files
chore(deps): bump s3mock from 3.11.0 to 4.7.0
Updates the S3Mock testcontainer dependency from 3.11.0 to 4.7.0 and refactors usage into a centralized wrapper class in runtime/test-common. Changes: upgraded the S3Mock testcontainer to 4.7.0; created an S3Mock wrapper class for consistent configuration; consolidated S3 config properties generation; updated integration tests to use the new wrapper. No functional changes to test behavior.
1 parent 96f1459 commit eb04048

File tree

6 files changed

+91
-72
lines changed

6 files changed

+91
-72
lines changed

integration-tests/build.gradle.kts

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,10 +48,6 @@ dependencies {
4848

4949
implementation(libs.auth0.jwt)
5050

51-
implementation(platform(libs.testcontainers.bom))
52-
implementation("org.testcontainers:testcontainers")
53-
implementation(libs.s3mock.testcontainers)
54-
5551
implementation("org.apache.iceberg:iceberg-spark-3.5_2.12")
5652
implementation("org.apache.iceberg:iceberg-spark-extensions-3.5_2.12")
5753
implementation("org.apache.spark:spark-sql_2.12:3.5.6") {
@@ -69,6 +65,7 @@ dependencies {
6965
implementation(libs.assertj.core)
7066
implementation(libs.mockito.core)
7167
implementation(libs.awaitility)
68+
implementation(project(":polaris-runtime-test-common"))
7269
}
7370

7471
copiedCodeChecks {

integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java

Lines changed: 10 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,10 @@
2020

2121
import static org.apache.polaris.service.it.env.PolarisClient.polarisClient;
2222

23-
import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
2423
import java.io.IOException;
2524
import java.net.URI;
2625
import java.nio.file.Path;
2726
import java.util.List;
28-
import java.util.Map;
2927
import org.apache.polaris.core.admin.model.AwsStorageConfigInfo;
3028
import org.apache.polaris.core.admin.model.Catalog;
3129
import org.apache.polaris.core.admin.model.CatalogProperties;
@@ -38,6 +36,7 @@
3836
import org.apache.polaris.service.it.env.ManagementApi;
3937
import org.apache.polaris.service.it.env.PolarisApiEndpoints;
4038
import org.apache.polaris.service.it.env.PolarisClient;
39+
import org.apache.polaris.test.commons.s3mock.S3Mock;
4140
import org.apache.spark.sql.Dataset;
4241
import org.apache.spark.sql.Row;
4342
import org.apache.spark.sql.SparkSession;
@@ -52,8 +51,10 @@
5251

5352
@ExtendWith(PolarisIntegrationTestExtension.class)
5453
public abstract class PolarisSparkIntegrationTestBase {
55-
protected static final S3MockContainer s3Container =
56-
new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket");
54+
protected static final String S3_BUCKET_NAMES = "my-bucket,my-old-bucket";
55+
protected static final String S3_ACCESS_KEY = "ap1";
56+
protected static final String S3_SECRET_KEY = "s3cr3t";
57+
protected static S3Mock s3Container;
5758
protected static SparkSession spark;
5859
protected PolarisApiEndpoints endpoints;
5960
protected PolarisClient client;
@@ -67,6 +68,7 @@ public abstract class PolarisSparkIntegrationTestBase {
6768

6869
@BeforeAll
6970
public static void setup() throws IOException {
71+
s3Container = new S3Mock(S3_BUCKET_NAMES);
7072
s3Container.start();
7173
}
7274

@@ -98,26 +100,8 @@ public void before(
98100
.setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
99101
.build();
100102
CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
101-
props.putAll(
102-
Map.of(
103-
"table-default.s3.endpoint",
104-
s3Container.getHttpEndpoint(),
105-
"table-default.s3.path-style-access",
106-
"true",
107-
"table-default.s3.access-key-id",
108-
"foo",
109-
"table-default.s3.secret-access-key",
110-
"bar",
111-
"s3.endpoint",
112-
s3Container.getHttpEndpoint(),
113-
"s3.path-style-access",
114-
"true",
115-
"s3.access-key-id",
116-
"foo",
117-
"s3.secret-access-key",
118-
"bar",
119-
"polaris.config.drop-with-purge.enabled",
120-
"true"));
103+
props.putAll(s3Container.getS3ConfigProperties(S3_ACCESS_KEY, S3_SECRET_KEY));
104+
props.put("polaris.config.drop-with-purge.enabled", "true");
121105
Catalog catalog =
122106
PolarisCatalog.builder()
123107
.setType(Catalog.TypeEnum.INTERNAL)
@@ -129,26 +113,8 @@ public void before(
129113
managementApi.createCatalog(catalog);
130114

131115
CatalogProperties externalProps = new CatalogProperties("s3://my-bucket/path/to/data");
132-
externalProps.putAll(
133-
Map.of(
134-
"table-default.s3.endpoint",
135-
s3Container.getHttpEndpoint(),
136-
"table-default.s3.path-style-access",
137-
"true",
138-
"table-default.s3.access-key-id",
139-
"foo",
140-
"table-default.s3.secret-access-key",
141-
"bar",
142-
"s3.endpoint",
143-
s3Container.getHttpEndpoint(),
144-
"s3.path-style-access",
145-
"true",
146-
"s3.access-key-id",
147-
"foo",
148-
"s3.secret-access-key",
149-
"bar",
150-
"polaris.config.drop-with-purge.enabled",
151-
"true"));
116+
externalProps.putAll(s3Container.getS3ConfigProperties(S3_ACCESS_KEY, S3_SECRET_KEY));
117+
externalProps.put("polaris.config.drop-with-purge.enabled", "true");
152118
Catalog externalCatalog =
153119
ExternalCatalog.builder()
154120
.setType(Catalog.TypeEnum.EXTERNAL)

plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java

Lines changed: 8 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,13 @@
1818
*/
1919
package org.apache.polaris.spark.quarkus.it;
2020

21-
import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
2221
import com.google.common.collect.ImmutableList;
2322
import com.google.errorprone.annotations.FormatMethod;
2423
import java.io.File;
2524
import java.io.IOException;
2625
import java.net.URI;
2726
import java.nio.file.Path;
2827
import java.util.List;
29-
import java.util.Map;
3028
import java.util.UUID;
3129
import java.util.stream.Collectors;
3230
import java.util.stream.IntStream;
@@ -44,6 +42,7 @@
4442
import org.apache.polaris.service.it.env.PolarisApiEndpoints;
4543
import org.apache.polaris.service.it.ext.PolarisIntegrationTestExtension;
4644
import org.apache.polaris.service.it.ext.SparkSessionBuilder;
45+
import org.apache.polaris.test.commons.s3mock.S3Mock;
4746
import org.apache.spark.sql.Dataset;
4847
import org.apache.spark.sql.Row;
4948
import org.apache.spark.sql.SparkSession;
@@ -58,8 +57,10 @@
5857

5958
@ExtendWith(PolarisIntegrationTestExtension.class)
6059
public abstract class SparkIntegrationBase {
61-
protected static final S3MockContainer s3Container =
62-
new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket");
60+
protected static final String S3_BUCKET_NAMES = "my-bucket,my-old-bucket";
61+
protected static final String S3_ACCESS_KEY = "ap1";
62+
protected static final String S3_SECRET_KEY = "s3cr3t";
63+
protected static S3Mock s3Container;
6364
protected static SparkSession spark;
6465
protected PolarisApiEndpoints endpoints;
6566
protected PolarisManagementClient client;
@@ -71,6 +72,7 @@ public abstract class SparkIntegrationBase {
7172

7273
@BeforeAll
7374
public static void setup() throws IOException {
75+
s3Container = new S3Mock(S3_BUCKET_NAMES);
7476
s3Container.start();
7577
}
7678

@@ -100,26 +102,8 @@ public void before(
100102
.setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
101103
.build();
102104
CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
103-
props.putAll(
104-
Map.of(
105-
"table-default.s3.endpoint",
106-
s3Container.getHttpEndpoint(),
107-
"table-default.s3.path-style-access",
108-
"true",
109-
"table-default.s3.access-key-id",
110-
"foo",
111-
"table-default.s3.secret-access-key",
112-
"bar",
113-
"s3.endpoint",
114-
s3Container.getHttpEndpoint(),
115-
"s3.path-style-access",
116-
"true",
117-
"s3.access-key-id",
118-
"foo",
119-
"s3.secret-access-key",
120-
"bar",
121-
"polaris.config.drop-with-purge.enabled",
122-
"true"));
105+
props.putAll(s3Container.getS3ConfigProperties(S3_ACCESS_KEY, S3_SECRET_KEY));
106+
props.put("polaris.config.drop-with-purge.enabled", "true");
123107
Catalog catalog =
124108
PolarisCatalog.builder()
125109
.setType(Catalog.TypeEnum.INTERNAL)

runtime/test-common/build.gradle.kts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ configurations.all {
3232
}
3333

3434
dependencies {
35+
api(libs.s3mock.testcontainers)
3536
implementation(project(":polaris-core"))
3637
implementation(libs.jakarta.ws.rs.api)
3738
implementation(enforcedPlatform(libs.quarkus.bom))
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing,
13+
* software distributed under the License is distributed on an
14+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
* KIND, either express or implied. See the License for the
16+
* specific language governing permissions and limitations
17+
* under the License.
18+
*/
19+
20+
package org.apache.polaris.test.commons.s3mock;
21+
22+
import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
23+
import java.util.Map;
24+
import org.apache.polaris.containerspec.ContainerSpecHelper;
25+
26+
public class S3Mock extends S3MockContainer {
27+
28+
public S3Mock() {
29+
super(ContainerSpecHelper.containerSpecHelper("s3mock", S3Mock.class).dockerImageName(null));
30+
}
31+
32+
public S3Mock(String initialBuckets) {
33+
this();
34+
this.withInitialBuckets(initialBuckets);
35+
}
36+
37+
public Map<String, String> getS3ConfigProperties(String accessKey, String secretKey) {
38+
String endpoint = this.getHttpEndpoint();
39+
return Map.of(
40+
"table-default.s3.endpoint", endpoint,
41+
"table-default.s3.path-style-access", "true",
42+
"table-default.s3.access-key-id", accessKey,
43+
"table-default.s3.secret-access-key", secretKey,
44+
"s3.endpoint", endpoint,
45+
"s3.path-style-access", "true",
46+
"s3.access-key-id", accessKey,
47+
"s3.secret-access-key", secretKey);
48+
}
49+
}
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
#
2+
# Licensed to the Apache Software Foundation (ASF) under one
3+
# or more contributor license agreements. See the NOTICE file
4+
# distributed with this work for additional information
5+
# regarding copyright ownership. The ASF licenses this file
6+
# to you under the Apache License, Version 2.0 (the
7+
# "License"); you may not use this file except in compliance
8+
# with the License. You may obtain a copy of the License at
9+
#
10+
# http://www.apache.org/licenses/LICENSE-2.0
11+
#
12+
# Unless required by applicable law or agreed to in writing,
13+
# software distributed under the License is distributed on an
14+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
# KIND, either express or implied. See the License for the
16+
# specific language governing permissions and limitations
17+
# under the License.
18+
#
19+
20+
# Dockerfile to provide the image name and tag to a test.
21+
# Version is managed by Renovate - do not edit.
22+
FROM adobe/s3mock:4.7.0

0 commit comments

Comments
 (0)