Skip to content

Commit d2c60b3

Browse files
committed
chore(deps): bump s3mock from 3.11.0 to 4.7.0
Updates the S3Mock testcontainer dependency from 3.11.0 to 4.7.0 and refactors usage into a centralized wrapper class in runtime/test-common.

Changes:
- Upgraded the S3Mock testcontainer to 4.7.0
- Created an S3Mock wrapper class for consistent configuration
- Consolidated generation of the S3 config properties
- Updated integration tests to use the new wrapper

No functional changes to test behavior.
1 parent 96f1459 commit d2c60b3

File tree

6 files changed

+91
-72
lines changed

6 files changed

+91
-72
lines changed

integration-tests/build.gradle.kts

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,10 +48,6 @@ dependencies {
4848

4949
implementation(libs.auth0.jwt)
5050

51-
implementation(platform(libs.testcontainers.bom))
52-
implementation("org.testcontainers:testcontainers")
53-
implementation(libs.s3mock.testcontainers)
54-
5551
implementation("org.apache.iceberg:iceberg-spark-3.5_2.12")
5652
implementation("org.apache.iceberg:iceberg-spark-extensions-3.5_2.12")
5753
implementation("org.apache.spark:spark-sql_2.12:3.5.6") {
@@ -69,6 +65,8 @@ dependencies {
6965
implementation(libs.assertj.core)
7066
implementation(libs.mockito.core)
7167
implementation(libs.awaitility)
68+
implementation(libs.s3mock.testcontainers)
69+
implementation(project(":polaris-runtime-test-common"))
7270
}
7371

7472
copiedCodeChecks {

integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java

Lines changed: 6 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,10 @@
2020

2121
import static org.apache.polaris.service.it.env.PolarisClient.polarisClient;
2222

23-
import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
2423
import java.io.IOException;
2524
import java.net.URI;
2625
import java.nio.file.Path;
2726
import java.util.List;
28-
import java.util.Map;
2927
import org.apache.polaris.core.admin.model.AwsStorageConfigInfo;
3028
import org.apache.polaris.core.admin.model.Catalog;
3129
import org.apache.polaris.core.admin.model.CatalogProperties;
@@ -38,6 +36,7 @@
3836
import org.apache.polaris.service.it.env.ManagementApi;
3937
import org.apache.polaris.service.it.env.PolarisApiEndpoints;
4038
import org.apache.polaris.service.it.env.PolarisClient;
39+
import org.apache.polaris.test.commons.s3mock.S3Mock;
4140
import org.apache.spark.sql.Dataset;
4241
import org.apache.spark.sql.Row;
4342
import org.apache.spark.sql.SparkSession;
@@ -52,8 +51,7 @@
5251

5352
@ExtendWith(PolarisIntegrationTestExtension.class)
5453
public abstract class PolarisSparkIntegrationTestBase {
55-
protected static final S3MockContainer s3Container =
56-
new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket");
54+
protected static final S3Mock s3Container = new S3Mock();
5755
protected static SparkSession spark;
5856
protected PolarisApiEndpoints endpoints;
5957
protected PolarisClient client;
@@ -98,26 +96,8 @@ public void before(
9896
.setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
9997
.build();
10098
CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
101-
props.putAll(
102-
Map.of(
103-
"table-default.s3.endpoint",
104-
s3Container.getHttpEndpoint(),
105-
"table-default.s3.path-style-access",
106-
"true",
107-
"table-default.s3.access-key-id",
108-
"foo",
109-
"table-default.s3.secret-access-key",
110-
"bar",
111-
"s3.endpoint",
112-
s3Container.getHttpEndpoint(),
113-
"s3.path-style-access",
114-
"true",
115-
"s3.access-key-id",
116-
"foo",
117-
"s3.secret-access-key",
118-
"bar",
119-
"polaris.config.drop-with-purge.enabled",
120-
"true"));
99+
props.putAll(s3Container.getS3ConfigProperties());
100+
props.put("polaris.config.drop-with-purge.enabled", "true");
121101
Catalog catalog =
122102
PolarisCatalog.builder()
123103
.setType(Catalog.TypeEnum.INTERNAL)
@@ -129,26 +109,8 @@ public void before(
129109
managementApi.createCatalog(catalog);
130110

131111
CatalogProperties externalProps = new CatalogProperties("s3://my-bucket/path/to/data");
132-
externalProps.putAll(
133-
Map.of(
134-
"table-default.s3.endpoint",
135-
s3Container.getHttpEndpoint(),
136-
"table-default.s3.path-style-access",
137-
"true",
138-
"table-default.s3.access-key-id",
139-
"foo",
140-
"table-default.s3.secret-access-key",
141-
"bar",
142-
"s3.endpoint",
143-
s3Container.getHttpEndpoint(),
144-
"s3.path-style-access",
145-
"true",
146-
"s3.access-key-id",
147-
"foo",
148-
"s3.secret-access-key",
149-
"bar",
150-
"polaris.config.drop-with-purge.enabled",
151-
"true"));
112+
externalProps.putAll(s3Container.getS3ConfigProperties());
113+
externalProps.put("polaris.config.drop-with-purge.enabled", "true");
152114
Catalog externalCatalog =
153115
ExternalCatalog.builder()
154116
.setType(Catalog.TypeEnum.EXTERNAL)

plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java

Lines changed: 4 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,13 @@
1818
*/
1919
package org.apache.polaris.spark.quarkus.it;
2020

21-
import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
2221
import com.google.common.collect.ImmutableList;
2322
import com.google.errorprone.annotations.FormatMethod;
2423
import java.io.File;
2524
import java.io.IOException;
2625
import java.net.URI;
2726
import java.nio.file.Path;
2827
import java.util.List;
29-
import java.util.Map;
3028
import java.util.UUID;
3129
import java.util.stream.Collectors;
3230
import java.util.stream.IntStream;
@@ -44,6 +42,7 @@
4442
import org.apache.polaris.service.it.env.PolarisApiEndpoints;
4543
import org.apache.polaris.service.it.ext.PolarisIntegrationTestExtension;
4644
import org.apache.polaris.service.it.ext.SparkSessionBuilder;
45+
import org.apache.polaris.test.commons.s3mock.S3Mock;
4746
import org.apache.spark.sql.Dataset;
4847
import org.apache.spark.sql.Row;
4948
import org.apache.spark.sql.SparkSession;
@@ -58,8 +57,7 @@
5857

5958
@ExtendWith(PolarisIntegrationTestExtension.class)
6059
public abstract class SparkIntegrationBase {
61-
protected static final S3MockContainer s3Container =
62-
new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket");
60+
protected static final S3Mock s3Container = new S3Mock();
6361
protected static SparkSession spark;
6462
protected PolarisApiEndpoints endpoints;
6563
protected PolarisManagementClient client;
@@ -100,26 +98,8 @@ public void before(
10098
.setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
10199
.build();
102100
CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
103-
props.putAll(
104-
Map.of(
105-
"table-default.s3.endpoint",
106-
s3Container.getHttpEndpoint(),
107-
"table-default.s3.path-style-access",
108-
"true",
109-
"table-default.s3.access-key-id",
110-
"foo",
111-
"table-default.s3.secret-access-key",
112-
"bar",
113-
"s3.endpoint",
114-
s3Container.getHttpEndpoint(),
115-
"s3.path-style-access",
116-
"true",
117-
"s3.access-key-id",
118-
"foo",
119-
"s3.secret-access-key",
120-
"bar",
121-
"polaris.config.drop-with-purge.enabled",
122-
"true"));
101+
props.putAll(s3Container.getS3ConfigProperties());
102+
props.put("polaris.config.drop-with-purge.enabled", "true");
123103
Catalog catalog =
124104
PolarisCatalog.builder()
125105
.setType(Catalog.TypeEnum.INTERNAL)

runtime/test-common/build.gradle.kts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ configurations.all {
3232
}
3333

3434
dependencies {
35+
implementation(libs.s3mock.testcontainers)
3536
implementation(project(":polaris-core"))
3637
implementation(libs.jakarta.ws.rs.api)
3738
implementation(enforcedPlatform(libs.quarkus.bom))
Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.polaris.test.commons.s3mock;

import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
import java.util.HashMap;
import java.util.Map;
import org.apache.polaris.containerspec.ContainerSpecHelper;

/**
 * Testcontainers wrapper around Adobe S3Mock with Polaris-wide defaults.
 *
 * <p>The container image and tag are resolved through {@link ContainerSpecHelper} (presumably
 * backed by this module's s3mock Dockerfile, whose version is Renovate-managed — confirm) rather
 * than a hard-coded version string. By default the container starts with the buckets used by the
 * Polaris integration tests.
 */
public class S3Mock extends S3MockContainer {

  private static final String DEFAULT_BUCKETS = "my-bucket,my-old-bucket";
  // S3Mock does not validate credentials; these are arbitrary but must match what clients send.
  private static final String DEFAULT_ACCESS_KEY = "ap1";
  private static final String DEFAULT_SECRET_KEY = "s3cr3t";

  /** Creates a container pre-populated with the default integration-test buckets. */
  public S3Mock() {
    this(DEFAULT_BUCKETS);
  }

  /**
   * Creates a container pre-populated with the given buckets.
   *
   * @param initialBuckets comma-separated bucket names to create at startup
   */
  public S3Mock(String initialBuckets) {
    super(
        ContainerSpecHelper.containerSpecHelper("s3mock", S3Mock.class)
            .dockerImageName(null)
            .asCompatibleSubstituteFor("adobe/s3mock"));
    this.withInitialBuckets(initialBuckets);
  }

  /**
   * Returns the S3 configuration properties pointing clients at this container, both as plain
   * {@code s3.*} settings and as {@code table-default.s3.*} overrides.
   *
   * <p>The container must already be started, since the mapped HTTP endpoint is read here.
   *
   * @return an unmodifiable map of S3 configuration properties
   */
  public Map<String, String> getS3ConfigProperties() {
    String endpoint = this.getHttpEndpoint();
    Map<String, String> clientSettings =
        Map.of(
            "s3.endpoint", endpoint,
            "s3.path-style-access", "true",
            "s3.access-key-id", DEFAULT_ACCESS_KEY,
            "s3.secret-access-key", DEFAULT_SECRET_KEY);
    // Expose every setting twice: once as-is and once under the "table-default." prefix.
    Map<String, String> properties = new HashMap<>();
    for (Map.Entry<String, String> setting : clientSettings.entrySet()) {
      properties.put(setting.getKey(), setting.getValue());
      properties.put("table-default." + setting.getKey(), setting.getValue());
    }
    return Map.copyOf(properties);
  }
}
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Dockerfile to provide the image name and tag to a test.
# This image is never built; the FROM line is parsed at test time to resolve
# the S3Mock container image (presumably via ContainerSpecHelper — confirm).
# Version is managed by Renovate - do not edit.
FROM docker.io/adobe/s3mock:4.7.0

0 commit comments

Comments
 (0)