Skip to content

Commit a971766

Browse files
pavibhai (Pavan Lanka)
authored and committed
Addressing review comments
1 parent 36151b2 commit a971766

File tree

11 files changed

+529
-272
lines changed

11 files changed

+529
-272
lines changed

integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -62,7 +62,6 @@ public abstract class PolarisSparkIntegrationTestBase {
6262
protected String sparkToken;
6363
protected String catalogName;
6464
protected String externalCatalogName;
65-
protected String s3Scheme = "s3";
6665

6766
protected URI warehouseDir;
6867

@@ -96,9 +95,9 @@ public void before(
9695
.setExternalId("externalId")
9796
.setUserArn("userArn")
9897
.setStorageType(StorageConfigInfo.StorageTypeEnum.S3)
99-
.setAllowedLocations(List.of(s3Scheme + "://my-old-bucket/path/to/data"))
98+
.setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
10099
.build();
101-
CatalogProperties props = new CatalogProperties(s3Scheme + "://my-bucket/path/to/data");
100+
CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
102101
props.putAll(
103102
Map.of(
104103
"table-default.s3.endpoint",
@@ -129,7 +128,7 @@ public void before(
129128

130129
managementApi.createCatalog(catalog);
131130

132-
CatalogProperties externalProps = new CatalogProperties(s3Scheme + "://my-bucket/path/to/data");
131+
CatalogProperties externalProps = new CatalogProperties("s3://my-bucket/path/to/data");
133132
externalProps.putAll(
134133
Map.of(
135134
"table-default.s3.endpoint",

plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java

Lines changed: 26 additions & 42 deletions
Original file line number | Diff line number | Diff line change
@@ -21,47 +21,31 @@
2121
import io.quarkus.test.junit.QuarkusIntegrationTest;
2222
import org.apache.spark.sql.SparkSession;
2323

24-
public class SparkCatalogIcebergIT {
25-
26-
abstract static class BaseTest extends SparkCatalogBaseIT {
27-
/** Initialize the spark catalog to use the iceberg spark catalog. */
28-
@Override
29-
protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) {
30-
return builder
31-
.config(
32-
"spark.sql.extensions",
33-
"org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
34-
.config(
35-
String.format("spark.sql.catalog.%s", catalogName),
36-
"org.apache.iceberg.spark.SparkCatalog")
37-
.config("spark.sql.warehouse.dir", warehouseDir.toString())
38-
.config(String.format("spark.sql.catalog.%s.type", catalogName), "rest")
39-
.config(
40-
String.format("spark.sql.catalog.%s.uri", catalogName),
41-
endpoints.catalogApiEndpoint().toString())
42-
.config(String.format("spark.sql.catalog.%s.warehouse", catalogName), catalogName)
43-
.config(String.format("spark.sql.catalog.%s.scope", catalogName), "PRINCIPAL_ROLE:ALL")
44-
.config(
45-
String.format("spark.sql.catalog.%s.header.realm", catalogName), endpoints.realmId())
46-
.config(String.format("spark.sql.catalog.%s.token", catalogName), sparkToken)
47-
.config(String.format("spark.sql.catalog.%s.s3.access-key-id", catalogName), "fakekey")
48-
.config(
49-
String.format("spark.sql.catalog.%s.s3.secret-access-key", catalogName), "fakesecret")
50-
.config(String.format("spark.sql.catalog.%s.s3.region", catalogName), "us-west-2");
51-
}
52-
}
53-
54-
@QuarkusIntegrationTest
55-
static class S3ATest extends BaseTest {
56-
public S3ATest() {
57-
s3Scheme = "s3a";
58-
}
59-
}
60-
61-
@QuarkusIntegrationTest
62-
static class S3Test extends BaseTest {
63-
public S3Test() {
64-
s3Scheme = "s3";
65-
}
24+
@QuarkusIntegrationTest
25+
public class SparkCatalogIcebergIT extends SparkCatalogBaseIT {
26+
/** Initialize the spark catalog to use the iceberg spark catalog. */
27+
@Override
28+
protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) {
29+
return builder
30+
.config(
31+
"spark.sql.extensions",
32+
"org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
33+
.config(
34+
String.format("spark.sql.catalog.%s", catalogName),
35+
"org.apache.iceberg.spark.SparkCatalog")
36+
.config("spark.sql.warehouse.dir", warehouseDir.toString())
37+
.config(String.format("spark.sql.catalog.%s.type", catalogName), "rest")
38+
.config(
39+
String.format("spark.sql.catalog.%s.uri", catalogName),
40+
endpoints.catalogApiEndpoint().toString())
41+
.config(String.format("spark.sql.catalog.%s.warehouse", catalogName), catalogName)
42+
.config(String.format("spark.sql.catalog.%s.scope", catalogName), "PRINCIPAL_ROLE:ALL")
43+
.config(
44+
String.format("spark.sql.catalog.%s.header.realm", catalogName), endpoints.realmId())
45+
.config(String.format("spark.sql.catalog.%s.token", catalogName), sparkToken)
46+
.config(String.format("spark.sql.catalog.%s.s3.access-key-id", catalogName), "fakekey")
47+
.config(
48+
String.format("spark.sql.catalog.%s.s3.secret-access-key", catalogName), "fakesecret")
49+
.config(String.format("spark.sql.catalog.%s.s3.region", catalogName), "us-west-2");
6650
}
6751
}

plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogPolarisIT.java

Lines changed: 2 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -20,18 +20,5 @@
2020

2121
import io.quarkus.test.junit.QuarkusIntegrationTest;
2222

23-
public class SparkCatalogPolarisIT {
24-
@QuarkusIntegrationTest
25-
static class S3ATest extends SparkCatalogBaseIT {
26-
public S3ATest() {
27-
s3Scheme = "s3a";
28-
}
29-
}
30-
31-
@QuarkusIntegrationTest
32-
static class S3Test extends SparkCatalogBaseIT {
33-
public S3Test() {
34-
s3Scheme = "s3";
35-
}
36-
}
37-
}
23+
@QuarkusIntegrationTest
24+
public class SparkCatalogPolarisIT extends SparkCatalogBaseIT {}

0 commit comments

Comments (0)