Commit 59aff04

address feedback
1 parent e12453c commit 59aff04

3 files changed: +17 -4 lines changed


.github/workflows/spark_client_regtests.yml

Lines changed: 1 addition & 1 deletion

@@ -58,4 +58,4 @@ jobs:
           AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
           AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
         run: |
-          docker compose -f plugins/spark/v3.5/regtests/docker-compose.yml up --build --exit-code-from regtest
+          docker compose -f plugins/spark/v3.5/regtests/docker-compose.yml up --build --exit-code-from regtest
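
The command this workflow step runs can also be exercised locally. A minimal sketch, assuming Docker is installed and real AWS credentials replace the placeholder values (the variable names match the secrets the workflow passes through):

```shell
# placeholder values; export real credentials before running
export AWS_ACCESS_KEY_ID=<access-key-id>
export AWS_SECRET_ACCESS_KEY=<secret-access-key>
docker compose -f plugins/spark/v3.5/regtests/docker-compose.yml up --build --exit-code-from regtest
```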

plugins/spark/v3.5/regtests/README.md

Lines changed: 1 addition & 1 deletion

@@ -96,5 +96,5 @@ Running the test harness will automatically run the idempotent setup script. From the root of the
 project, just run:
 
 ```shell
-env POLARIS_HOST=localhost ./regtests/run.sh
+env POLARIS_HOST=localhost ./plugins/spark/v3.5/regtests/run.sh
 ```

plugins/spark/v3.5/regtests/setup.sh

Lines changed: 15 additions & 2 deletions

@@ -17,10 +17,20 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-# Idempotent setup for regression tests. Run manually or let run.sh auto-run.
+###################################
+# Idempotent setup for Spark regression tests. Run manually or let run.sh auto-run.
 #
 # Warning - first time setup may download large amounts of files
 # Warning - may clobber conf/spark-defaults.conf
+# Warning - it will set the SPARK_HOME environment variable to the local Spark setup
+#
+# The script can also be called independently, as follows:
+#   ./setup.sh --sparkVersion ${SPARK_VERSION} --scalaVersion ${SCALA_VERSION} --jar ${JAR_PATH}
+# Required parameters:
+#   --sparkVersion : the Spark version to set up
+#   --scalaVersion : the Scala version of Spark to set up
+#   --jar          : path to the local Polaris Spark client jar
+#
 
 set -x
 
@@ -131,12 +141,15 @@ spark.sql.variable.substitute true
 spark.driver.extraJavaOptions -Dderby.system.home=${DERBY_HOME}
 
 spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension
+# this configuration is needed for Delta tables
 spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog
 spark.sql.catalog.polaris=org.apache.polaris.spark.SparkCatalog
-spark.sql.catalog.polaris.type=rest
 spark.sql.catalog.polaris.uri=http://${POLARIS_HOST:-localhost}:8181/api/catalog
+# this configuration is used to enable credential vending
 spark.sql.catalog.polaris.header.X-Iceberg-Access-Delegation=vended-credentials
 spark.sql.catalog.polaris.client.region=us-west-2
+# configuration required to ensure DataSourceV2 load works correctly for
+# different table formats
 spark.sql.sources.useV1SourceList=''
 EOF
 echo 'Success!'
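
For reference, a hypothetical standalone invocation of setup.sh using the parameters documented in its header (the version numbers and jar path below are illustrative, not taken from the commit):

```shell
# illustrative values; substitute your actual Spark/Scala versions and client jar path
./setup.sh --sparkVersion 3.5.3 --scalaVersion 2.12 --jar /path/to/polaris-spark-client.jar
```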
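
With the generated spark-defaults.conf, the polaris catalog should serve both Iceberg and Delta tables through the Polaris Spark client. A rough smoke-test sketch under those assumptions (the namespace and table names are hypothetical, and a Polaris server must be reachable at POLARIS_HOST):

```shell
# hypothetical names; run from the SPARK_HOME that setup.sh configured
bin/spark-sql -e "
  CREATE NAMESPACE IF NOT EXISTS polaris.smoke;
  CREATE TABLE polaris.smoke.t_iceberg (id BIGINT) USING ICEBERG;
  CREATE TABLE polaris.smoke.t_delta (id BIGINT) USING DELTA;
  SHOW TABLES IN polaris.smoke;
"
```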
