Skip to content

Commit 8f1614c

Browse files
authored
Bump: Iceberg client in tests and documentation to 1.10 (apache#2588)
1 parent b1fc3d2 commit 8f1614c

File tree

10 files changed

+14
-14
lines changed

10 files changed

+14
-14
lines changed

getting-started/eclipselink/docker-compose.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -79,7 +79,7 @@ services:
       retries: 15
     command: [
       /opt/spark/bin/spark-sql,
-      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1,org.apache.iceberg:iceberg-gcp-bundle:1.9.1,org.apache.iceberg:iceberg-azure-bundle:1.9.1",
+      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0,org.apache.iceberg:iceberg-gcp-bundle:1.10.0,org.apache.iceberg:iceberg-azure-bundle:1.10.0",
       --conf, "spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
       --conf, "spark.sql.catalog.quickstart_catalog=org.apache.iceberg.spark.SparkCatalog",
       --conf, "spark.sql.catalog.quickstart_catalog.type=rest",
```

getting-started/jdbc/docker-compose.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -81,7 +81,7 @@ services:
       retries: 15
     command: [
       /opt/spark/bin/spark-sql,
-      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1,org.apache.iceberg:iceberg-gcp-bundle:1.9.1,org.apache.iceberg:iceberg-azure-bundle:1.9.1",
+      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0,org.apache.iceberg:iceberg-gcp-bundle:1.10.0,org.apache.iceberg:iceberg-azure-bundle:1.10.0",
       --conf, "spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
       --conf, "spark.sql.catalog.polaris=org.apache.iceberg.spark.SparkCatalog",
       --conf, "spark.sql.catalog.polaris.type=rest",
```

getting-started/spark/notebooks/SparkPolaris.ipynb

Lines changed: 1 addition & 1 deletion
```diff
@@ -256,7 +256,7 @@
     "\n",
     "spark = (SparkSession.builder\n",
     "  .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.iceberg.spark.SparkSessionCatalog\")\n",
-    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1\")\n",
+    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0\")\n",
     "  .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
     " \n",
     "  # Configure the 'polaris' catalog as an Iceberg rest catalog\n",
```

plugins/pluginlibs.versions.toml

Lines changed: 1 addition & 1 deletion
```diff
@@ -18,7 +18,7 @@
 #

 [versions]
-iceberg = "1.9.1"
+iceberg = "1.10.0"
 spark35 = "3.5.6"
 scala212 = "2.12.19"
 scala213 = "2.13.15"
```

plugins/spark/README.md

Lines changed: 3 additions & 3 deletions
```diff
@@ -50,7 +50,7 @@ Run the following command to build the Polaris Spark project and publish the sou

 ```shell
 bin/spark-shell \
---packages org.apache.polaris:polaris-spark-<spark_version>_<scala_version>:<polaris_version>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages org.apache.polaris:polaris-spark-<spark_version>_<scala_version>:<polaris_version>,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.<catalog-name>.warehouse=<catalog-name> \
@@ -73,7 +73,7 @@ The Spark command would look like following:

 ```shell
 bin/spark-shell \
---packages org.apache.polaris:polaris-spark-3.5_2.12:1.1.0-incubating-SNAPSHOT,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages org.apache.polaris:polaris-spark-3.5_2.12:1.1.0-incubating-SNAPSHOT,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.polaris.warehouse=polaris \
@@ -99,7 +99,7 @@ To start Spark using the bundle JAR, specify it with the `--jars` option as show
 ```shell
 bin/spark-shell \
 --jars <path-to-spark-client-jar> \
---packages org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.<catalog-name>.warehouse=<catalog-name> \
```

plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb

Lines changed: 1 addition & 1 deletion
```diff
@@ -266,7 +266,7 @@
     "\n",
     "spark = (SparkSession.builder\n",
     "  .config(\"spark.jars\", \"../polaris_libs/polaris-spark-3.5_2.12-1.1.0-incubating-SNAPSHOT-bundle.jar\") # TODO: add a way to automatically discover the Jar\n",
-    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.2.1\")\n",
+    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.2.1\")\n",
     "  .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\")\n",
     "  .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
     "\n",
```

regtests/setup.sh

Lines changed: 1 addition & 1 deletion
```diff
@@ -31,7 +31,7 @@ if [ -z "${SPARK_HOME}" ]; then
 fi
 SPARK_CONF="${SPARK_HOME}/conf/spark-defaults.conf"
 DERBY_HOME="/tmp/derby"
-ICEBERG_VERSION="1.9.1"
+ICEBERG_VERSION="1.10.0"
 export PYTHONPATH="${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"

 # Ensure binaries are downloaded locally
```

regtests/t_pyspark/src/iceberg_spark.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -73,8 +73,8 @@ def __enter__(self):
     """Initial method for Iceberg Spark session. Creates a Spark session with specified configs.
     """
     packages = [
-        "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1",
-        "org.apache.iceberg:iceberg-aws-bundle:1.9.1",
+        "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0",
+        "org.apache.iceberg:iceberg-aws-bundle:1.10.0",
     ]
     excludes = ["org.checkerframework:checker-qual", "com.google.errorprone:error_prone_annotations"]
```
site/content/in-dev/unreleased/getting-started/using-polaris.md

Lines changed: 1 addition & 1 deletion
```diff
@@ -158,7 +158,7 @@ _Note: the credentials provided here are those for our principal, not the root c

 ```shell
 bin/spark-sql \
---packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1 \
+--packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
 --conf spark.sql.catalog.quickstart_catalog.warehouse=quickstart_catalog \
 --conf spark.sql.catalog.quickstart_catalog.header.X-Iceberg-Access-Delegation=vended-credentials \
```

site/content/in-dev/unreleased/polaris-spark-client.md

Lines changed: 2 additions & 2 deletions
```diff
@@ -59,7 +59,7 @@ a released Polaris Spark client.

 ```shell
 bin/spark-shell \
---packages <polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages <polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.<spark-catalog-name>.warehouse=<polaris-catalog-name> \
@@ -87,7 +87,7 @@ You can also start the connection by programmatically initialize a SparkSession,
 from pyspark.sql import SparkSession

 spark = SparkSession.builder
-  .config("spark.jars.packages", "<polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1")
+  .config("spark.jars.packages", "<polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1")
   .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
   .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension")
   .config("spark.sql.catalog.<spark-catalog-name>", "org.apache.polaris.spark.SparkCatalog")
```

0 commit comments

Comments
 (0)