diff --git a/getting-started/eclipselink/docker-compose.yml b/getting-started/eclipselink/docker-compose.yml
index 15480c43ff..74a6f311f6 100644
--- a/getting-started/eclipselink/docker-compose.yml
+++ b/getting-started/eclipselink/docker-compose.yml
@@ -79,7 +79,7 @@ services:
       retries: 15
     command: [
       /opt/spark/bin/spark-sql,
-      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1,org.apache.iceberg:iceberg-gcp-bundle:1.9.1,org.apache.iceberg:iceberg-azure-bundle:1.9.1",
+      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0,org.apache.iceberg:iceberg-gcp-bundle:1.10.0,org.apache.iceberg:iceberg-azure-bundle:1.10.0",
       --conf, "spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
       --conf, "spark.sql.catalog.quickstart_catalog=org.apache.iceberg.spark.SparkCatalog",
       --conf, "spark.sql.catalog.quickstart_catalog.type=rest",
diff --git a/getting-started/jdbc/docker-compose.yml b/getting-started/jdbc/docker-compose.yml
index dad756f0d2..f1077f63a5 100644
--- a/getting-started/jdbc/docker-compose.yml
+++ b/getting-started/jdbc/docker-compose.yml
@@ -81,7 +81,7 @@ services:
       retries: 15
     command: [
       /opt/spark/bin/spark-sql,
-      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1,org.apache.iceberg:iceberg-gcp-bundle:1.9.1,org.apache.iceberg:iceberg-azure-bundle:1.9.1",
+      --packages, "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0,org.apache.iceberg:iceberg-gcp-bundle:1.10.0,org.apache.iceberg:iceberg-azure-bundle:1.10.0",
       --conf, "spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
       --conf, "spark.sql.catalog.polaris=org.apache.iceberg.spark.SparkCatalog",
       --conf, "spark.sql.catalog.polaris.type=rest",
diff --git a/getting-started/spark/notebooks/SparkPolaris.ipynb b/getting-started/spark/notebooks/SparkPolaris.ipynb
index 76e046e7ce..0e55137974 100644
--- a/getting-started/spark/notebooks/SparkPolaris.ipynb
+++ b/getting-started/spark/notebooks/SparkPolaris.ipynb
@@ -256,7 +256,7 @@
     "\n",
     "spark = (SparkSession.builder\n",
     "  .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.iceberg.spark.SparkSessionCatalog\")\n",
-    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1\")\n",
+    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0\")\n",
     "  .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
     "  \n",
     "  # Configure the 'polaris' catalog as an Iceberg rest catalog\n",
diff --git a/plugins/pluginlibs.versions.toml b/plugins/pluginlibs.versions.toml
index 895e286b19..37b7696b41 100644
--- a/plugins/pluginlibs.versions.toml
+++ b/plugins/pluginlibs.versions.toml
@@ -18,7 +18,7 @@
 #
 
 [versions]
-iceberg = "1.9.1"
+iceberg = "1.10.0"
 spark35 = "3.5.6"
 scala212 = "2.12.19"
 scala213 = "2.13.15"
diff --git a/plugins/spark/README.md b/plugins/spark/README.md
index e925150d64..c3ac4180c0 100644
--- a/plugins/spark/README.md
+++ b/plugins/spark/README.md
@@ -50,7 +50,7 @@ Run the following command to build the Polaris Spark project and publish the sou
 
 ```shell
 bin/spark-shell \
---packages org.apache.polaris:polaris-spark-<spark_version>_<scala_version>:<polaris_version>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages org.apache.polaris:polaris-spark-<spark_version>_<scala_version>:<polaris_version>,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.<catalog_name>.warehouse=<warehouse_name> \
@@ -73,7 +73,7 @@ The Spark command would look like following:
 
 ```shell
 bin/spark-shell \
---packages org.apache.polaris:polaris-spark-3.5_2.12:1.1.0-incubating-SNAPSHOT,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages org.apache.polaris:polaris-spark-3.5_2.12:1.1.0-incubating-SNAPSHOT,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.polaris.warehouse=polaris \
@@ -99,7 +99,7 @@ To start Spark using the bundle JAR, specify it with the `--jars` option as show
 ```shell
 bin/spark-shell \
 --jars <path_to_bundle_jar> \
---packages org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
+--packages org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \
 --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \
 --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \
 --conf spark.sql.catalog.<catalog_name>.warehouse=<warehouse_name> \
diff --git a/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb b/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
index baa2e94ba4..ad8f7cf443 100644
--- a/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
+++ b/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
@@ -266,7 +266,7 @@
     "\n",
     "spark = (SparkSession.builder\n",
     "  .config(\"spark.jars\", \"../polaris_libs/polaris-spark-3.5_2.12-1.1.0-incubating-SNAPSHOT-bundle.jar\") # TODO: add a way to automatically discover the Jar\n",
-    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.2.1\")\n",
+    "  .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.2.1\")\n",
     "  .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\")\n",
     "  .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
     "\n",
diff --git a/regtests/setup.sh b/regtests/setup.sh
index 2c19a6d944..f6c67b353d 100755
--- a/regtests/setup.sh
+++ b/regtests/setup.sh
@@ -31,7 +31,7 @@ if [ -z "${SPARK_HOME}" ]; then
 fi
 SPARK_CONF="${SPARK_HOME}/conf/spark-defaults.conf"
 DERBY_HOME="/tmp/derby"
-ICEBERG_VERSION="1.9.1"
+ICEBERG_VERSION="1.10.0"
 export PYTHONPATH="${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
 
 # Ensure binaries are downloaded locally
diff --git a/regtests/t_pyspark/src/iceberg_spark.py b/regtests/t_pyspark/src/iceberg_spark.py
index 23a5465cf9..7d866bde2e 100644
--- a/regtests/t_pyspark/src/iceberg_spark.py
+++ b/regtests/t_pyspark/src/iceberg_spark.py
@@ -73,8 +73,8 @@ def __enter__(self):
         """Initial method for Iceberg Spark session. Creates a Spark session with specified configs.
""" packages = [ - "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1", - "org.apache.iceberg:iceberg-aws-bundle:1.9.1", + "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0", + "org.apache.iceberg:iceberg-aws-bundle:1.10.0", ] excludes = ["org.checkerframework:checker-qual", "com.google.errorprone:error_prone_annotations"] diff --git a/site/content/in-dev/unreleased/getting-started/using-polaris.md b/site/content/in-dev/unreleased/getting-started/using-polaris.md index 5ce5c3c0d3..ec509a404b 100644 --- a/site/content/in-dev/unreleased/getting-started/using-polaris.md +++ b/site/content/in-dev/unreleased/getting-started/using-polaris.md @@ -158,7 +158,7 @@ _Note: the credentials provided here are those for our principal, not the root c ```shell bin/spark-sql \ ---packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1 \ +--packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0,org.apache.iceberg:iceberg-aws-bundle:1.10.0 \ --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ --conf spark.sql.catalog.quickstart_catalog.warehouse=quickstart_catalog \ --conf spark.sql.catalog.quickstart_catalog.header.X-Iceberg-Access-Delegation=vended-credentials \ diff --git a/site/content/in-dev/unreleased/polaris-spark-client.md b/site/content/in-dev/unreleased/polaris-spark-client.md index 3d597f19f4..b78168e195 100644 --- a/site/content/in-dev/unreleased/polaris-spark-client.md +++ b/site/content/in-dev/unreleased/polaris-spark-client.md @@ -59,7 +59,7 @@ a released Polaris Spark client. ```shell bin/spark-shell \ ---packages ,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \ +--packages ,org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1 \ --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension \ --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog \ --conf spark.sql.catalog..warehouse= \ @@ -87,7 +87,7 @@ You can also start the connection by programmatically initialize a SparkSession, from pyspark.sql import SparkSession spark = SparkSession.builder - .config("spark.jars.packages", ",org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1") + .config("spark.jars.packages", ",org.apache.iceberg:iceberg-aws-bundle:1.10.0,io.delta:delta-spark_2.12:3.3.1") .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog") .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension") .config("spark.sql.catalog.", "org.apache.polaris.spark.SparkCatalog")