@@ -64,6 +64,10 @@ allprojects {
6464 mavenCentral()
6565 mavenLocal()
6666 }
67+ project.ext {
68+   Spark30Version = '3.0.1'
69+   Spark31Version = '3.1.1'
70+ }
6771}
6872
6973subprojects {
@@ -977,6 +981,21 @@ if (jdkVersion == '8') {
977981}
978982
979983 project(':iceberg-spark3') {
984+   sourceSets {
985+     // Compile test source against Spark 3.1 and main classes compiled against Spark 3.0
986+     spark31 {
987+       java.srcDir "$projectDir/src/test/java"
988+       resources.srcDir "$projectDir/src/test/resources"
989+       compileClasspath += sourceSets.test.output + sourceSets.main.output
990+       runtimeClasspath += sourceSets.test.output
991+     }
992+   }
993+
994+   configurations {
995+     spark31Implementation.extendsFrom testImplementation
996+     spark31RuntimeOnly.extendsFrom testRuntimeOnly
997+   }
998+
980999 dependencies {
9811000 compile project(' :iceberg-api' )
9821001 compile project(' :iceberg-common' )
@@ -989,7 +1008,7 @@ project(':iceberg-spark3') {
9891008 compile project(' :iceberg-spark' )
9901009
9911010 compileOnly " org.apache.avro:avro"
992-     compileOnly("org.apache.spark:spark-hive_2.12") {
1011+     compileOnly("org.apache.spark:spark-hive_2.12:${project.ext.Spark30Version}") {
9931012       exclude group: 'org.apache.avro', module: 'avro'
9941013       exclude group: 'org.apache.arrow'
9951014     }
@@ -1003,9 +1022,14 @@ project(':iceberg-spark3') {
10031022 testCompile project(path : ' :iceberg-api' , configuration : ' testArtifacts' )
10041023 testCompile project(path : ' :iceberg-data' , configuration : ' testArtifacts' )
10051024 testCompile " org.xerial:sqlite-jdbc"
1025+
1026+ spark31Implementation(" org.apache.spark:spark-hive_2.12:${ project.ext.Spark31Version} " ) {
1027+ exclude group : ' org.apache.avro' , module : ' avro'
1028+ exclude group : ' org.apache.arrow'
1029+ }
10061030 }
10071031
1008-   test {
1032+   tasks.withType(Test) {
10091033 // For vectorized reads
10101034 // Allow unsafe memory access to avoid the costly check arrow does to check if index is within bounds
10111035 systemProperty(" arrow.enable_unsafe_memory_access" , " true" )
@@ -1014,16 +1038,39 @@ project(':iceberg-spark3') {
10141038 systemProperty(" arrow.enable_null_check_for_get" , " false" )
10151039
10161040 // Vectorized reads need more memory
1017-     maxHeapSize '2500m'
1041+     maxHeapSize '2560m'
1042+   }
1043+
1044+   task testSpark31(type: Test) {
1045+     dependsOn classes
1046+     description = "Test against Spark 3.1"
1047+     testClassesDirs = sourceSets.spark31.output.classesDirs
1048+     classpath = sourceSets.spark31.runtimeClasspath + sourceSets.main.output
10181049 }
1050+
1051+   test.dependsOn testSpark31
10191052}
10201053
10211054project(" :iceberg-spark3-extensions" ) {
10221055 apply plugin : ' java'
10231056 apply plugin : ' scala'
10241057 apply plugin : ' antlr'
10251058
1059+   sourceSets {
1060+     // Compile test source against Spark 3.1 and main classes compiled against Spark 3.0
1061+     spark31 {
1062+       // Main source is in scala, but test source is only in java
1063+       java.srcDir "$projectDir/src/test/java"
1064+       resources.srcDir "$projectDir/src/test/resources"
1065+       compileClasspath += sourceSets.test.output + sourceSets.main.output
1066+       runtimeClasspath += sourceSets.test.output
1067+     }
1068+   }
1069+
10261070   configurations {
1071+     spark31Implementation.extendsFrom testImplementation
1072+     spark31RuntimeOnly.extendsFrom testRuntimeOnly
1073+
10271074 /*
10281075 The Gradle Antlr plugin erroneously adds both antlr-build and runtime dependencies to the runtime path. This
10291076 bug https://github.com/gradle/gradle/issues/820 exists because older versions of Antlr do not have separate
@@ -1037,10 +1084,9 @@ project(":iceberg-spark3-extensions") {
10371084 }
10381085
10391086 dependencies {
1040-     compileOnly project(':iceberg-spark3')
1041-
10421087     compileOnly "org.scala-lang:scala-library"
1043-     compileOnly("org.apache.spark:spark-hive_2.12") {
1088+     compileOnly project(':iceberg-spark3')
1089+     compileOnly("org.apache.spark:spark-hive_2.12:${project.ext.Spark30Version}") {
10441090       exclude group: 'org.apache.avro', module: 'avro'
10451091       exclude group: 'org.apache.arrow'
10461092     }
@@ -1050,6 +1096,11 @@ project(":iceberg-spark3-extensions") {
10501096 testCompile project(path : ' :iceberg-spark' , configuration : ' testArtifacts' )
10511097 testCompile project(path : ' :iceberg-spark3' , configuration : ' testArtifacts' )
10521098
1099+     spark31Implementation("org.apache.spark:spark-hive_2.12:${project.ext.Spark31Version}") {
1100+       exclude group: 'org.apache.avro', module: 'avro'
1101+       exclude group: 'org.apache.arrow'
1102+     }
1103+
10531104 // Required because we remove antlr plugin dependencies from the compile configuration, see note above
10541105 // We shade this in Spark3 Runtime to avoid issues with Spark's Antlr Runtime
10551106 runtime " org.antlr:antlr4-runtime:4.7.1"
@@ -1060,6 +1111,15 @@ project(":iceberg-spark3-extensions") {
10601111 maxHeapSize = " 64m"
10611112 arguments + = [' -visitor' , ' -package' , ' org.apache.spark.sql.catalyst.parser.extensions' ]
10621113 }
1114+
1115+   task testSpark31(type: Test) {
1116+     dependsOn classes
1117+     description = "Test against Spark 3.1"
1118+     testClassesDirs = sourceSets.spark31.output.classesDirs
1119+     classpath = sourceSets.spark31.runtimeClasspath + sourceSets.main.output
1120+   }
1121+
1122+   test.dependsOn testSpark31
10631123}
10641124
10651125project(' :iceberg-spark3-runtime' ) {
@@ -1072,6 +1132,12 @@ project(':iceberg-spark3-runtime') {
10721132 java. srcDir " $projectDir /src/integration/java"
10731133 resources. srcDir " $projectDir /src/integration/resources"
10741134 }
1135+     spark31 {
1136+       java.srcDir "$projectDir/src/integration/java"
1137+       resources.srcDir "$projectDir/src/integration/resources"
1138+       compileClasspath += sourceSets.integration.output
1139+       runtimeClasspath += sourceSets.integration.output
1140+     }
10751141 }
10761142
10771143 configurations {
@@ -1086,6 +1152,8 @@ project(':iceberg-spark3-runtime') {
10861152 exclude group : ' javax.xml.bind'
10871153 exclude group : ' javax.annotation'
10881154 }
1155+     spark31Implementation.extendsFrom integrationImplementation
1156+     spark31CompileOnly.extendsFrom integrationCompileOnly
10891157 }
10901158
10911159 dependencies {
@@ -1096,7 +1164,7 @@ project(':iceberg-spark3-runtime') {
10961164 exclude group : ' com.google.code.findbugs' , module : ' jsr305'
10971165 }
10981166
1099-     integrationImplementation 'org.apache.spark:spark-hive_2.12'
1167+     integrationImplementation "org.apache.spark:spark-hive_2.12:${project.ext.Spark30Version}"
11001168 integrationImplementation ' junit:junit'
11011169 integrationImplementation ' org.slf4j:slf4j-simple'
11021170 integrationImplementation project(path : ' :iceberg-api' , configuration : ' testArtifacts' )
@@ -1107,6 +1175,8 @@ project(':iceberg-spark3-runtime') {
11071175 // Not allowed on our classpath, only the runtime jar is allowed
11081176 integrationCompileOnly project(' :iceberg-spark3-extensions' )
11091177 integrationCompileOnly project(' :iceberg-spark3' )
1178+
1179+     spark31Implementation "org.apache.spark:spark-hive_2.12:${project.ext.Spark31Version}"
11101180 }
11111181
11121182 shadowJar {
@@ -1144,14 +1214,24 @@ project(':iceberg-spark3-runtime') {
11441214 }
11451215
11461216 task integrationTest(type : Test ) {
1147-     description = "Test Spark3 Runtime Jar"
1217+     description = "Test Spark3 Runtime Jar against Spark 3.0"
11481218 group = " verification"
11491219 testClassesDirs = sourceSets. integration. output. classesDirs
11501220 classpath = sourceSets. integration. runtimeClasspath + files(shadowJar. archiveFile. get(). asFile. path)
11511221 inputs. file(shadowJar. archiveFile. get(). asFile. path)
11521222 }
11531223 integrationTest. dependsOn shadowJar
1154-   check.dependsOn integrationTest
1224+
1225+   task spark31IntegrationTest(type: Test) {
1226+     dependsOn classes
1227+     description = "Test Spark3 Runtime Jar against Spark 3.1"
1228+     group = "verification"
1229+     testClassesDirs = sourceSets.spark31.output.classesDirs
1230+     classpath = sourceSets.spark31.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path)
1231+   }
1232+   spark31IntegrationTest.dependsOn shadowJar
1233+
1234+   check.dependsOn integrationTest, spark31IntegrationTest
11551235
11561236 jar {
11571237 enabled = false
0 commit comments