diff --git a/Jenkinsfile b/Jenkinsfile
index b6b01ef538d8..bfbdb46977e5 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -58,7 +58,7 @@ pipeline {
'build-gpu-cuda8.0': { BuildCUDA(cuda_version: '8.0') },
'build-gpu-cuda9.2': { BuildCUDA(cuda_version: '9.2') },
'build-gpu-cuda10.0': { BuildCUDA(cuda_version: '10.0') },
- 'build-jvm-packages': { BuildJVMPackages(spark_version: '2.4.1') },
+ 'build-jvm-packages': { BuildJVMPackages(spark_version: '2.4.3') },
'build-jvm-doc': { BuildJVMDoc() }
])
}
@@ -77,7 +77,7 @@ pipeline {
'test-python-mgpu-cuda10.0': { TestPythonGPU(cuda_version: '10.0', multi_gpu: true) },
'test-cpp-gpu': { TestCppGPU(cuda_version: '10.0') },
'test-cpp-mgpu': { TestCppGPU(cuda_version: '10.0', multi_gpu: true) },
- 'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8') },
+ 'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8', spark_version: '2.4.3') },
'test-jvm-jdk11': { CrossTestJVMwithJDK(jdk_version: '11') },
'test-jvm-jdk12': { CrossTestJVMwithJDK(jdk_version: '12') },
'test-r-3.4.4': { TestR(use_r35: false) },
@@ -213,7 +213,7 @@ def BuildJVMPackages(args) {
// Use only 4 CPU cores
def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='--cpuset-cpus 0-3'"
sh """
- ${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_packages.sh
+ ${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_packages.sh ${args.spark_version}
"""
echo 'Stashing XGBoost4J JAR...'
stash name: 'xgboost4j_jar', includes: 'jvm-packages/xgboost4j/target/*.jar,jvm-packages/xgboost4j-spark/target/*.jar,jvm-packages/xgboost4j-example/target/*.jar'
@@ -299,12 +299,17 @@ def CrossTestJVMwithJDK(args) {
node('linux && cpu') {
unstash name: 'xgboost4j_jar'
unstash name: 'srcs'
- echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}"
+ if (args.spark_version != null) {
+ echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}, Spark ${args.spark_version}"
+ } else {
+ echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}"
+ }
def container_type = "jvm_cross"
def docker_binary = "docker"
- def docker_args = "--build-arg JDK_VERSION=${args.jdk_version}"
- // Only run integration tests for JDK 8, as Spark doesn't support later JDKs yet
- def docker_extra_params = (args.jdk_version == '8') ? "CI_DOCKER_EXTRA_PARAMS_INIT='-e RUN_INTEGRATION_TEST=1'" : ""
+ def spark_arg = (args.spark_version != null) ? "--build-arg SPARK_VERSION=${args.spark_version}" : ""
+ def docker_args = "--build-arg JDK_VERSION=${args.jdk_version} ${spark_arg}"
+ // Run integration tests only when spark_version is given
+ def docker_extra_params = (args.spark_version != null) ? "CI_DOCKER_EXTRA_PARAMS_INIT='-e RUN_INTEGRATION_TEST=1'" : ""
sh """
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/test_jvm_cross.sh
"""
diff --git a/jvm-packages/pom.xml b/jvm-packages/pom.xml
index d25cf51b1deb..c9a406713080 100644
--- a/jvm-packages/pom.xml
+++ b/jvm-packages/pom.xml
@@ -34,7 +34,7 @@
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
<flink.version>1.5.0</flink.version>
- <spark.version>2.4.1</spark.version>
+ <spark.version>2.4.3</spark.version>
<scala.version>2.11.12</scala.version>
<scala.binary.version>2.11</scala.binary.version>
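Since `spark.version` is an ordinary Maven property, the default pinned here can still be overridden per build; the updated build script below does exactly that:

    mvn --no-transfer-progress package -Dspark.version=2.4.3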
diff --git a/tests/ci_build/Dockerfile.jvm_cross b/tests/ci_build/Dockerfile.jvm_cross
index d6a6a5d69a11..aeb44ff82cd0 100644
--- a/tests/ci_build/Dockerfile.jvm_cross
+++ b/tests/ci_build/Dockerfile.jvm_cross
@@ -1,5 +1,6 @@
FROM ubuntu:19.04
ARG JDK_VERSION=8
+ARG SPARK_VERSION=2.4.3
# Environment
ENV DEBIAN_FRONTEND noninteractive
@@ -19,9 +20,9 @@ RUN \
tar xvf apache-maven-3.6.1-bin.tar.gz -C /opt && \
ln -s /opt/apache-maven-3.6.1/ /opt/maven && \
# Spark
- wget https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz && \
- tar xvf spark-2.4.1-bin-hadoop2.7.tgz -C /opt && \
- ln -s /opt/spark-2.4.1-bin-hadoop2.7 /opt/spark
+ wget https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop2.7.tgz && \
+ tar xvf spark-$SPARK_VERSION-bin-hadoop2.7.tgz -C /opt && \
+ ln -s /opt/spark-$SPARK_VERSION-bin-hadoop2.7 /opt/spark
ENV PATH=/opt/python/bin:/opt/spark/bin:/opt/maven/bin:$PATH
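For local debugging, the same image can be built outside CI by passing both build args explicitly (a sketch; the tag and build context here are arbitrary choices):

    docker build -f tests/ci_build/Dockerfile.jvm_cross \
        --build-arg JDK_VERSION=11 --build-arg SPARK_VERSION=2.4.3 \
        -t xgboost-jvm-cross tests/ci_build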
diff --git a/tests/ci_build/build_jvm_packages.sh b/tests/ci_build/build_jvm_packages.sh
index 5dcc95a0feb0..e342c8f8909c 100755
--- a/tests/ci_build/build_jvm_packages.sh
+++ b/tests/ci_build/build_jvm_packages.sh
@@ -3,13 +3,20 @@
set -e
set -x
+if [ $# -ne 1 ]; then
+ echo "Usage: $0 [spark version]"
+ exit 1
+fi
+
+spark_version=$1
+
# Initialize local Maven repository
./tests/ci_build/initialize_maven.sh
rm -rf build/
cd jvm-packages
-mvn --no-transfer-progress package
+mvn --no-transfer-progress package -Dspark.version=${spark_version}
set +x
set +e
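The Spark version is now a required positional argument, so a local invocation looks like:

    tests/ci_build/build_jvm_packages.sh 2.4.3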
diff --git a/tests/ci_build/ci_build.sh b/tests/ci_build/ci_build.sh
index 56cf85467bfa..37a98dd804d7 100755
--- a/tests/ci_build/ci_build.sh
+++ b/tests/ci_build/ci_build.sh
@@ -51,11 +51,11 @@ if [[ "$1" == "-it" ]]; then
shift 1
fi
-if [[ "$1" == "--build-arg" ]]; then
- CI_DOCKER_BUILD_ARG+="$1"
+while [[ "$1" == "--build-arg" ]]; do
+ CI_DOCKER_BUILD_ARG+=" $1"
CI_DOCKER_BUILD_ARG+=" $2"
shift 2
-fi
+done
if [[ ! -f "${DOCKERFILE_PATH}" ]]; then
echo "Invalid Dockerfile path: \"${DOCKERFILE_PATH}\""