[BLOCKING][CI] Upgrade to Spark 2.4.3 #4414

Merged (11 commits) on May 10, 2019
Changes from 4 commits
19 changes: 12 additions & 7 deletions Jenkinsfile
@@ -58,7 +58,7 @@ pipeline {
         'build-gpu-cuda8.0': { BuildCUDA(cuda_version: '8.0') },
         'build-gpu-cuda9.2': { BuildCUDA(cuda_version: '9.2') },
         'build-gpu-cuda10.0': { BuildCUDA(cuda_version: '10.0') },
-        'build-jvm-packages': { BuildJVMPackages(spark_version: '2.4.1') },
+        'build-jvm-packages': { BuildJVMPackages(spark_version: '2.4.2') },
         'build-jvm-doc': { BuildJVMDoc() }
       ])
     }
@@ -77,7 +77,7 @@ pipeline {
         'test-python-mgpu-cuda10.0': { TestPythonGPU(cuda_version: '10.0', multi_gpu: true) },
         'test-cpp-gpu': { TestCppGPU(cuda_version: '10.0') },
         'test-cpp-mgpu': { TestCppGPU(cuda_version: '10.0', multi_gpu: true) },
-        'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8') },
+        'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8', spark_version: '2.4.2') },
         'test-jvm-jdk11': { CrossTestJVMwithJDK(jdk_version: '11') },
         'test-jvm-jdk12': { CrossTestJVMwithJDK(jdk_version: '12') },
         'test-r-3.4.4': { TestR(use_r35: false) },
@@ -212,7 +212,7 @@ def BuildJVMPackages(args) {
     // Use only 4 CPU cores
     def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='--cpuset-cpus 0-3'"
     sh """
-    ${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_packages.sh
+    ${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_packages.sh ${args.spark_version}
     """
     echo 'Stashing XGBoost4J JAR...'
     stash name: 'xgboost4j_jar', includes: 'jvm-packages/xgboost4j/target/*.jar,jvm-packages/xgboost4j-spark/target/*.jar,jvm-packages/xgboost4j-example/target/*.jar'
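
With spark_version threaded through BuildJVMPackages, the sh step collapses to a single container invocation. A minimal sketch of the expanded command, assuming spark_version: '2.4.2' and that ${dockerRun} resolves to this repo's tests/ci_build/ci_build.sh wrapper (the "jvm" container type is hypothetical, since it is defined outside this hunk):

    CI_DOCKER_EXTRA_PARAMS_INIT='--cpuset-cpus 0-3' \
      tests/ci_build/ci_build.sh jvm docker \
      tests/ci_build/build_jvm_packages.sh 2.4.2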
@@ -298,12 +298,17 @@ def CrossTestJVMwithJDK(args) {
   node('linux && cpu') {
     unstash name: 'xgboost4j_jar'
     unstash name: 'srcs'
-    echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}"
+    if (args.spark_version != null) {
+      echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}, Spark ${args.spark_version}"
+    } else {
+      echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}"
+    }
     def container_type = "jvm_cross"
     def docker_binary = "docker"
-    def docker_args = "--build-arg JDK_VERSION=${args.jdk_version}"
-    // Only run integration tests for JDK 8, as Spark doesn't support later JDKs yet
-    def docker_extra_params = (args.jdk_version == '8') ? "CI_DOCKER_EXTRA_PARAMS_INIT='-e RUN_INTEGRATION_TEST=1'" : ""
+    def spark_arg = (args.spark_version != null) ? "--build-arg SPARK_VERSION=${args.spark_version}" : ""
+    def docker_args = "--build-arg JDK_VERSION=${args.jdk_version} ${spark_arg}"
+    // Run integration tests only when spark_version is given
+    def docker_extra_params = (args.spark_version != null) ? "CI_DOCKER_EXTRA_PARAMS_INIT='-e RUN_INTEGRATION_TEST=1'" : ""
     sh """
     ${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/test_jvm_cross.sh
     """
7 changes: 4 additions & 3 deletions tests/ci_build/Dockerfile.jvm_cross
@@ -1,5 +1,6 @@
 FROM ubuntu:19.04
 ARG JDK_VERSION=8
+ARG SPARK_VERSION=2.4.2

 # Environment
 ENV DEBIAN_FRONTEND noninteractive
@@ -19,9 +20,9 @@ RUN \
     tar xvf apache-maven-3.6.1-bin.tar.gz -C /opt && \
     ln -s /opt/apache-maven-3.6.1/ /opt/maven && \
     # Spark
-    wget https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz && \
-    tar xvf spark-2.4.1-bin-hadoop2.7.tgz -C /opt && \
-    ln -s /opt/spark-2.4.1-bin-hadoop2.7 /opt/spark
+    wget https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop2.7.tgz && \
+    tar xvf spark-$SPARK_VERSION-bin-hadoop2.7.tgz -C /opt && \
+    ln -s /opt/spark-$SPARK_VERSION-bin-hadoop2.7 /opt/spark

 ENV PATH=/opt/python/bin:/opt/spark/bin:/opt/maven/bin:$PATH
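With SPARK_VERSION exposed as a build argument with a 2.4.2 default, the image can target any published Spark release without editing the Dockerfile. A hedged example (the image tag is hypothetical; 2.4.3 is the release this PR's title targets):

    docker build -f tests/ci_build/Dockerfile.jvm_cross \
      --build-arg JDK_VERSION=8 --build-arg SPARK_VERSION=2.4.3 \
      -t xgboost-ci-jvm-cross .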
9 changes: 8 additions & 1 deletion tests/ci_build/build_jvm_packages.sh
@@ -3,13 +3,20 @@
 set -e
 set -x

+if [ $# -ne 1 ]; then
+  echo "Usage: $0 [spark version]"
+  exit 1
+fi
+
+spark_version=$1
+
 # Initialize local Maven repository
 ./tests/ci_build/initialize_maven.sh

 rm -rf build/
 cd jvm-packages

-mvn --no-transfer-progress package
+mvn --no-transfer-progress package -Dspark.version=${spark_version}

 set +x
 set +e
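
The script now fails fast when the Spark version is omitted and forwards it to Maven as -Dspark.version (this assumes the jvm-packages POMs read a spark.version property). Example invocations from the repository root, where the relative paths above resolve:

    ./tests/ci_build/build_jvm_packages.sh          # prints usage, exits 1
    ./tests/ci_build/build_jvm_packages.sh 2.4.2    # builds against Spark 2.4.2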
6 changes: 3 additions & 3 deletions tests/ci_build/ci_build.sh
@@ -51,11 +51,11 @@ if [[ "$1" == "-it" ]]; then
     shift 1
 fi

-if [[ "$1" == "--build-arg" ]]; then
-    CI_DOCKER_BUILD_ARG+="$1"
+while [[ "$1" == "--build-arg" ]]; do
+    CI_DOCKER_BUILD_ARG+=" $1"
     CI_DOCKER_BUILD_ARG+=" $2"
     shift 2
-fi
+done

 if [[ ! -f "${DOCKERFILE_PATH}" ]]; then
     echo "Invalid Dockerfile path: \"${DOCKERFILE_PATH}\""