Further improvements and savings in Jenkins pipeline (#5904)
* Publish artifacts only on the master and release branches

* Build CUDA only for Compute Capability 7.5 when building PRs

* Run all Windows jobs in a single worker image

* Build nightly XGBoost4J SNAPSHOT JARs with Scala 2.12 only

* Show skipped Python tests on Windows

* Make Graphviz optional for Python tests

* Add back C++ tests

* Unstash xgboost_cpp_tests

* Fix label to CUDA 10.1

* Install cuPy for CUDA 10.1

* Install jsonschema

* Address reviewer's feedback
hcho3 authored Jul 18, 2020
1 parent 6c0c872 commit ac9136e
Showing 8 changed files with 93 additions and 59 deletions.
25 changes: 18 additions & 7 deletions Jenkinsfile
@@ -173,8 +173,10 @@ def Doxygen() {
sh """
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/doxygen.sh ${BRANCH_NAME}
"""
echo 'Uploading doc...'
s3Upload file: "build/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "doxygen/${BRANCH_NAME}.tar.bz2"
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
echo 'Uploading doc...'
s3Upload file: "build/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "doxygen/${BRANCH_NAME}.tar.bz2"
}
deleteDir()
}
}
@@ -245,17 +247,24 @@ def BuildCUDA(args) {
def container_type = "gpu_build"
def docker_binary = "docker"
def docker_args = "--build-arg CUDA_VERSION=${args.cuda_version}"
def arch_flag = ""
if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
arch_flag = "-DGPU_COMPUTE_VER=75"
}
sh """
${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_via_cmake.sh -DUSE_CUDA=ON -DUSE_NCCL=ON -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOLS=ON
${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_via_cmake.sh -DUSE_CUDA=ON -DUSE_NCCL=ON -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOLS=ON ${arch_flag}
${dockerRun} ${container_type} ${docker_binary} ${docker_args} bash -c "cd python-package && rm -rf dist/* && python setup.py bdist_wheel --universal"
${dockerRun} ${container_type} ${docker_binary} ${docker_args} python3 tests/ci_build/rename_whl.py python-package/dist/*.whl ${commit_id} manylinux2010_x86_64
"""
// Stash wheel for CUDA 10.0 target
if (args.cuda_version == '10.0') {
echo 'Stashing Python wheel...'
stash name: 'xgboost_whl_cuda10', includes: 'python-package/dist/*.whl'
path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
echo 'Uploading Python wheel...'
path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
}
echo 'Stashing C++ test executable (testxgboost)...'
stash name: 'xgboost_cpp_tests', includes: 'build/testxgboost'
}
@@ -289,8 +298,10 @@ def BuildJVMDoc() {
sh """
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_doc.sh ${BRANCH_NAME}
"""
echo 'Uploading doc...'
s3Upload file: "jvm-packages/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${BRANCH_NAME}.tar.bz2"
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
echo 'Uploading doc...'
s3Upload file: "jvm-packages/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${BRANCH_NAME}.tar.bz2"
}
deleteDir()
}
}
72 changes: 32 additions & 40 deletions Jenkinsfile-win64
@@ -10,6 +10,15 @@ def commit_id // necessary to pass a variable from one stage to another

pipeline {
agent none

// Setup common job properties
options {
timestamps()
timeout(time: 240, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
preserveStashes()
}

// Build stages
stages {
stage('Jenkins Win64: Initialize') {
@@ -29,7 +38,7 @@ pipeline {
steps {
script {
parallel ([
'build-win64-cuda10.0': { BuildWin64() }
'build-win64-cuda10.1': { BuildWin64() }
])
}
milestone ordinal: 2
@@ -40,8 +49,7 @@ pipeline {
steps {
script {
parallel ([
'test-win64-cpu': { TestWin64CPU() },
'test-win64-gpu-cuda10.1': { TestWin64GPU(cuda_target: 'cuda10_1') }
'test-win64-cuda10.1': { TestWin64() },
])
}
milestone ordinal: 3
@@ -66,14 +74,18 @@ def checkoutSrcs() {
}

def BuildWin64() {
node('win64 && build && cuda10') {
node('win64 && cuda10_unified') {
unstash name: 'srcs'
echo "Building XGBoost for Windows AMD64 target..."
bat "nvcc --version"
def arch_flag = ""
if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
arch_flag = "-DGPU_COMPUTE_VER=75"
}
bat """
mkdir build
cd build
cmake .. -G"Visual Studio 15 2017 Win64" -DUSE_CUDA=ON -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON
cmake .. -G"Visual Studio 15 2017 Win64" -DUSE_CUDA=ON -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON ${arch_flag}
"""
bat """
cd build
@@ -91,61 +103,41 @@ def BuildWin64() {
"""
echo 'Stashing Python wheel...'
stash name: 'xgboost_whl', includes: 'python-package/dist/*.whl'
path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
echo 'Uploading Python wheel...'
path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
}
echo 'Stashing C++ test executable (testxgboost)...'
stash name: 'xgboost_cpp_tests', includes: 'build/testxgboost.exe'
stash name: 'xgboost_cli', includes: 'xgboost.exe'
deleteDir()
}
}

def TestWin64CPU() {
node('win64 && cpu') {
def TestWin64() {
node('win64 && cuda10_unified') {
unstash name: 'srcs'
unstash name: 'xgboost_whl'
unstash name: 'xgboost_cli'
echo "Test Win64 CPU"
echo "Installing Python wheel..."
bat "conda activate && (python -m pip uninstall -y xgboost || cd .)"
bat """
conda activate && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
"""
echo "Installing Python dependencies..."
bat """
conda activate && conda install -y hypothesis && conda upgrade scikit-learn pandas numpy hypothesis
"""
echo "Running Python tests..."
bat "conda activate && python -m pytest -v -s --fulltrace tests\\python"
bat "conda activate && python -m pip uninstall -y xgboost"
deleteDir()
}
}

def TestWin64GPU(args) {
node("win64 && gpu && ${args.cuda_target}") {
unstash name: 'srcs'
unstash name: 'xgboost_whl'
unstash name: 'xgboost_cpp_tests'
echo "Test Win64 GPU (${args.cuda_target})"
echo "Test Win64"
bat "nvcc --version"
echo "Running C++ tests..."
bat "build\\testxgboost.exe"
echo "Installing Python wheel..."
bat "conda activate && (python -m pip uninstall -y xgboost || cd .)"
bat """
conda activate && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
"""
echo "Installing Python dependencies..."
def cuda_short_ver = args.cuda_target.replaceAll('_', '')
def env_name = 'win64_' + UUID.randomUUID().toString().replaceAll('-', '')
bat "conda env create -n ${env_name} --file=tests/ci_build/win64_conda_env.yml"
echo "Installing Python wheel..."
bat """
conda activate && conda install -y hypothesis && conda upgrade scikit-learn pandas numpy hypothesis && python -m pip install cupy-${cuda_short_ver}
conda activate ${env_name} && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
"""
echo "Running Python tests..."
bat "conda activate ${env_name} && python -m pytest -v -s -rxXs --fulltrace tests\\python"
bat """
conda activate && python -m pytest -v -s --fulltrace -m "(not slow) and (not mgpu)" tests\\python-gpu
conda activate ${env_name} && python -m pytest -v -s -rxXs --fulltrace -m "(not slow) and (not mgpu)" tests\\python-gpu
"""
bat "conda activate && python -m pip uninstall -y xgboost"
bat "conda env remove --name ${env_name}"
deleteDir()
}
}
11 changes: 0 additions & 11 deletions tests/ci_build/deploy_jvm_packages.sh
@@ -20,16 +20,5 @@ cd jvm-packages
# Deploy to S3 bucket xgboost-maven-repo
mvn --no-transfer-progress package deploy -P release-to-s3 -Dspark.version=${spark_version} -DskipTests

# Compile XGBoost4J with Scala 2.11 too
mvn clean
# Rename artifactId of all XGBoost4J packages with suffix _2.11
sed -i -e 's/<artifactId>xgboost\(.*\)_[0-9\.]\+/<artifactId>xgboost\1_2.11/' $(find . -name pom.xml)
# Modify scala.version and scala.binary.version fields
sed -i -e 's/<scala\.version>[0-9\.]\+/<scala.version>2.11.12/' $(find . -name pom.xml)
sed -i -e 's/<scala\.binary\.version>[0-9\.]\+/<scala.binary.version>2.11/' $(find . -name pom.xml)

# Re-build and deploy
mvn --no-transfer-progress package deploy -P release-to-s3 -Dspark.version=${spark_version} -DskipTests

set +x
set +e
18 changes: 18 additions & 0 deletions tests/ci_build/win64_conda_env.yml
@@ -0,0 +1,18 @@
name: win64_env
channels:
- conda-forge
dependencies:
- python=3.7
- numpy
- scipy
- matplotlib
- scikit-learn
- pandas
- pytest
- python-graphviz
- boto3
- hypothesis
- jsonschema
- pip
- pip:
- cupy-cuda101
3 changes: 3 additions & 0 deletions tests/python-gpu/test_gpu_demos.py
@@ -1,10 +1,13 @@
import os
import subprocess
import sys
import pytest
sys.path.append("tests/python")
import testing as tm
import test_demos as td # noqa


@pytest.mark.skipif(**tm.no_cupy())
def test_data_iterator():
script = os.path.join(td.PYTHON_DEMO_DIR, 'data_iterator.py')
cmd = ['python', script]
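Note on the marker added above: pytest.mark.skipif(**tm.no_cupy()) unpacks a dict into the condition= and reason= keywords. A minimal, hedged sketch of what tm.no_cupy() is assumed to look like, following the same try/except-import pattern as the no_graphviz() helper added to tests/python/testing.py further down (the helper name and test body here are illustrative, not part of this diff):

import pytest


def no_cupy():
    # Assumed shape: report whether cuPy is importable, plus a skip reason.
    reason = 'cupy is not installed'
    try:
        import cupy  # noqa
        return {'condition': False, 'reason': reason}
    except ImportError:
        return {'condition': True, 'reason': reason}


# The dict unpacks into skipif(condition=..., reason=...), so the test body
# only runs when cuPy can actually be imported.
@pytest.mark.skipif(**no_cupy())
def test_requires_cupy():
    import cupy  # safe here: the marker guarantees cuPy is importable
    assert int(cupy.arange(3).sum()) == 3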
2 changes: 1 addition & 1 deletion tests/python/test_plotting.py
@@ -15,7 +15,7 @@
pass


pytestmark = pytest.mark.skipif(**tm.no_matplotlib())
pytestmark = pytest.mark.skipif(**tm.no_multiple(tm.no_matplotlib(), tm.no_graphviz()))


dpath = 'demo/data/'
1 change: 1 addition & 0 deletions tests/python/test_with_sklearn.py
@@ -437,6 +437,7 @@ def test_sklearn_api_gblinear():


@pytest.mark.skipif(**tm.no_matplotlib())
@pytest.mark.skipif(**tm.no_graphviz())
def test_sklearn_plotting():
from sklearn.datasets import load_iris

20 changes: 20 additions & 0 deletions tests/python/testing.py
@@ -98,6 +98,26 @@ def no_json_schema():
return {'condition': True, 'reason': reason}


def no_graphviz():
reason = 'graphviz is not installed'
try:
import graphviz # noqa
return {'condition': False, 'reason': reason}
except ImportError:
return {'condition': True, 'reason': reason}


def no_multiple(*args):
condition = False
reason = ''
for arg in args:
condition = (condition or arg['condition'])
if arg['condition']:
reason = arg['reason']
break
return {'condition': condition, 'reason': reason}


# Contains a dataset in numpy format as well as the relevant objective and metric
class TestDataset:
def __init__(self, name, get_dataset, objective, metric
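To illustrate how the new helpers compose: the standalone sketch below copies no_multiple() from the hunk above and exercises it with two example condition dicts (the dict values are illustrative), mirroring the module-level pytestmark now used in tests/python/test_plotting.py.

def no_multiple(*args):
    # Copied from the hunk above: OR the 'condition' flags together and keep
    # the reason of the first condition that triggered.
    condition = False
    reason = ''
    for arg in args:
        condition = condition or arg['condition']
        if arg['condition']:
            reason = arg['reason']
            break
    return {'condition': condition, 'reason': reason}


matplotlib_missing = {'condition': False, 'reason': 'matplotlib is not installed'}
graphviz_missing = {'condition': True, 'reason': 'graphviz is not installed'}

# The whole test module is skipped as soon as any one optional dependency is missing.
assert no_multiple(matplotlib_missing, graphviz_missing) == \
    {'condition': True, 'reason': 'graphviz is not installed'}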
