From 66c8e7d418e602dc04744dd0c6da9aca46e5e318 Mon Sep 17 00:00:00 2001 From: driazati Date: Mon, 22 Nov 2021 09:44:04 -0800 Subject: [PATCH 1/4] Test out naming Jenkins steps --- Jenkinsfile | 52 ++++++++++++++++++++++++---------------------------- 1 file changed, 24 insertions(+), 28 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index a5e1d2824566..ab818b0a3b76 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -92,26 +92,17 @@ def per_exec_ws(folder) { // initialize source codes def init_git() { // Add more info about job node - sh """ + sh (script: """ echo "INFO: NODE_NAME=${NODE_NAME} EXECUTOR_NUMBER=${EXECUTOR_NUMBER}" - """ + """, label: "Show executor node info") checkout scm retry(5) { timeout(time: 2, unit: 'MINUTES') { - sh 'git submodule update --init -f' + sh (script: 'git submodule update --init -f', label: "Update git submodules") } } } -def init_git_win() { - checkout scm - retry(5) { - timeout(time: 2, unit: 'MINUTES') { - bat 'git submodule update --init -f' - } - } -} - def cancel_previous_build() { // cancel previous build if it is not on main. if (env.BRANCH_NAME != "main") { @@ -136,7 +127,7 @@ stage('Prepare') { ci_qemu = params.ci_qemu_param ?: ci_qemu ci_arm = params.ci_arm_param ?: ci_arm - sh """ + sh script: """ echo "Docker images being used in this build:" echo " ci_lint = ${ci_lint}" echo " ci_cpu = ${ci_cpu}" @@ -145,7 +136,7 @@ stage('Prepare') { echo " ci_i386 = ${ci_i386}" echo " ci_qemu = ${ci_qemu}" echo " ci_arm = ${ci_arm}" - """ + """, label: "Docker image names" } } @@ -156,52 +147,56 @@ stage('Sanity Check') { init_git() is_docs_only_build = sh (returnStatus: true, script: ''' ./tests/scripts/git_change_docs.sh - ''' + ''', label: "Check for docs only changes" ) - sh "${docker_run} ${ci_lint} ./tests/scripts/task_lint.sh" + sh (script: "${docker_run} ${ci_lint} ./tests/scripts/task_lint.sh", label: "Run lint") } } } } // Run make. First try to do an incremental make from a previous workspace in hope to -// accelerate the compilation. If something wrong, clean the workspace and then +// accelerate the compilation. If something is wrong, clean the workspace and then // build from scratch. def make(docker_type, path, make_flag) { timeout(time: max_time, unit: 'MINUTES') { try { - sh "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}" + sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", label: "Run cmake build") // always run cpp test when build - sh "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh" + sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", label: "Build and run C++ tests") } catch (hudson.AbortException ae) { // script exited due to user abort, directly throw instead of retry if (ae.getMessage().contains('script returned exit code 143')) { throw ae } echo 'Incremental compilation failed. 
Fall back to build from scratch' - sh "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}" - sh "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}" - sh "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh" + sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}", label: "Clear old cmake workspace") + sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", label: "Run cmake build") + sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", label: "Build and run C++ tests") } } } // pack libraries for later use def pack_lib(name, libs) { - sh """ + sh (script: """ echo "Packing ${libs} into ${name}" echo ${libs} | sed -e 's/,/ /g' | xargs md5sum - """ + """, label: "Stash libraries and show md5") stash includes: libs, name: name } // unpack libraries saved before def unpack_lib(name, libs) { unstash name - sh """ + sh (script: """ echo "Unpacked ${libs} from ${name}" echo ${libs} | sed -e 's/,/ /g' | xargs md5sum - """ + """, label: "Unstash libraries and show md5") +} + +def ci_setup(image) { + sh (script: "${docker_run} ${image} ./tests/scripts/task_ci_setup.sh", label: "Setup CI environment") } stage('Build') { @@ -223,11 +218,12 @@ stage('Build') { node('CPU') { ws(per_exec_ws('tvm/build-cpu')) { init_git() - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh" + sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh", label: "Create CPU build cmake config") make(ci_cpu, 'build', '-j2') pack_lib('cpu', tvm_multilib_tsim) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh" + // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh" + ci_setup(ci_cpu) sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_unittest.sh" sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_fsim.sh" sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_tsim.sh" From 161a4ad23d1ecefbaa979f24d9eb47ab5c718cfc Mon Sep 17 00:00:00 2001 From: driazati Date: Mon, 22 Nov 2021 13:14:55 -0800 Subject: [PATCH 2/4] Add labels to the rest of the steps --- Jenkinsfile | 178 ++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 132 insertions(+), 46 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index ab818b0a3b76..c2c33eb6c77c 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -161,18 +161,33 @@ stage('Sanity Check') { def make(docker_type, path, make_flag) { timeout(time: max_time, unit: 'MINUTES') { try { - sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", label: "Run cmake build") + sh ( + script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", + label: "Run cmake build", + ) // always run cpp test when build - sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", label: "Build and run C++ tests") + sh ( + script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", + label: "Build and run C++ tests", + ) } catch (hudson.AbortException ae) { // script exited due to user abort, directly throw instead of retry if (ae.getMessage().contains('script returned exit code 143')) { throw ae } echo 'Incremental compilation failed. 
Fall back to build from scratch' - sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}", label: "Clear old cmake workspace") - sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", label: "Run cmake build") - sh (script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", label: "Build and run C++ tests") + sh ( + script: "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}", + label: "Clear old cmake workspace", + ) + sh ( + script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", + label: "Run cmake build", + ) + sh ( + script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", + label: "Build and run C++ tests", + ) } } } @@ -196,7 +211,24 @@ def unpack_lib(name, libs) { } def ci_setup(image) { - sh (script: "${docker_run} ${image} ./tests/scripts/task_ci_setup.sh", label: "Setup CI environment") + sh ( + script: "${docker_run} ${image} ./tests/scripts/task_ci_setup.sh", + label: "Set up CI environment", + ) +} + +def unittest(image) { + sh ( + script: "${docker_run} ${image} ./tests/scripts/task_python_unittest.sh", + label: "Run Python unittests", + ) +} + +def fsim_test(image) { + sh ( + script: "${docker_run} ${image} ./tests/scripts/task_python_vta_fsim.sh", + label: "Run VTA tests in FSIM ", + ) } stage('Build') { @@ -218,18 +250,24 @@ stage('Build') { node('CPU') { ws(per_exec_ws('tvm/build-cpu')) { init_git() - sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh", label: "Create CPU build cmake config") + sh ( + script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh", + label: "Create CPU cmake config", + ) make(ci_cpu, 'build', '-j2') pack_lib('cpu', tvm_multilib_tsim) timeout(time: max_time, unit: 'MINUTES') { // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh" ci_setup(ci_cpu) - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_unittest.sh" - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_fsim.sh" - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_tsim.sh" + unittest(ci_cpu) + fsim_test(ci_cpu) + sh ( + script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_tsim.sh", + label: "Run VTA tests in TSIM", + ) // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh" // TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh" + sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh", label: "Rust build and test") junit "build/pytest-results/*.xml" } } @@ -243,11 +281,17 @@ stage('Build') { node('CPU') { ws(per_exec_ws('tvm/build-wasm')) { init_git() - sh "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh" + sh ( + script: "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh", + label: "Create WASM cmake config", + ) make(ci_wasm, 'build', '-j2') timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_wasm} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh" + ci_setup(ci_wasm) + sh ( + script: "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh", + label: "Run WASM lint and tests", + ) } } } @@ -255,32 +299,38 @@ stage('Build') { Utils.markStageSkippedForConditional('BUILD: WASM') } }, - 'BUILD : i386': { + 'BUILD: i386': { if ( is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/build-i386')) { init_git() - sh "${docker_run} 
${ci_i386} ./tests/scripts/task_config_build_i386.sh" + sh ( + script: "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh", + label: "Create i386 cmake config", + ) make(ci_i386, 'build', '-j2') pack_lib('i386', tvm_multilib_tsim) } } } else { - Utils.markStageSkippedForConditional('BUILD : i386') + Utils.markStageSkippedForConditional('BUILD: i386') } }, - 'BUILD : arm': { + 'BUILD: arm': { if (is_docs_only_build != 1) { node('ARM') { ws(per_exec_ws('tvm/build-arm')) { init_git() - sh "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh" + sh ( + script: "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh", + label: "Create ARM cmake config", + ) make(ci_arm, 'build', '-j4') pack_lib('arm', tvm_multilib) } } } else { - Utils.markStageSkippedForConditional('BUILD : arm') + Utils.markStageSkippedForConditional('BUILD: arm') } }, 'BUILD: QEMU': { @@ -288,11 +338,17 @@ stage('Build') { node('CPU') { ws(per_exec_ws('tvm/build-qemu')) { init_git() - sh "${docker_run} ${ci_qemu} ./tests/scripts/task_config_build_qemu.sh" + sh ( + script: "${docker_run} ${ci_qemu} ./tests/scripts/task_config_build_qemu.sh", + label: "Create QEMU cmake config", + ) make(ci_qemu, 'build', '-j2') timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_qemu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_qemu} ./tests/scripts/task_python_microtvm.sh" + ci_setup(ci_qemu) + sh ( + script: "${docker_run} ${ci_qemu} ./tests/scripts/task_python_microtvm.sh", + label: "Run microTVM tests", + ) junit "build/pytest-results/*.xml" } } @@ -311,10 +367,19 @@ stage('Unit Test') { init_git() unpack_lib('gpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_sphinx_precheck.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_unittest_gpuonly.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_integration_gpuonly.sh" + ci_setup(ci_gpu) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_sphinx_precheck.sh", + label: "Check Sphinx warnings in docs", + ) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_unittest_gpuonly.sh", + label: "Run GPU unit tests", + ) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_integration_gpuonly.sh", + label: "Run GPU integration tests", + ) junit "build/pytest-results/*.xml" } } @@ -330,8 +395,11 @@ stage('Unit Test') { init_git() unpack_lib('cpu', tvm_multilib_tsim) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_integration.sh" + ci_setup(ci_cpu) + sh ( + script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_integration.sh", + label: "Run CPU integration tests", + ) junit "build/pytest-results/*.xml" } } @@ -347,10 +415,13 @@ stage('Unit Test') { init_git() unpack_lib('i386', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_i386} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_unittest.sh" - sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_integration_i386only.sh" - sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_vta_fsim.sh" + ci_setup(ci_i386) + unittest(ci_i386) + sh ( + script: "${docker_run} ${ci_i386} ./tests/scripts/task_python_integration_i386only.sh", + label: "Run i386 integration tests", + ) + fsim_test(ci_i386) 
junit "build/pytest-results/*.xml" } } @@ -366,9 +437,12 @@ stage('Unit Test') { init_git() unpack_lib('arm', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_arm} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_unittest.sh" - sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh" + ci_setup(ci_arm) + unittest(ci_arm) + sh ( + script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh", + label: "Run test_arm_compute_lib test", + ) junit "build/pytest-results/*.xml" // sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_integration.sh" } @@ -385,8 +459,11 @@ stage('Unit Test') { init_git() unpack_lib('gpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_java_unittest.sh" + ci_setup(ci_gpu) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_java_unittest.sh", + label: "Run Java unit tests", + ) } } } @@ -404,8 +481,11 @@ stage('Integration Test') { init_git() unpack_lib('gpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_topi.sh" + ci_setup(ci_gpu) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_topi.sh", + label: "Run TOPI tests", + ) junit "build/pytest-results/*.xml" } } @@ -421,8 +501,11 @@ stage('Integration Test') { init_git() unpack_lib('gpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_frontend.sh" + ci_setup(ci_gpu) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_frontend.sh", + label: "Run Python frontend tests", + ) junit "build/pytest-results/*.xml" } } @@ -438,8 +521,11 @@ stage('Integration Test') { init_git() unpack_lib('cpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_frontend_cpu.sh" + ci_setup(ci_cpu) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_frontend.sh", + label: "Run Python frontend tests", + ) junit "build/pytest-results/*.xml" } } @@ -454,8 +540,8 @@ stage('Integration Test') { init_git() unpack_lib('gpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh" - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh" + ci_setup(ci_gpu) + sh (script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh", label: "Build docs") } pack_lib('mydocs', 'docs.tgz') } From 4591f57b7405539264d1f707ba821aa7689c0a0d Mon Sep 17 00:00:00 2001 From: driazati Date: Mon, 22 Nov 2021 13:33:15 -0800 Subject: [PATCH 3/4] More cleanup --- Jenkinsfile | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index c2c33eb6c77c..a87a5ee3649f 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -92,9 +92,12 @@ def per_exec_ws(folder) { // initialize source codes def init_git() { // Add more info about job node - sh (script: """ + sh ( + script: """ echo "INFO: NODE_NAME=${NODE_NAME} EXECUTOR_NUMBER=${EXECUTOR_NUMBER}" - """, label: "Show executor node info") + """, + label: "Show executor node info", + ) checkout scm 
retry(5) { timeout(time: 2, unit: 'MINUTES') { @@ -127,7 +130,7 @@ stage('Prepare') { ci_qemu = params.ci_qemu_param ?: ci_qemu ci_arm = params.ci_arm_param ?: ci_arm - sh script: """ + sh (script: """ echo "Docker images being used in this build:" echo " ci_lint = ${ci_lint}" echo " ci_cpu = ${ci_cpu}" @@ -136,7 +139,7 @@ stage('Prepare') { echo " ci_i386 = ${ci_i386}" echo " ci_qemu = ${ci_qemu}" echo " ci_arm = ${ci_arm}" - """, label: "Docker image names" + """, label: "Docker image names") } } @@ -145,11 +148,15 @@ stage('Sanity Check') { node('CPU') { ws(per_exec_ws('tvm/sanity')) { init_git() - is_docs_only_build = sh (returnStatus: true, script: ''' - ./tests/scripts/git_change_docs.sh - ''', label: "Check for docs only changes" + is_docs_only_build = sh ( + returnStatus: true, + script: './tests/scripts/git_change_docs.sh', + label: "Check for docs only changes", + ) + sh ( + script: "${docker_run} ${ci_lint} ./tests/scripts/task_lint.sh", + label: "Run lint", ) - sh (script: "${docker_run} ${ci_lint} ./tests/scripts/task_lint.sh", label: "Run lint") } } } @@ -220,7 +227,7 @@ def ci_setup(image) { def unittest(image) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_python_unittest.sh", - label: "Run Python unittests", + label: "Run Python unit tests", ) } @@ -257,7 +264,6 @@ stage('Build') { make(ci_cpu, 'build', '-j2') pack_lib('cpu', tvm_multilib_tsim) timeout(time: max_time, unit: 'MINUTES') { - // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh" ci_setup(ci_cpu) unittest(ci_cpu) fsim_test(ci_cpu) @@ -541,7 +547,10 @@ stage('Integration Test') { unpack_lib('gpu', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_gpu) - sh (script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh", label: "Build docs") + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh", + label: "Build docs", + ) } pack_lib('mydocs', 'docs.tgz') } From d138088b055a88d1af9576610115be4a1982c38c Mon Sep 17 00:00:00 2001 From: driazati Date: Wed, 24 Nov 2021 11:09:16 -0800 Subject: [PATCH 4/4] Address comments --- Jenkinsfile | 46 ++++++++++++++++++++++++---------------------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index a87a5ee3649f..21c5fb3a9a69 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -168,15 +168,9 @@ stage('Sanity Check') { def make(docker_type, path, make_flag) { timeout(time: max_time, unit: 'MINUTES') { try { - sh ( - script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", - label: "Run cmake build", - ) + cmake_build(docker_type, path, make_flag) // always run cpp test when build - sh ( - script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", - label: "Build and run C++ tests", - ) + cpp_unittest(docker_type) } catch (hudson.AbortException ae) { // script exited due to user abort, directly throw instead of retry if (ae.getMessage().contains('script returned exit code 143')) { @@ -187,14 +181,8 @@ def make(docker_type, path, make_flag) { script: "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}", label: "Clear old cmake workspace", ) - sh ( - script: "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}", - label: "Run cmake build", - ) - sh ( - script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", - label: "Build and run C++ tests", - ) + cmake_build(docker_type, path, make_flag) + cpp_unittest(docker_type) } } } @@ -224,7 +212,7 @@ def 
ci_setup(image) { ) } -def unittest(image) { +def python_unittest(image) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_python_unittest.sh", label: "Run Python unit tests", @@ -238,6 +226,20 @@ def fsim_test(image) { ) } +def cmake_build(image, path, make_flag) { + sh ( + script: "${docker_run} ${image} ./tests/scripts/task_build.sh ${path} ${make_flag}", + label: "Run cmake build", + ) +} + +def cpp_unittest(image) { + sh ( + script: "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh", + label: "Build and run C++ tests", + ) +} + stage('Build') { parallel 'BUILD: GPU': { node('GPUBUILD') { @@ -265,7 +267,7 @@ stage('Build') { pack_lib('cpu', tvm_multilib_tsim) timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_cpu) - unittest(ci_cpu) + python_unittest(ci_cpu) fsim_test(ci_cpu) sh ( script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_tsim.sh", @@ -380,11 +382,11 @@ stage('Unit Test') { ) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_unittest_gpuonly.sh", - label: "Run GPU unit tests", + label: "Run Python GPU unit tests", ) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_integration_gpuonly.sh", - label: "Run GPU integration tests", + label: "Run Python GPU integration tests", ) junit "build/pytest-results/*.xml" } @@ -422,7 +424,7 @@ stage('Unit Test') { unpack_lib('i386', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_i386) - unittest(ci_i386) + python_unittest(ci_i386) sh ( script: "${docker_run} ${ci_i386} ./tests/scripts/task_python_integration_i386only.sh", label: "Run i386 integration tests", @@ -444,7 +446,7 @@ stage('Unit Test') { unpack_lib('arm', tvm_multilib) timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_arm) - unittest(ci_arm) + python_unittest(ci_arm) sh ( script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh", label: "Run test_arm_compute_lib test",
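
Note for readers following the series: the sketch below shows, in isolation, the labelling pattern these patches converge on. Repeated steps are wrapped in small Groovy helpers that take the Docker image as a parameter, and every sh step carries an explicit label: so the Jenkins step view shows a readable name instead of the raw shell command. This is a minimal illustrative sketch, not part of the patches themselves; the docker_run value, the ci_cpu image tag, max_time, and the node/stage names are placeholder assumptions, while the helper names and task scripts mirror the ones added above.

// Minimal sketch of the labeled-step pattern (illustration only, not part of the patches).
// docker_run, ci_cpu, max_time, and the node/stage names are assumed placeholders.
docker_run = 'docker/bash.sh'        // assumed wrapper script, stands in for the ${docker_run} variable above
ci_cpu = 'example/ci-cpu:latest'     // assumed image tag
max_time = 120                       // assumed timeout in minutes

// Each helper takes the image as its parameter, and each sh step gets a label
// so the Jenkins UI lists a readable step name instead of the shell command.
def cmake_build(image, path, make_flag) {
  sh(
    script: "${docker_run} ${image} ./tests/scripts/task_build.sh ${path} ${make_flag}",
    label: 'Run cmake build'
  )
}

def cpp_unittest(image) {
  sh(
    script: "${docker_run} ${image} ./tests/scripts/task_cpp_unittest.sh",
    label: 'Build and run C++ tests'
  )
}

node('CPU') {
  stage('Example: CPU build') {
    timeout(time: max_time, unit: 'MINUTES') {
      cmake_build(ci_cpu, 'build', '-j2')
      cpp_unittest(ci_cpu)
    }
  }
}

Passing the image into each helper, rather than reading an outer variable such as docker_type inside the helper body, is what lets the same helper serve the CPU, GPU, i386, and ARM stages without modification.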