From 558780e9964138228393ddd74249ee2be74c833a Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 9 Jul 2020 11:06:53 -0400 Subject: [PATCH 01/22] Revert "Revert #64011 and subsequent fixes (#71137)" This reverts commit b6a06dfb61e631b6d2cf89c0b5fe1510c748cf96. --- .ci/Dockerfile | 35 ++++ .ci/runbld_no_junit.yml | 2 +- .gitignore | 2 + Jenkinsfile | 45 +---- src/dev/ci_setup/checkout_sibling_es.sh | 12 +- src/dev/ci_setup/setup_env.sh | 4 +- src/dev/notice/generate_notice_from_source.ts | 2 + tasks/config/karma.js | 2 +- tasks/test_jest.js | 6 +- test/scripts/checks/doc_api_changes.sh | 5 + test/scripts/checks/file_casing.sh | 5 + test/scripts/checks/i18n.sh | 5 + test/scripts/checks/licenses.sh | 5 + test/scripts/checks/lock_file_symlinks.sh | 5 + test/scripts/checks/test_hardening.sh | 5 + test/scripts/checks/test_projects.sh | 5 + test/scripts/checks/ts_projects.sh | 5 + test/scripts/checks/type_check.sh | 5 + .../checks/verify_dependency_versions.sh | 5 + test/scripts/checks/verify_notice.sh | 5 + .../jenkins_build_kbn_sample_panel_action.sh | 0 test/scripts/jenkins_build_kibana.sh | 19 +- test/scripts/jenkins_build_plugins.sh | 19 ++ test/scripts/jenkins_ci_group.sh | 2 +- test/scripts/jenkins_plugin_functional.sh | 15 ++ .../jenkins_security_solution_cypress.sh | 8 +- .../jenkins_setup_parallel_workspace.sh | 32 ++++ test/scripts/jenkins_test_setup.sh | 4 + test/scripts/jenkins_test_setup_oss.sh | 15 +- test/scripts/jenkins_test_setup_xpack.sh | 15 +- test/scripts/jenkins_xpack_build_kibana.sh | 23 +-- test/scripts/jenkins_xpack_build_plugins.sh | 21 +++ .../jenkins_xpack_page_load_metrics.sh | 0 test/scripts/lint/eslint.sh | 5 + test/scripts/lint/sasslint.sh | 5 + test/scripts/test/api_integration.sh | 5 + test/scripts/test/jest_integration.sh | 5 + test/scripts/test/jest_unit.sh | 5 + test/scripts/test/karma_ci.sh | 5 + test/scripts/test/mocha.sh | 5 + test/scripts/test/xpack_jest_unit.sh | 6 + test/scripts/test/xpack_karma.sh | 6 + .../test/xpack_list_cyclic_dependency.sh | 6 + .../test/xpack_siem_cyclic_dependency.sh | 6 + vars/catchErrors.groovy | 11 +- vars/kibanaPipeline.groovy | 175 +++++++++++++++--- vars/task.groovy | 5 + vars/tasks.groovy | 124 +++++++++++++ vars/withTaskQueue.groovy | 154 +++++++++++++++ vars/workers.groovy | 12 +- .../canvas/.storybook/storyshots.test.js | 7 + 51 files changed, 755 insertions(+), 130 deletions(-) create mode 100644 .ci/Dockerfile create mode 100755 test/scripts/checks/doc_api_changes.sh create mode 100755 test/scripts/checks/file_casing.sh create mode 100755 test/scripts/checks/i18n.sh create mode 100755 test/scripts/checks/licenses.sh create mode 100755 test/scripts/checks/lock_file_symlinks.sh create mode 100755 test/scripts/checks/test_hardening.sh create mode 100755 test/scripts/checks/test_projects.sh create mode 100755 test/scripts/checks/ts_projects.sh create mode 100755 test/scripts/checks/type_check.sh create mode 100755 test/scripts/checks/verify_dependency_versions.sh create mode 100755 test/scripts/checks/verify_notice.sh mode change 100644 => 100755 test/scripts/jenkins_build_kbn_sample_panel_action.sh create mode 100755 test/scripts/jenkins_build_plugins.sh create mode 100755 test/scripts/jenkins_plugin_functional.sh mode change 100644 => 100755 test/scripts/jenkins_security_solution_cypress.sh create mode 100755 test/scripts/jenkins_setup_parallel_workspace.sh mode change 100644 => 100755 test/scripts/jenkins_test_setup.sh mode change 100644 => 100755 test/scripts/jenkins_test_setup_oss.sh mode change 100644 => 100755 test/scripts/jenkins_test_setup_xpack.sh create mode 100755 test/scripts/jenkins_xpack_build_plugins.sh mode change 100644 => 100755 test/scripts/jenkins_xpack_page_load_metrics.sh create mode 100755 test/scripts/lint/eslint.sh create mode 100755 test/scripts/lint/sasslint.sh create mode 100755 test/scripts/test/api_integration.sh create mode 100755 test/scripts/test/jest_integration.sh create mode 100755 test/scripts/test/jest_unit.sh create mode 100755 test/scripts/test/karma_ci.sh create mode 100755 test/scripts/test/mocha.sh create mode 100755 test/scripts/test/xpack_jest_unit.sh create mode 100755 test/scripts/test/xpack_karma.sh create mode 100755 test/scripts/test/xpack_list_cyclic_dependency.sh create mode 100755 test/scripts/test/xpack_siem_cyclic_dependency.sh create mode 100644 vars/task.groovy create mode 100644 vars/tasks.groovy create mode 100644 vars/withTaskQueue.groovy diff --git a/.ci/Dockerfile b/.ci/Dockerfile new file mode 100644 index 0000000000000..201e17b93c116 --- /dev/null +++ b/.ci/Dockerfile @@ -0,0 +1,35 @@ +ARG NODE_VERSION=10.21.0 + +FROM node:${NODE_VERSION} AS base + +RUN apt-get update && \ + apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \ + libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \ + libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \ + libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \ + libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \ + rm -rf /var/lib/apt/lists/* + +RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \ + && sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \ + && apt-get update \ + && apt-get install -y rsync jq bsdtar google-chrome-stable \ + --no-install-recommends \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \ + && curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \ + && unzip vault.zip \ + && rm vault.zip \ + && chmod +x vault \ + && mv vault /usr/local/bin/vault + +RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana + +COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh +RUN chmod +x /usr/local/bin/bash_standard_lib.sh + +COPY ./runbld /usr/local/bin/runbld +RUN chmod +x /usr/local/bin/runbld + +USER kibana diff --git a/.ci/runbld_no_junit.yml b/.ci/runbld_no_junit.yml index 67b5002c1c437..1bcb7e22a2648 100644 --- a/.ci/runbld_no_junit.yml +++ b/.ci/runbld_no_junit.yml @@ -3,4 +3,4 @@ profiles: - ".*": # Match any job tests: - junit-filename-pattern: "8d8bd494-d909-4e67-a052-7e8b5aaeb5e4" # A bogus path that should never exist + junit-filename-pattern: false diff --git a/.gitignore b/.gitignore index 32377ec0f1ffe..25a8c369bb704 100644 --- a/.gitignore +++ b/.gitignore @@ -47,6 +47,8 @@ npm-debug.log* .tern-project .nyc_output .ci/pipeline-library/build/ +.ci/runbld +.ci/bash_standard_lib.sh .gradle # apm plugin diff --git a/Jenkinsfile b/Jenkinsfile index f6f77ccae8427..491a1e386deb1 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -8,50 +8,7 @@ kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true) ciStats.trackBuild { catchError { retryable.enable() - parallel([ - 'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'), - 'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'), - 'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [ - 'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'), - 'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1), - 'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2), - 'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3), - 'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4), - 'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5), - 'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6), - 'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7), - 'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8), - 'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9), - 'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10), - 'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11), - 'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12), - 'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'), - // 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'), - ]), - 'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [ - 'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'), - 'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1), - 'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2), - 'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3), - 'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4), - 'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5), - 'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6), - 'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7), - 'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8), - 'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9), - 'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10), - 'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'), - 'xpack-savedObjectsFieldMetrics': kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'), - // 'xpack-pageLoadMetrics': kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh'), - 'xpack-securitySolutionCypress': { processNumber -> - whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/']) { - kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')(processNumber) - } - }, - - // 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'), - ]), - ]) + kibanaPipeline.allCiTasks() } } } diff --git a/src/dev/ci_setup/checkout_sibling_es.sh b/src/dev/ci_setup/checkout_sibling_es.sh index 915759d4214f9..3832ec9b4076a 100755 --- a/src/dev/ci_setup/checkout_sibling_es.sh +++ b/src/dev/ci_setup/checkout_sibling_es.sh @@ -7,10 +7,11 @@ function checkout_sibling { targetDir=$2 useExistingParamName=$3 useExisting="$(eval "echo "\$$useExistingParamName"")" + repoAddress="https://github.com/" if [ -z ${useExisting:+x} ]; then if [ -d "$targetDir" ]; then - echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$PARENT_DIR]!" + echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$WORKSPACE]!" echo echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling." exit 1 @@ -21,8 +22,9 @@ function checkout_sibling { cloneBranch="" function clone_target_is_valid { + echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}" - if [[ -n "$(git ls-remote --heads "git@github.com:${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then + if [[ -n "$(git ls-remote --heads "${repoAddress}${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then return 0 else return 1 @@ -71,7 +73,7 @@ function checkout_sibling { fi echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..." - git clone -b "$cloneBranch" "git@github.com:${cloneAuthor}/${project}.git" "$targetDir" --depth=1 + git clone -b "$cloneBranch" "${repoAddress}${cloneAuthor}/${project}.git" "$targetDir" --depth=1 echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)" echo } @@ -87,12 +89,12 @@ function checkout_sibling { fi } -checkout_sibling "elasticsearch" "${PARENT_DIR}/elasticsearch" "USE_EXISTING_ES" +checkout_sibling "elasticsearch" "${WORKSPACE}/elasticsearch" "USE_EXISTING_ES" export TEST_ES_FROM=${TEST_ES_FROM:-snapshot} # Set the JAVA_HOME based on the Java property file in the ES repo # This assumes the naming convention used on CI (ex: ~/.java/java10) -ES_DIR="$PARENT_DIR/elasticsearch" +ES_DIR="$WORKSPACE/elasticsearch" ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties diff --git a/src/dev/ci_setup/setup_env.sh b/src/dev/ci_setup/setup_env.sh index 343ff47199375..f96a2240917e2 100644 --- a/src/dev/ci_setup/setup_env.sh +++ b/src/dev/ci_setup/setup_env.sh @@ -53,6 +53,8 @@ export PARENT_DIR="$parentDir" kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")" export KIBANA_PKG_BRANCH="$kbnBranch" +export WORKSPACE="${WORKSPACE:-$PARENT_DIR}" + ### ### download node ### @@ -161,7 +163,7 @@ export -f checks-reporter-with-killswitch source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh" -ES_DIR="$PARENT_DIR/elasticsearch" +ES_DIR="$WORKSPACE/elasticsearch" ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then diff --git a/src/dev/notice/generate_notice_from_source.ts b/src/dev/notice/generate_notice_from_source.ts index fb74bed0f26f4..a2b05c6dc8a4e 100644 --- a/src/dev/notice/generate_notice_from_source.ts +++ b/src/dev/notice/generate_notice_from_source.ts @@ -49,8 +49,10 @@ export async function generateNoticeFromSource({ productName, directory, log }: ignore: [ '{node_modules,build,target,dist,data,built_assets}/**', 'packages/*/{node_modules,build,target,dist}/**', + 'src/plugins/*/{node_modules,build,target,dist}/**', 'x-pack/{node_modules,build,target,dist,data}/**', 'x-pack/packages/*/{node_modules,build,target,dist}/**', + 'x-pack/plugins/*/{node_modules,build,target,dist}/**', ], }; diff --git a/tasks/config/karma.js b/tasks/config/karma.js index fa4bdc8ed2266..7c4f75bea8801 100644 --- a/tasks/config/karma.js +++ b/tasks/config/karma.js @@ -110,7 +110,7 @@ module.exports = function (grunt) { customLaunchers: { Chrome_Headless: { base: 'Chrome', - flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222'], + flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222', '--no-sandbox'], }, }, diff --git a/tasks/test_jest.js b/tasks/test_jest.js index d8f51806e8ddc..810ed42324840 100644 --- a/tasks/test_jest.js +++ b/tasks/test_jest.js @@ -22,7 +22,7 @@ const { resolve } = require('path'); module.exports = function (grunt) { grunt.registerTask('test:jest', function () { const done = this.async(); - runJest(resolve(__dirname, '../scripts/jest.js')).then(done, done); + runJest(resolve(__dirname, '../scripts/jest.js'), ['--maxWorkers=10']).then(done, done); }); grunt.registerTask('test:jest_integration', function () { @@ -30,10 +30,10 @@ module.exports = function (grunt) { runJest(resolve(__dirname, '../scripts/jest_integration.js')).then(done, done); }); - function runJest(jestScript) { + function runJest(jestScript, args = []) { const serverCmd = { cmd: 'node', - args: [jestScript, '--ci'], + args: [jestScript, '--ci', ...args], opts: { stdio: 'inherit' }, }; diff --git a/test/scripts/checks/doc_api_changes.sh b/test/scripts/checks/doc_api_changes.sh new file mode 100755 index 0000000000000..503d12b2f6d73 --- /dev/null +++ b/test/scripts/checks/doc_api_changes.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:checkDocApiChanges diff --git a/test/scripts/checks/file_casing.sh b/test/scripts/checks/file_casing.sh new file mode 100755 index 0000000000000..513664263791b --- /dev/null +++ b/test/scripts/checks/file_casing.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:checkFileCasing diff --git a/test/scripts/checks/i18n.sh b/test/scripts/checks/i18n.sh new file mode 100755 index 0000000000000..7a6fd46c46c76 --- /dev/null +++ b/test/scripts/checks/i18n.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:i18nCheck diff --git a/test/scripts/checks/licenses.sh b/test/scripts/checks/licenses.sh new file mode 100755 index 0000000000000..a08d7d07a24a1 --- /dev/null +++ b/test/scripts/checks/licenses.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:licenses diff --git a/test/scripts/checks/lock_file_symlinks.sh b/test/scripts/checks/lock_file_symlinks.sh new file mode 100755 index 0000000000000..1d43d32c9feb8 --- /dev/null +++ b/test/scripts/checks/lock_file_symlinks.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:checkLockfileSymlinks diff --git a/test/scripts/checks/test_hardening.sh b/test/scripts/checks/test_hardening.sh new file mode 100755 index 0000000000000..9184758577654 --- /dev/null +++ b/test/scripts/checks/test_hardening.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:test_hardening diff --git a/test/scripts/checks/test_projects.sh b/test/scripts/checks/test_projects.sh new file mode 100755 index 0000000000000..5f9aafe80e10e --- /dev/null +++ b/test/scripts/checks/test_projects.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:test_projects diff --git a/test/scripts/checks/ts_projects.sh b/test/scripts/checks/ts_projects.sh new file mode 100755 index 0000000000000..d667c753baec2 --- /dev/null +++ b/test/scripts/checks/ts_projects.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:checkTsProjects diff --git a/test/scripts/checks/type_check.sh b/test/scripts/checks/type_check.sh new file mode 100755 index 0000000000000..07c49638134be --- /dev/null +++ b/test/scripts/checks/type_check.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:typeCheck diff --git a/test/scripts/checks/verify_dependency_versions.sh b/test/scripts/checks/verify_dependency_versions.sh new file mode 100755 index 0000000000000..b73a71e7ff7fd --- /dev/null +++ b/test/scripts/checks/verify_dependency_versions.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:verifyDependencyVersions diff --git a/test/scripts/checks/verify_notice.sh b/test/scripts/checks/verify_notice.sh new file mode 100755 index 0000000000000..9f8343e540861 --- /dev/null +++ b/test/scripts/checks/verify_notice.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:verifyNotice diff --git a/test/scripts/jenkins_build_kbn_sample_panel_action.sh b/test/scripts/jenkins_build_kbn_sample_panel_action.sh old mode 100644 new mode 100755 diff --git a/test/scripts/jenkins_build_kibana.sh b/test/scripts/jenkins_build_kibana.sh index 3e49edc8e6ae5..f449986713f97 100755 --- a/test/scripts/jenkins_build_kibana.sh +++ b/test/scripts/jenkins_build_kibana.sh @@ -2,19 +2,9 @@ source src/dev/ci_setup/setup_env.sh -echo " -> building examples separate from test plugins" -node scripts/build_kibana_platform_plugins \ - --oss \ - --examples \ - --verbose; - -echo " -> building test plugins" -node scripts/build_kibana_platform_plugins \ - --oss \ - --no-examples \ - --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ - --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \ - --verbose; +if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then + ./test/scripts/jenkins_build_plugins.sh +fi # doesn't persist, also set in kibanaPipeline.groovy export KBN_NP_PLUGINS_BUILT=true @@ -26,4 +16,7 @@ yarn run grunt functionalTests:ensureAllTestsInCiGroup; if [[ -z "$CODE_COVERAGE" ]] ; then echo " -> building and extracting OSS Kibana distributable for use in functional tests" node scripts/build --debug --oss + + mkdir -p "$WORKSPACE/kibana-build-oss" + cp -pR build/oss/kibana-*-SNAPSHOT-linux-x86_64/. $WORKSPACE/kibana-build-oss/ fi diff --git a/test/scripts/jenkins_build_plugins.sh b/test/scripts/jenkins_build_plugins.sh new file mode 100755 index 0000000000000..32b3942074b34 --- /dev/null +++ b/test/scripts/jenkins_build_plugins.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +echo " -> building examples separate from test plugins" +node scripts/build_kibana_platform_plugins \ + --oss \ + --examples \ + --workers 6 \ + --verbose + +echo " -> building kibana platform plugins" +node scripts/build_kibana_platform_plugins \ + --oss \ + --no-examples \ + --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \ + --workers 6 \ + --verbose diff --git a/test/scripts/jenkins_ci_group.sh b/test/scripts/jenkins_ci_group.sh index 60d7f0406f4c9..2542d7032e83b 100755 --- a/test/scripts/jenkins_ci_group.sh +++ b/test/scripts/jenkins_ci_group.sh @@ -5,7 +5,7 @@ source test/scripts/jenkins_test_setup_oss.sh if [[ -z "$CODE_COVERAGE" ]]; then checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}"; - if [ "$CI_GROUP" == "1" ]; then + if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then source test/scripts/jenkins_build_kbn_sample_panel_action.sh yarn run grunt run:pluginFunctionalTestsRelease --from=source; yarn run grunt run:exampleFunctionalTestsRelease --from=source; diff --git a/test/scripts/jenkins_plugin_functional.sh b/test/scripts/jenkins_plugin_functional.sh new file mode 100755 index 0000000000000..1d691d98982de --- /dev/null +++ b/test/scripts/jenkins_plugin_functional.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +source test/scripts/jenkins_test_setup_oss.sh + +cd test/plugin_functional/plugins/kbn_sample_panel_action; +if [[ ! -d "target" ]]; then + yarn build; +fi +cd -; + +pwd + +yarn run grunt run:pluginFunctionalTestsRelease --from=source; +yarn run grunt run:exampleFunctionalTestsRelease --from=source; +yarn run grunt run:interpreterFunctionalTestsRelease; diff --git a/test/scripts/jenkins_security_solution_cypress.sh b/test/scripts/jenkins_security_solution_cypress.sh old mode 100644 new mode 100755 index 204911a3eedaa..a5a1a2103801f --- a/test/scripts/jenkins_security_solution_cypress.sh +++ b/test/scripts/jenkins_security_solution_cypress.sh @@ -1,12 +1,6 @@ #!/usr/bin/env bash -source test/scripts/jenkins_test_setup.sh - -installDir="$PARENT_DIR/install/kibana" -destDir="${installDir}-${CI_WORKER_NUMBER}" -cp -R "$installDir" "$destDir" - -export KIBANA_INSTALL_DIR="$destDir" +source test/scripts/jenkins_test_setup_xpack.sh echo " -> Running security solution cypress tests" cd "$XPACK_DIR" diff --git a/test/scripts/jenkins_setup_parallel_workspace.sh b/test/scripts/jenkins_setup_parallel_workspace.sh new file mode 100755 index 0000000000000..5274d05572e71 --- /dev/null +++ b/test/scripts/jenkins_setup_parallel_workspace.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +set -e + +CURRENT_DIR=$(pwd) + +# Copy everything except node_modules into the current workspace +rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules +rsync -a ${WORKSPACE}/kibana/.??* . + +# Symlink all non-root, non-fixture node_modules into our new workspace +cd ${WORKSPACE}/kibana +find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%" +find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%" +cd "${CURRENT_DIR}" + +# Symlink all of the individual root-level node_modules into the node_modules/ directory +mkdir -p node_modules +ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/ +ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/ + +# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked +unlink node_modules/@kbn +unlink node_modules/css-loader +unlink node_modules/style-loader + +# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink +unlink node_modules/val-loader + +cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/ +cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/ +cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/ +cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/ diff --git a/test/scripts/jenkins_test_setup.sh b/test/scripts/jenkins_test_setup.sh old mode 100644 new mode 100755 index 49ee8a6b526ca..7cced76eb650f --- a/test/scripts/jenkins_test_setup.sh +++ b/test/scripts/jenkins_test_setup.sh @@ -14,3 +14,7 @@ trap 'post_work' EXIT export TEST_BROWSER_HEADLESS=1 source src/dev/ci_setup/setup_env.sh + +if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then + cp -R $WORKSPACE/kibana/.es ./ +fi diff --git a/test/scripts/jenkins_test_setup_oss.sh b/test/scripts/jenkins_test_setup_oss.sh old mode 100644 new mode 100755 index 7bbb867526384..b7eac33f35176 --- a/test/scripts/jenkins_test_setup_oss.sh +++ b/test/scripts/jenkins_test_setup_oss.sh @@ -2,10 +2,17 @@ source test/scripts/jenkins_test_setup.sh -if [[ -z "$CODE_COVERAGE" ]] ; then - installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)" - destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER} - cp -R "$installDir" "$destDir" +if [[ -z "$CODE_COVERAGE" ]]; then + + destDir="build/kibana-build-oss" + if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then + destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}" + fi + + if [[ ! -d $destDir ]]; then + mkdir -p $destDir + cp -pR "$WORKSPACE/kibana-build-oss/." $destDir/ + fi export KIBANA_INSTALL_DIR="$destDir" fi diff --git a/test/scripts/jenkins_test_setup_xpack.sh b/test/scripts/jenkins_test_setup_xpack.sh old mode 100644 new mode 100755 index a72e9749ebbd5..74a3de77e3a76 --- a/test/scripts/jenkins_test_setup_xpack.sh +++ b/test/scripts/jenkins_test_setup_xpack.sh @@ -3,11 +3,18 @@ source test/scripts/jenkins_test_setup.sh if [[ -z "$CODE_COVERAGE" ]]; then - installDir="$PARENT_DIR/install/kibana" - destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}" - cp -R "$installDir" "$destDir" - export KIBANA_INSTALL_DIR="$destDir" + destDir="build/kibana-build-xpack" + if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then + destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}" + fi + + if [[ ! -d $destDir ]]; then + mkdir -p $destDir + cp -pR "$WORKSPACE/kibana-build-xpack/." $destDir/ + fi + + export KIBANA_INSTALL_DIR="$(realpath $destDir)" cd "$XPACK_DIR" fi diff --git a/test/scripts/jenkins_xpack_build_kibana.sh b/test/scripts/jenkins_xpack_build_kibana.sh index 58ef6a42d3fe4..2452e2f5b8c58 100755 --- a/test/scripts/jenkins_xpack_build_kibana.sh +++ b/test/scripts/jenkins_xpack_build_kibana.sh @@ -3,21 +3,9 @@ cd "$KIBANA_DIR" source src/dev/ci_setup/setup_env.sh -echo " -> building examples separate from test plugins" -node scripts/build_kibana_platform_plugins \ - --examples \ - --verbose; - -echo " -> building test plugins" -node scripts/build_kibana_platform_plugins \ - --no-examples \ - --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ - --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \ - --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \ - --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \ - --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \ - --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \ - --verbose; +if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then + ./test/scripts/jenkins_xpack_build_plugins.sh +fi # doesn't persist, also set in kibanaPipeline.groovy export KBN_NP_PLUGINS_BUILT=true @@ -42,7 +30,10 @@ if [[ -z "$CODE_COVERAGE" ]] ; then cd "$KIBANA_DIR" node scripts/build --debug --no-oss linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')" - installDir="$PARENT_DIR/install/kibana" + installDir="$KIBANA_DIR/install/kibana" mkdir -p "$installDir" tar -xzf "$linuxBuild" -C "$installDir" --strip=1 + + mkdir -p "$WORKSPACE/kibana-build-xpack" + cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/ fi diff --git a/test/scripts/jenkins_xpack_build_plugins.sh b/test/scripts/jenkins_xpack_build_plugins.sh new file mode 100755 index 0000000000000..fea30c547bd5f --- /dev/null +++ b/test/scripts/jenkins_xpack_build_plugins.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +echo " -> building examples separate from test plugins" +node scripts/build_kibana_platform_plugins \ + --workers 12 \ + --examples \ + --verbose + +echo " -> building kibana platform plugins" +node scripts/build_kibana_platform_plugins \ + --no-examples \ + --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \ + --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \ + --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \ + --workers 12 \ + --verbose diff --git a/test/scripts/jenkins_xpack_page_load_metrics.sh b/test/scripts/jenkins_xpack_page_load_metrics.sh old mode 100644 new mode 100755 diff --git a/test/scripts/lint/eslint.sh b/test/scripts/lint/eslint.sh new file mode 100755 index 0000000000000..c3211300b96c5 --- /dev/null +++ b/test/scripts/lint/eslint.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:eslint diff --git a/test/scripts/lint/sasslint.sh b/test/scripts/lint/sasslint.sh new file mode 100755 index 0000000000000..b9c683bcb049e --- /dev/null +++ b/test/scripts/lint/sasslint.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:sasslint diff --git a/test/scripts/test/api_integration.sh b/test/scripts/test/api_integration.sh new file mode 100755 index 0000000000000..152c97a3ca7df --- /dev/null +++ b/test/scripts/test/api_integration.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:apiIntegrationTests diff --git a/test/scripts/test/jest_integration.sh b/test/scripts/test/jest_integration.sh new file mode 100755 index 0000000000000..73dbbddfb38f6 --- /dev/null +++ b/test/scripts/test/jest_integration.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:test_jest_integration diff --git a/test/scripts/test/jest_unit.sh b/test/scripts/test/jest_unit.sh new file mode 100755 index 0000000000000..e25452698cebc --- /dev/null +++ b/test/scripts/test/jest_unit.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:test_jest diff --git a/test/scripts/test/karma_ci.sh b/test/scripts/test/karma_ci.sh new file mode 100755 index 0000000000000..e9985300ba19d --- /dev/null +++ b/test/scripts/test/karma_ci.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:test_karma_ci diff --git a/test/scripts/test/mocha.sh b/test/scripts/test/mocha.sh new file mode 100755 index 0000000000000..43c00f0a09dcf --- /dev/null +++ b/test/scripts/test/mocha.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:mocha diff --git a/test/scripts/test/xpack_jest_unit.sh b/test/scripts/test/xpack_jest_unit.sh new file mode 100755 index 0000000000000..93d70ec355391 --- /dev/null +++ b/test/scripts/test/xpack_jest_unit.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +cd x-pack +checks-reporter-with-killswitch "X-Pack Jest" node --max-old-space-size=6144 scripts/jest --ci --verbose --maxWorkers=10 diff --git a/test/scripts/test/xpack_karma.sh b/test/scripts/test/xpack_karma.sh new file mode 100755 index 0000000000000..9078f01f1b870 --- /dev/null +++ b/test/scripts/test/xpack_karma.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +cd x-pack +checks-reporter-with-killswitch "X-Pack Karma Tests" yarn test:karma diff --git a/test/scripts/test/xpack_list_cyclic_dependency.sh b/test/scripts/test/xpack_list_cyclic_dependency.sh new file mode 100755 index 0000000000000..493fe9f58d322 --- /dev/null +++ b/test/scripts/test/xpack_list_cyclic_dependency.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +cd x-pack +checks-reporter-with-killswitch "X-Pack List cyclic dependency test" node plugins/lists/scripts/check_circular_deps diff --git a/test/scripts/test/xpack_siem_cyclic_dependency.sh b/test/scripts/test/xpack_siem_cyclic_dependency.sh new file mode 100755 index 0000000000000..b21301f25ad08 --- /dev/null +++ b/test/scripts/test/xpack_siem_cyclic_dependency.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +cd x-pack +checks-reporter-with-killswitch "X-Pack SIEM cyclic dependency test" node plugins/security_solution/scripts/check_circular_deps diff --git a/vars/catchErrors.groovy b/vars/catchErrors.groovy index 460a90b8ec0c0..2a1b55d832606 100644 --- a/vars/catchErrors.groovy +++ b/vars/catchErrors.groovy @@ -1,8 +1,15 @@ // Basically, this is a shortcut for catchError(catchInterruptions: false) {} // By default, catchError will swallow aborts/timeouts, which we almost never want +// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps def call(Map params = [:], Closure closure) { - params.catchInterruptions = false - return catchError(params, closure) + try { + closure() + } catch (ex) { + params.catchInterruptions = false + catchError(params) { + throw ex + } + } } return this diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index f3fc5f84583c9..0f11204311451 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -16,27 +16,34 @@ def withPostBuildReporting(Closure closure) { } } -def functionalTestProcess(String name, Closure closure) { - return { processNumber -> - def kibanaPort = "61${processNumber}1" - def esPort = "61${processNumber}2" - def esTransportPort = "61${processNumber}3" - def ingestManagementPackageRegistryPort = "61${processNumber}4" +def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) { + // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue + def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER - withEnv([ - "CI_PARALLEL_PROCESS_NUMBER=${processNumber}", - "TEST_KIBANA_HOST=localhost", - "TEST_KIBANA_PORT=${kibanaPort}", - "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}", - "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}", - "TEST_ES_TRANSPORT_PORT=${esTransportPort}", - "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}", - "IS_PIPELINE_JOB=1", - "JOB=${name}", - "KBN_NP_PLUGINS_BUILT=true", - ]) { - closure() - } + def kibanaPort = "61${parallelId}1" + def esPort = "61${parallelId}2" + def esTransportPort = "61${parallelId}3" + def ingestManagementPackageRegistryPort = "61${parallelId}4" + + withEnv([ + "CI_GROUP=${parallelId}", + "REMOVE_KIBANA_INSTALL_DIR=1", + "CI_PARALLEL_PROCESS_NUMBER=${parallelId}", + "TEST_KIBANA_HOST=localhost", + "TEST_KIBANA_PORT=${kibanaPort}", + "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}", + "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}", + "TEST_ES_TRANSPORT_PORT=${esTransportPort}", + "KBN_NP_PLUGINS_BUILT=true", + "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}", + ] + additionalEnvs) { + closure() + } +} + +def functionalTestProcess(String name, Closure closure) { + return { + withFunctionalTestEnv(["JOB=${name}"], closure) } } @@ -100,11 +107,17 @@ def withGcsArtifactUpload(workerName, closure) { def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" def ARTIFACT_PATTERNS = [ 'target/kibana-*', + 'target/test-metrics/*', 'target/kibana-security-solution/**/*.png', 'target/junit/**/*', - 'test/**/screenshots/**/*.png', + 'target/test-suites-ci-plan.json', + 'test/**/screenshots/session/*.png', + 'test/**/screenshots/failure/*.png', + 'test/**/screenshots/diff/*.png', 'test/functional/failure_debug/html/*.html', - 'x-pack/test/**/screenshots/**/*.png', + 'x-pack/test/**/screenshots/session/*.png', + 'x-pack/test/**/screenshots/failure/*.png', + 'x-pack/test/**/screenshots/diff/*.png', 'x-pack/test/functional/failure_debug/html/*.html', 'x-pack/test/functional/apps/reporting/reports/session/*.pdf', ] @@ -119,6 +132,12 @@ def withGcsArtifactUpload(workerName, closure) { ARTIFACT_PATTERNS.each { pattern -> uploadGcsArtifact(uploadPrefix, pattern) } + + dir(env.WORKSPACE) { + ARTIFACT_PATTERNS.each { pattern -> + uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}") + } + } } } }) @@ -131,6 +150,11 @@ def withGcsArtifactUpload(workerName, closure) { def publishJunit() { junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) + + // junit() is weird about paths for security reasons, so we need to actually change to an upper directory first + dir(env.WORKSPACE) { + junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) + } } def sendMail() { @@ -194,12 +218,16 @@ def doSetup() { } } -def buildOss() { - runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana") +def buildOss(maxWorkers = '') { + withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) { + runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana") + } } -def buildXpack() { - runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana") +def buildXpack(maxWorkers = '') { + withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) { + runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana") + } } def runErrorReporter() { @@ -248,6 +276,100 @@ def call(Map params = [:], Closure closure) { } } +// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace +// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh +def withCiTaskQueue(Map options = [:], Closure closure) { + def setupClosure = { + // This can't use runbld, because it expects the source to be there, which isn't yet + bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process") + } + + def config = [parallel: 24, setup: setupClosure] + options + + withTaskQueue(config) { + closure.call() + } +} + +def scriptTask(description, script) { + return { + withFunctionalTestEnv { + runbld(script, description) + } + } +} + +def scriptTaskDocker(description, script) { + return { + withDocker(scriptTask(description, script)) + } +} + +def buildDocker() { + sh( + script: """ + cp /usr/local/bin/runbld .ci/ + cp /usr/local/bin/bash_standard_lib.sh .ci/ + cd .ci + docker build -t kibana-ci -f ./Dockerfile . + """, + label: 'Build CI Docker image' + ) +} + +def withDocker(Closure closure) { + docker + .image('kibana-ci') + .inside( + "-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4", + closure + ) +} + +def buildOssPlugins() { + runbld('./test/scripts/jenkins_build_plugins.sh', 'Build OSS Plugins') +} + +def buildXpackPlugins() { + runbld('./test/scripts/jenkins_xpack_build_plugins.sh', 'Build X-Pack Plugins') +} + +def withTasks(Map params = [worker: [:]], Closure closure) { + catchErrors { + def config = [name: 'ci-worker', size: 'xxl', ramDisk: true] + (params.worker ?: [:]) + + workers.ci(config) { + withCiTaskQueue(parallel: 24) { + parallel([ + docker: { + retry(2) { + buildDocker() + } + }, + + // There are integration tests etc that require the plugins to be built first, so let's go ahead and build them before set up the parallel workspaces + ossPlugins: { buildOssPlugins() }, + xpackPlugins: { buildXpackPlugins() }, + ]) + + catchErrors { + closure() + } + } + } + } +} + +def allCiTasks() { + withTasks { + tasks.check() + tasks.lint() + tasks.test() + tasks.functionalOss() + tasks.functionalXpack() + } +} + def pipelineLibraryTests() { whenChanged(['vars/', '.ci/pipeline-library/']) { workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) { @@ -258,5 +380,4 @@ def pipelineLibraryTests() { } } - return this diff --git a/vars/task.groovy b/vars/task.groovy new file mode 100644 index 0000000000000..0c07b519b6fef --- /dev/null +++ b/vars/task.groovy @@ -0,0 +1,5 @@ +def call(Closure closure) { + withTaskQueue.addTask(closure) +} + +return this diff --git a/vars/tasks.groovy b/vars/tasks.groovy new file mode 100644 index 0000000000000..3ff9a7b4850ae --- /dev/null +++ b/vars/tasks.groovy @@ -0,0 +1,124 @@ +def call(List closures) { + withTaskQueue.addTasks(closures) +} + +def check() { + tasks([ + kibanaPipeline.scriptTask('Check TypeScript Projects', 'test/scripts/checks/ts_projects.sh'), + kibanaPipeline.scriptTask('Check Doc API Changes', 'test/scripts/checks/doc_api_changes.sh'), + kibanaPipeline.scriptTask('Check Types', 'test/scripts/checks/type_check.sh'), + kibanaPipeline.scriptTask('Check i18n', 'test/scripts/checks/i18n.sh'), + kibanaPipeline.scriptTask('Check File Casing', 'test/scripts/checks/file_casing.sh'), + kibanaPipeline.scriptTask('Check Lockfile Symlinks', 'test/scripts/checks/lock_file_symlinks.sh'), + kibanaPipeline.scriptTask('Check Licenses', 'test/scripts/checks/licenses.sh'), + kibanaPipeline.scriptTask('Verify Dependency Versions', 'test/scripts/checks/verify_dependency_versions.sh'), + kibanaPipeline.scriptTask('Verify NOTICE', 'test/scripts/checks/verify_notice.sh'), + kibanaPipeline.scriptTask('Test Projects', 'test/scripts/checks/test_projects.sh'), + kibanaPipeline.scriptTask('Test Hardening', 'test/scripts/checks/test_hardening.sh'), + ]) +} + +def lint() { + tasks([ + kibanaPipeline.scriptTask('Lint: eslint', 'test/scripts/lint/eslint.sh'), + kibanaPipeline.scriptTask('Lint: sasslint', 'test/scripts/lint/sasslint.sh'), + ]) +} + +def test() { + tasks([ + // These 4 tasks require isolation because of hard-coded, conflicting ports and such, so let's use Docker here + kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh'), + kibanaPipeline.scriptTaskDocker('Mocha Tests', 'test/scripts/test/mocha.sh'), + kibanaPipeline.scriptTaskDocker('Karma CI Tests', 'test/scripts/test/karma_ci.sh'), + kibanaPipeline.scriptTaskDocker('X-Pack Karma Tests', 'test/scripts/test/xpack_karma.sh'), + + kibanaPipeline.scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh'), + kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'), + kibanaPipeline.scriptTask('X-Pack SIEM cyclic dependency', 'test/scripts/test/xpack_siem_cyclic_dependency.sh'), + kibanaPipeline.scriptTask('X-Pack List cyclic dependency', 'test/scripts/test/xpack_list_cyclic_dependency.sh'), + kibanaPipeline.scriptTask('X-Pack Jest Unit Tests', 'test/scripts/test/xpack_jest_unit.sh'), + ]) +} + +def functionalOss(Map params = [:]) { + def config = params ?: [ + ciGroups: true, + firefox: !githubPr.isPr(), + accessibility: true, + pluginFunctional: true, + visualRegression: false + ] + + task { + kibanaPipeline.buildOss(6) + + if (config.ciGroups) { + def ciGroups = 1..12 + tasks(ciGroups.collect { kibanaPipeline.ossCiGroupProcess(it) }) + } + + if (config.firefox) { + task(kibanaPipeline.functionalTestProcess('oss-firefox', './test/scripts/jenkins_firefox_smoke.sh')) + } + + if (config.accessibility) { + task(kibanaPipeline.functionalTestProcess('oss-accessibility', './test/scripts/jenkins_accessibility.sh')) + } + + if (config.pluginFunctional) { + task(kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh')) + } + + if (config.visualRegression) { + task(kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')) + } + } +} + +def functionalXpack(Map params = [:]) { + def config = params ?: [ + ciGroups: true, + firefox: !githubPr.isPr(), + accessibility: true, + pluginFunctional: true, + savedObjectsFieldMetrics: true, + pageLoadMetrics: false, + visualRegression: false, + ] + + task { + kibanaPipeline.buildXpack(10) + + if (config.ciGroups) { + def ciGroups = 1..10 + tasks(ciGroups.collect { kibanaPipeline.xpackCiGroupProcess(it) }) + } + + if (config.firefox) { + task(kibanaPipeline.functionalTestProcess('xpack-firefox', './test/scripts/jenkins_xpack_firefox_smoke.sh')) + } + + if (config.accessibility) { + task(kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh')) + } + + if (config.visualRegression) { + task(kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')) + } + + if (config.pageLoadMetrics) { + task(kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh')) + } + + if (config.savedObjectsFieldMetrics) { + task(kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh')) + } + + whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/']) { + task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')) + } + } +} + +return this diff --git a/vars/withTaskQueue.groovy b/vars/withTaskQueue.groovy new file mode 100644 index 0000000000000..8132d6264744f --- /dev/null +++ b/vars/withTaskQueue.groovy @@ -0,0 +1,154 @@ +import groovy.transform.Field + +public static @Field TASK_QUEUES = [:] +public static @Field TASK_QUEUES_COUNTER = 0 + +/** + withTaskQueue creates a queue of "tasks" (just plain closures to execute), and executes them with your desired level of concurrency. + This way, you can define, for example, 40 things that need to execute, then only allow 10 of them to execute at once. + + Each "process" will execute in a separate, unique, empty directory. + If you want each process to have a bootstrapped kibana repo, check out kibanaPipeline.withCiTaskQueue + + Using the queue currently requires an agent/worker. + + Usage: + + withTaskQueue(parallel: 10) { + task { print "This is a task" } + + // This is the same as calling task() multiple times + tasks([ { print "Another task" }, { print "And another task" } ]) + + // Tasks can queue up subsequent tasks + task { + buildThing() + task { print "I depend on buildThing()" } + } + } + + You can also define a setup task that each process should execute one time before executing tasks: + withTaskQueue(parallel: 10, setup: { sh "my-setup-scrupt.sh" }) { + ... + } + +*/ +def call(Map options = [:], Closure closure) { + def config = [ parallel: 10 ] + options + def counter = ++TASK_QUEUES_COUNTER + + // We're basically abusing withEnv() to create a "scope" for all steps inside of a withTaskQueue block + // This way, we could have multiple task queue instances in the same pipeline + withEnv(["TASK_QUEUE_ID=${counter}"]) { + withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID] = [ + tasks: [], + tmpFile: sh(script: 'mktemp', returnStdout: true).trim() + ] + + closure.call() + + def processesExecuting = 0 + def processes = [:] + def iterationId = 0 + + for(def i = 1; i <= config.parallel; i++) { + def j = i + processes["task-queue-process-${j}"] = { + catchErrors { + withEnv([ + "TASK_QUEUE_PROCESS_ID=${j}", + "TASK_QUEUE_ITERATION_ID=${++iterationId}" + ]) { + dir("${WORKSPACE}/parallel/${j}/kibana") { + if (config.setup) { + config.setup.call(j) + } + + def isDone = false + while(!isDone) { // TODO some kind of timeout? + catchErrors { + if (!getTasks().isEmpty()) { + processesExecuting++ + catchErrors { + def task + try { + task = getTasks().pop() + } catch (java.util.NoSuchElementException ex) { + return + } + + task.call() + } + processesExecuting-- + // If a task finishes, and no new tasks were queued up, and nothing else is executing + // Then all of the processes should wake up and exit + if (processesExecuting < 1 && getTasks().isEmpty()) { + taskNotify() + } + return + } + + if (processesExecuting > 0) { + taskSleep() + return + } + + // Queue is empty, no processes are executing + isDone = true + } + } + } + } + } + } + } + parallel(processes) + } +} + +// If we sleep in a loop using Groovy code, Pipeline Steps is flooded with Sleep steps +// So, instead, we just watch a file and `touch` it whenever something happens that could modify the queue +// There's a 20 minute timeout just in case something goes wrong, +// in which case this method will get called again if the process is actually supposed to be waiting. +def taskSleep() { + sh(script: """#!/bin/bash + TIMESTAMP=\$(date '+%s' -d "0 seconds ago") + for (( i=1; i<=240; i++ )) + do + if [ "\$(stat -c %Y '${getTmpFile()}')" -ge "\$TIMESTAMP" ] + then + break + else + sleep 5 + if [[ \$i == 240 ]]; then + echo "Waited for new tasks for 20 minutes, exiting in case something went wrong" + fi + fi + done + """, label: "Waiting for new tasks...") +} + +// Used to let the task queue processes know that either a new task has been queued up, or work is complete +def taskNotify() { + sh "touch '${getTmpFile()}'" +} + +def getTasks() { + return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tasks +} + +def getTmpFile() { + return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tmpFile +} + +def addTask(Closure closure) { + getTasks() << closure + taskNotify() +} + +def addTasks(List closures) { + closures.reverse().each { + getTasks() << it + } + taskNotify() +} diff --git a/vars/workers.groovy b/vars/workers.groovy index 8b7e8525a7ce3..2e94ce12f34c0 100644 --- a/vars/workers.groovy +++ b/vars/workers.groovy @@ -13,6 +13,8 @@ def label(size) { return 'docker && tests-l' case 'xl': return 'docker && tests-xl' + case 'xl-highmem': + return 'docker && tests-xl-highmem' case 'xxl': return 'docker && tests-xxl' } @@ -55,6 +57,11 @@ def base(Map params, Closure closure) { } } + sh( + script: "mkdir -p ${env.WORKSPACE}/tmp", + label: "Create custom temp directory" + ) + def checkoutInfo = [:] if (config.scm) { @@ -89,6 +96,7 @@ def base(Map params, Closure closure) { "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}", "TEST_BROWSER_HEADLESS=1", "GIT_BRANCH=${checkoutInfo.branch}", + "TMPDIR=${env.WORKSPACE}/tmp", // For Chrome and anything else that respects it ]) { withCredentials([ string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'), @@ -167,7 +175,9 @@ def parallelProcesses(Map params) { sleep(delay) } - processClosure(processNumber) + withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}"]) { + processClosure() + } } } diff --git a/x-pack/plugins/canvas/.storybook/storyshots.test.js b/x-pack/plugins/canvas/.storybook/storyshots.test.js index b9fe0914b3698..e3217ad4dbe58 100644 --- a/x-pack/plugins/canvas/.storybook/storyshots.test.js +++ b/x-pack/plugins/canvas/.storybook/storyshots.test.js @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import fs from 'fs'; import path from 'path'; import moment from 'moment'; import 'moment-timezone'; @@ -76,6 +77,12 @@ import { RenderedElement } from '../shareable_runtime/components/rendered_elemen jest.mock('../shareable_runtime/components/rendered_element'); RenderedElement.mockImplementation(() => 'RenderedElement'); +// Some of the code requires that this directory exists, but the tests don't actually require any css to be present +const cssDir = path.resolve(__dirname, '../../../../built_assets/css'); +if (!fs.existsSync(cssDir)) { + fs.mkdirSync(cssDir, { recursive: true }); +} + addSerializer(styleSheetSerializer); // Initialize Storyshots and build the Jest Snapshots From 4228b50303031dd3045a8934a6786761fabde582 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 9 Jul 2020 11:07:10 -0400 Subject: [PATCH 02/22] Revert "temporarily disable firefox functional tests in PRs (#71116)" This reverts commit 54bd07f81b6fc75ede018b9632e06c5bce616c4c. --- vars/tasks.groovy | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/vars/tasks.groovy b/vars/tasks.groovy index 3ff9a7b4850ae..9de4c78322d3e 100644 --- a/vars/tasks.groovy +++ b/vars/tasks.groovy @@ -42,13 +42,7 @@ def test() { } def functionalOss(Map params = [:]) { - def config = params ?: [ - ciGroups: true, - firefox: !githubPr.isPr(), - accessibility: true, - pluginFunctional: true, - visualRegression: false - ] + def config = params ?: [ciGroups: true, firefox: true, accessibility: true, pluginFunctional: true, visualRegression: false] task { kibanaPipeline.buildOss(6) @@ -79,7 +73,7 @@ def functionalOss(Map params = [:]) { def functionalXpack(Map params = [:]) { def config = params ?: [ ciGroups: true, - firefox: !githubPr.isPr(), + firefox: true, accessibility: true, pluginFunctional: true, savedObjectsFieldMetrics: true, From 370c26b9a1d2c2c7743d8e69a551394d1ea8f806 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 9 Jul 2020 11:07:22 -0400 Subject: [PATCH 03/22] Revert "[savedObjects field count] run in baseline job (#70999)" This reverts commit 53ee7a762d4bb5790ba4c8bd9a71ceade39a9eab. --- test/scripts/jenkins_xpack_visual_regression.sh | 5 +---- vars/tasks.groovy | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/test/scripts/jenkins_xpack_visual_regression.sh b/test/scripts/jenkins_xpack_visual_regression.sh index 930d4a74345d9..36bf3409a5421 100755 --- a/test/scripts/jenkins_xpack_visual_regression.sh +++ b/test/scripts/jenkins_xpack_visual_regression.sh @@ -5,7 +5,7 @@ source "$KIBANA_DIR/src/dev/ci_setup/setup_percy.sh" echo " -> building and extracting default Kibana distributable" cd "$KIBANA_DIR" -node scripts/build --debug +node scripts/build --debug --no-oss linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')" installDir="$PARENT_DIR/install/kibana" mkdir -p "$installDir" @@ -21,6 +21,3 @@ yarn percy exec -t 10000 -- -- \ # cd "$KIBANA_DIR" # source "test/scripts/jenkins_xpack_page_load_metrics.sh" - -cd "$XPACK_DIR" -source "$KIBANA_DIR/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh" diff --git a/vars/tasks.groovy b/vars/tasks.groovy index 9de4c78322d3e..4e8fcaad795cd 100644 --- a/vars/tasks.groovy +++ b/vars/tasks.groovy @@ -76,7 +76,7 @@ def functionalXpack(Map params = [:]) { firefox: true, accessibility: true, pluginFunctional: true, - savedObjectsFieldMetrics: true, + savedObjectsFieldMetrics:true, pageLoadMetrics: false, visualRegression: false, ] From e262c565faf2a3981c3e4034a99f7324ae676d67 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 9 Jul 2020 11:46:02 -0400 Subject: [PATCH 04/22] Ensure parallel process workspaces get junit reports processed as well --- vars/kibanaPipeline.groovy | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 0f11204311451..8261ee7ba2620 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -8,6 +8,13 @@ def withPostBuildReporting(Closure closure) { catchErrors { runbld.junit() + + def parallelWorkspace = "${env.WORKSPACE}/parallel" + if (fileExists(parallelWorkspace)) { + dir(parallelWorkspace) { + runbld.junit() + } + } } catchErrors { From e24d46f2819bba855f7fac054f5a83a3b7cbc8bf Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 9 Jul 2020 11:50:09 -0400 Subject: [PATCH 05/22] Temporarily disable xpack firefox --- vars/tasks.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/tasks.groovy b/vars/tasks.groovy index 4e8fcaad795cd..319e825b0378f 100644 --- a/vars/tasks.groovy +++ b/vars/tasks.groovy @@ -73,7 +73,7 @@ def functionalOss(Map params = [:]) { def functionalXpack(Map params = [:]) { def config = params ?: [ ciGroups: true, - firefox: true, + firefox: false, accessibility: true, pluginFunctional: true, savedObjectsFieldMetrics:true, From 28a7a0ef514dd71c5a547e3ce429702fe92fe3f8 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 27 Jul 2020 12:01:36 -0400 Subject: [PATCH 06/22] Run post-build stuff in each parallel workspace --- vars/kibanaPipeline.groovy | 49 ++++++++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 13 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 75c50b7b2e2bb..81bbf3f3857c4 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -2,24 +2,47 @@ def withPostBuildReporting(Closure closure) { try { closure() } finally { - catchErrors { - runErrorReporter() - } - - catchErrors { - runbld.junit() + postBuildReporting() + + def parallelWorkspace = "${env.WORKSPACE}/parallel" + if (fileExists(parallelWorkspace)) { + dir(parallelWorkspace) { + def workspaceTasks = [:] + + // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace + findFiles(glob: '*/kibana/package.json') + .collect { + // get the paths to the kibana directories for the parallel workspaces + return it.path.tokenize('/').dropRight(1).join('/') + } + .each { workspaceDir -> + workspaceTasks[workspaceDir] = { + dir(workspaceDir) { + print "postBuildReporting: ${workspaceDir}" + postBuildReporting() + } + } + } - def parallelWorkspace = "${env.WORKSPACE}/parallel" - if (fileExists(parallelWorkspace)) { - dir(parallelWorkspace) { - runbld.junit() + if (workspaceTasks) { + parallel(workspaceTasks) } } } + } +} - catchErrors { - publishJunit() - } +def postBuildReporting() { + catchErrors { + runErrorReporter() + } + + catchErrors { + runbld.junit() + } + + catchErrors { + publishJunit() } } From c515dfa54ef2a69f5b985c36a333bc82a58f8ec7 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 27 Jul 2020 13:56:15 -0400 Subject: [PATCH 07/22] Some parallel workspace reporting fixes --- vars/kibanaPipeline.groovy | 15 +++++---------- vars/runbld.groovy | 2 +- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 81bbf3f3857c4..73eae004bce6c 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -2,6 +2,10 @@ def withPostBuildReporting(Closure closure) { try { closure() } finally { + catchErrors { + runErrorReporter() + } + postBuildReporting() def parallelWorkspace = "${env.WORKSPACE}/parallel" @@ -33,10 +37,6 @@ def withPostBuildReporting(Closure closure) { } def postBuildReporting() { - catchErrors { - runErrorReporter() - } - catchErrors { runbld.junit() } @@ -182,11 +182,6 @@ def withGcsArtifactUpload(workerName, closure) { def publishJunit() { junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) - - // junit() is weird about paths for security reasons, so we need to actually change to an upper directory first - dir(env.WORKSPACE) { - junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) - } } def sendMail() { @@ -275,7 +270,7 @@ def runErrorReporter() { bash( """ source src/dev/ci_setup/setup_env.sh - node scripts/report_failed_tests ${dryRun} target/junit/**/*.xml + node scripts/report_failed_tests ${dryRun} target/junit/**/*.xml ../parallel/*/kibana/target/junit/**/*.xml """, "Report failed tests, if necessary" ) diff --git a/vars/runbld.groovy b/vars/runbld.groovy index e52bc244c65cb..37a87f6a7bbb4 100644 --- a/vars/runbld.groovy +++ b/vars/runbld.groovy @@ -9,7 +9,7 @@ def call(script, label, enableJunitProcessing = false) { def junit() { sh( - script: "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh", + script: "/usr/local/bin/runbld --config ${env.WORKSPACE}/kibana/.ci/runbld_junit.yml -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh", label: "Process JUnit reports with runbld" ) } From db60db77278cb8b62011d9af7bafda93e3588e31 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 27 Jul 2020 14:33:29 -0400 Subject: [PATCH 08/22] Missed a file --- .ci/runbld_junit.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .ci/runbld_junit.yml diff --git a/.ci/runbld_junit.yml b/.ci/runbld_junit.yml new file mode 100644 index 0000000000000..e77cf7cc4bfe1 --- /dev/null +++ b/.ci/runbld_junit.yml @@ -0,0 +1,4 @@ +profiles: +- ".*": # Match any job + tests: + junit-filename-pattern: target/junit/**/*.xml From 4bacdc54c50f0d22f7de76926d7f5e1eb886bd35 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 27 Jul 2020 14:35:21 -0400 Subject: [PATCH 09/22] Skip failing snapshot tests for now --- packages/kbn-dev-utils/src/run/help.test.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/kbn-dev-utils/src/run/help.test.ts b/packages/kbn-dev-utils/src/run/help.test.ts index 27be7ad28b81a..300f1cba7eb7d 100644 --- a/packages/kbn-dev-utils/src/run/help.test.ts +++ b/packages/kbn-dev-utils/src/run/help.test.ts @@ -57,7 +57,7 @@ const barCommand: Command = { usage: 'bar [...names]', }; -describe('getHelp()', () => { +describe.skip('getHelp()', () => { it('returns the expected output', () => { expect( getHelp({ @@ -95,7 +95,7 @@ describe('getHelp()', () => { }); }); -describe('getCommandLevelHelp()', () => { +describe.skip('getCommandLevelHelp()', () => { it('returns the expected output', () => { expect( getCommandLevelHelp({ @@ -141,7 +141,7 @@ describe('getCommandLevelHelp()', () => { }); }); -describe('getHelpForAllCommands()', () => { +describe.skip('getHelpForAllCommands()', () => { it('returns the expected output', () => { expect( getHelpForAllCommands({ From f7e82579306dbdaad5b7ec2b420430fd5d83cb01 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Tue, 28 Jul 2020 14:17:15 -0400 Subject: [PATCH 10/22] Remove runbld junit config, not possible --- .ci/runbld_junit.yml | 4 ---- vars/runbld.groovy | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) delete mode 100644 .ci/runbld_junit.yml diff --git a/.ci/runbld_junit.yml b/.ci/runbld_junit.yml deleted file mode 100644 index e77cf7cc4bfe1..0000000000000 --- a/.ci/runbld_junit.yml +++ /dev/null @@ -1,4 +0,0 @@ -profiles: -- ".*": # Match any job - tests: - junit-filename-pattern: target/junit/**/*.xml diff --git a/vars/runbld.groovy b/vars/runbld.groovy index 37a87f6a7bbb4..e52bc244c65cb 100644 --- a/vars/runbld.groovy +++ b/vars/runbld.groovy @@ -9,7 +9,7 @@ def call(script, label, enableJunitProcessing = false) { def junit() { sh( - script: "/usr/local/bin/runbld --config ${env.WORKSPACE}/kibana/.ci/runbld_junit.yml -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh", + script: "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh", label: "Process JUnit reports with runbld" ) } From 65abd3272153778fbce1a05a1f2689756cfc0173 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 11:42:34 -0400 Subject: [PATCH 11/22] Try adding a stage to make test results UI better --- vars/kibanaPipeline.groovy | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 73eae004bce6c..b934e4443911f 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -22,8 +22,9 @@ def withPostBuildReporting(Closure closure) { .each { workspaceDir -> workspaceTasks[workspaceDir] = { dir(workspaceDir) { - print "postBuildReporting: ${workspaceDir}" - postBuildReporting() + stage('Post-Build') { // This stage is just to massage the Test Result UI in Jenkins + postBuildReporting() + } } } } From ae8410db965b0eba76f0aeb322fde19f661969bd Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 12:04:29 -0400 Subject: [PATCH 12/22] Add missing tasks from intake --- test/scripts/checks/telemetry.sh | 5 +++++ test/scripts/test/safer_lodash_set.sh | 5 +++++ vars/tasks.groovy | 2 ++ 3 files changed, 12 insertions(+) create mode 100755 test/scripts/checks/telemetry.sh create mode 100755 test/scripts/test/safer_lodash_set.sh diff --git a/test/scripts/checks/telemetry.sh b/test/scripts/checks/telemetry.sh new file mode 100755 index 0000000000000..c74ec295b385c --- /dev/null +++ b/test/scripts/checks/telemetry.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:telemetryCheck diff --git a/test/scripts/test/safer_lodash_set.sh b/test/scripts/test/safer_lodash_set.sh new file mode 100755 index 0000000000000..4d7f9c28210d1 --- /dev/null +++ b/test/scripts/test/safer_lodash_set.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +yarn run grunt run:test_package_safer_lodash_set diff --git a/vars/tasks.groovy b/vars/tasks.groovy index 912c94e39d98c..f73238e2a8e14 100644 --- a/vars/tasks.groovy +++ b/vars/tasks.groovy @@ -4,6 +4,7 @@ def call(List closures) { def check() { tasks([ + kibanaPipeline.scriptTask('Check Telemetry Schema', 'test/scripts/checks/telemetry.sh'), kibanaPipeline.scriptTask('Check TypeScript Projects', 'test/scripts/checks/ts_projects.sh'), kibanaPipeline.scriptTask('Check Doc API Changes', 'test/scripts/checks/doc_api_changes.sh'), kibanaPipeline.scriptTask('Check Types', 'test/scripts/checks/type_check.sh'), @@ -33,6 +34,7 @@ def test() { kibanaPipeline.scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh'), kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'), + kibanaPipeline.scriptTask('@elastic/safer-lodash-set Tests', 'test/scripts/test/safer_lodash_set.sh'), kibanaPipeline.scriptTask('X-Pack SIEM cyclic dependency', 'test/scripts/test/xpack_siem_cyclic_dependency.sh'), kibanaPipeline.scriptTask('X-Pack List cyclic dependency', 'test/scripts/test/xpack_list_cyclic_dependency.sh'), kibanaPipeline.scriptTask('X-Pack Jest Unit Tests', 'test/scripts/test/xpack_jest_unit.sh'), From 2083badb61ed32e7ca6e74cf779a0cc1f219a1f1 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 14:23:04 -0400 Subject: [PATCH 13/22] Only run runbld as a parallel post-build task --- vars/kibanaPipeline.groovy | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index b934e4443911f..33a25466dd25f 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -6,7 +6,9 @@ def withPostBuildReporting(Closure closure) { runErrorReporter() } - postBuildReporting() + catchErrors { + publishJunit() + } def parallelWorkspace = "${env.WORKSPACE}/parallel" if (fileExists(parallelWorkspace)) { @@ -22,8 +24,8 @@ def withPostBuildReporting(Closure closure) { .each { workspaceDir -> workspaceTasks[workspaceDir] = { dir(workspaceDir) { - stage('Post-Build') { // This stage is just to massage the Test Result UI in Jenkins - postBuildReporting() + catchErrors { + runbld.junit() } } } @@ -37,16 +39,6 @@ def withPostBuildReporting(Closure closure) { } } -def postBuildReporting() { - catchErrors { - runbld.junit() - } - - catchErrors { - publishJunit() - } -} - def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) { // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER @@ -183,6 +175,10 @@ def withGcsArtifactUpload(workerName, closure) { def publishJunit() { junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) + + dir(env.WORKSPACE) { + junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) + } } def sendMail() { From 589170b8a25323448006af54fd554e3f8eb112bb Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 14:23:41 -0400 Subject: [PATCH 14/22] Add catchErrors block --- vars/kibanaPipeline.groovy | 40 ++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 33a25466dd25f..64ea136b7b53c 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -10,29 +10,31 @@ def withPostBuildReporting(Closure closure) { publishJunit() } - def parallelWorkspace = "${env.WORKSPACE}/parallel" - if (fileExists(parallelWorkspace)) { - dir(parallelWorkspace) { - def workspaceTasks = [:] - - // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace - findFiles(glob: '*/kibana/package.json') - .collect { - // get the paths to the kibana directories for the parallel workspaces - return it.path.tokenize('/').dropRight(1).join('/') - } - .each { workspaceDir -> - workspaceTasks[workspaceDir] = { - dir(workspaceDir) { - catchErrors { - runbld.junit() + catchErrors { + def parallelWorkspace = "${env.WORKSPACE}/parallel" + if (fileExists(parallelWorkspace)) { + dir(parallelWorkspace) { + def workspaceTasks = [:] + + // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace + findFiles(glob: '*/kibana/package.json') + .collect { + // get the paths to the kibana directories for the parallel workspaces + return it.path.tokenize('/').dropRight(1).join('/') + } + .each { workspaceDir -> + workspaceTasks[workspaceDir] = { + dir(workspaceDir) { + catchErrors { + runbld.junit() + } } } } - } - if (workspaceTasks) { - parallel(workspaceTasks) + if (workspaceTasks) { + parallel(workspaceTasks) + } } } } From 04163a51d1554bae83a43f307b96aeab6e9d3f3b Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 16:55:27 -0400 Subject: [PATCH 15/22] Fix installDir references so that tests don't run from source, and re-enable xpack firefox --- test/scripts/jenkins_accessibility.sh | 2 +- test/scripts/jenkins_firefox_smoke.sh | 2 +- test/scripts/jenkins_xpack_accessibility.sh | 2 +- test/scripts/jenkins_xpack_saved_objects_field_metrics.sh | 2 +- vars/tasks.groovy | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/test/scripts/jenkins_accessibility.sh b/test/scripts/jenkins_accessibility.sh index c122d71b58edb..fa7cbd41d7078 100755 --- a/test/scripts/jenkins_accessibility.sh +++ b/test/scripts/jenkins_accessibility.sh @@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_oss.sh checks-reporter-with-killswitch "Kibana accessibility tests" \ node scripts/functional_tests \ --debug --bail \ - --kibana-install-dir "$installDir" \ + --kibana-install-dir "$KIBANA_INSTALL_DIR" \ --config test/accessibility/config.ts; diff --git a/test/scripts/jenkins_firefox_smoke.sh b/test/scripts/jenkins_firefox_smoke.sh index 2bba6e06d76d7..247ab360b7912 100755 --- a/test/scripts/jenkins_firefox_smoke.sh +++ b/test/scripts/jenkins_firefox_smoke.sh @@ -5,6 +5,6 @@ source test/scripts/jenkins_test_setup_oss.sh checks-reporter-with-killswitch "Firefox smoke test" \ node scripts/functional_tests \ --bail --debug \ - --kibana-install-dir "$installDir" \ + --kibana-install-dir "$KIBANA_INSTALL_DIR" \ --include-tag "includeFirefox" \ --config test/functional/config.firefox.js; diff --git a/test/scripts/jenkins_xpack_accessibility.sh b/test/scripts/jenkins_xpack_accessibility.sh index a3c03dd780886..3afd4bfb76396 100755 --- a/test/scripts/jenkins_xpack_accessibility.sh +++ b/test/scripts/jenkins_xpack_accessibility.sh @@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_xpack.sh checks-reporter-with-killswitch "X-Pack accessibility tests" \ node scripts/functional_tests \ --debug --bail \ - --kibana-install-dir "$installDir" \ + --kibana-install-dir "$KIBANA_INSTALL_DIR" \ --config test/accessibility/config.ts; diff --git a/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh b/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh index d3ca8839a7dab..e3b0fe778bdfb 100755 --- a/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh +++ b/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh @@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_xpack.sh checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \ node scripts/functional_tests \ --debug --bail \ - --kibana-install-dir "$installDir" \ + --kibana-install-dir "$KIBANA_INSTALL_DIR" \ --config test/saved_objects_field_count/config.ts; diff --git a/vars/tasks.groovy b/vars/tasks.groovy index f73238e2a8e14..52641ce31f0be 100644 --- a/vars/tasks.groovy +++ b/vars/tasks.groovy @@ -73,7 +73,7 @@ def functionalOss(Map params = [:]) { def functionalXpack(Map params = [:]) { def config = params ?: [ ciGroups: true, - firefox: false, + firefox: true, accessibility: true, pluginFunctional: true, savedObjectsFieldMetrics:true, From 8e671e048493a2fc335b320298ee34f72f1ea130 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 17:15:27 -0400 Subject: [PATCH 16/22] Don't expand globs --- vars/kibanaPipeline.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 64ea136b7b53c..a67865aae0f64 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -269,7 +269,7 @@ def runErrorReporter() { bash( """ source src/dev/ci_setup/setup_env.sh - node scripts/report_failed_tests ${dryRun} target/junit/**/*.xml ../parallel/*/kibana/target/junit/**/*.xml + node scripts/report_failed_tests ${dryRun} 'target/junit/**/*.xml' '../parallel/*/kibana/target/junit/**/*.xml' """, "Report failed tests, if necessary" ) From 90203a55d9bd2ef37fa46b4b35e33a3151d99919 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jul 2020 17:19:10 -0400 Subject: [PATCH 17/22] Remove karma config --- tasks/config/karma.js | 207 ------------------------------------------ 1 file changed, 207 deletions(-) delete mode 100644 tasks/config/karma.js diff --git a/tasks/config/karma.js b/tasks/config/karma.js deleted file mode 100644 index 330192e0e2a26..0000000000000 --- a/tasks/config/karma.js +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { dirname } from 'path'; -import { times } from 'lodash'; -import { makeJunitReportPath } from '@kbn/test'; -import * as UiSharedDeps from '@kbn/ui-shared-deps'; - -const TOTAL_CI_SHARDS = 4; -const ROOT = dirname(require.resolve('../../package.json')); -const buildHash = String(Number.MAX_SAFE_INTEGER); - -module.exports = function (grunt) { - function pickBrowser() { - if (grunt.option('browser')) { - return grunt.option('browser'); - } - if (process.env.TEST_BROWSER_HEADLESS === '1') { - return 'Chrome_Headless'; - } - return 'Chrome'; - } - - function pickReporters() { - // available reporters: https://npmjs.org/browse/keyword/karma-reporter - if (process.env.CI && process.env.DISABLE_JUNIT_REPORTER) { - return ['dots']; - } - - if (process.env.CI) { - return ['dots', 'junit']; - } - - return ['progress']; - } - - function getKarmaFiles(shardNum) { - return [ - 'http://localhost:5610/test_bundle/built_css.css', - // Sets global variables normally set by the bootstrap.js script - 'http://localhost:5610/test_bundle/karma/globals.js', - - ...UiSharedDeps.jsDepFilenames.map( - (chunkFilename) => - `http://localhost:5610/${buildHash}/bundles/kbn-ui-shared-deps/${chunkFilename}` - ), - `http://localhost:5610/${buildHash}/bundles/kbn-ui-shared-deps/${UiSharedDeps.jsFilename}`, - - shardNum === undefined - ? `http://localhost:5610/${buildHash}/bundles/tests.bundle.js` - : `http://localhost:5610/${buildHash}/bundles/tests.bundle.js?shards=${TOTAL_CI_SHARDS}&shard_num=${shardNum}`, - - `http://localhost:5610/${buildHash}/bundles/kbn-ui-shared-deps/${UiSharedDeps.baseCssDistFilename}`, - // this causes tilemap tests to fail, probably because the eui styles haven't been - // included in the karma harness a long some time, if ever - // `http://localhost:5610/bundles/kbn-ui-shared-deps/${UiSharedDeps.lightCssDistFilename}`, - `http://localhost:5610/${buildHash}/bundles/tests.style.css`, - ]; - } - - const config = { - options: { - // base path that will be used to resolve all patterns (eg. files, exclude) - basePath: '', - - captureTimeout: 30000, - browserNoActivityTimeout: 120000, - frameworks: ['mocha'], - plugins: [ - 'karma-chrome-launcher', - 'karma-coverage', - 'karma-firefox-launcher', - 'karma-ie-launcher', - 'karma-junit-reporter', - 'karma-mocha', - 'karma-safari-launcher', - ], - port: 9876, - colors: true, - logLevel: grunt.option('debug') || grunt.option('verbose') ? 'DEBUG' : 'INFO', - autoWatch: false, - browsers: [pickBrowser()], - customLaunchers: { - Chrome_Headless: { - base: 'Chrome', - flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222', '--no-sandbox'], - }, - }, - - reporters: pickReporters(), - - junitReporter: { - outputFile: makeJunitReportPath(ROOT, 'karma'), - useBrowserName: false, - nameFormatter: (_, result) => [...result.suite, result.description].join(' '), - classNameFormatter: (_, result) => { - const rootSuite = result.suite[0] || result.description; - return `Browser Unit Tests.${rootSuite.replace(/\./g, 'ยท')}`; - }, - }, - - // list of files / patterns to load in the browser - files: getKarmaFiles(), - - proxies: { - '/tests/': 'http://localhost:5610/tests/', - '/test_bundle/': 'http://localhost:5610/test_bundle/', - [`/${buildHash}/bundles/`]: `http://localhost:5610/${buildHash}/bundles/`, - }, - - client: { - mocha: { - reporter: 'html', // change Karma's debug.html to the mocha web reporter - timeout: 10000, - slow: 5000, - }, - }, - }, - - dev: { singleRun: false }, - unit: { singleRun: true }, - coverage: { - singleRun: true, - reporters: ['coverage'], - coverageReporter: { - reporters: [{ type: 'html', dir: 'coverage' }, { type: 'text-summary' }], - }, - }, - }; - - /** - * ------------------------------------------------------------ - * CI sharding - * ------------------------------------------------------------ - * - * Every test retains nearly all of the memory it causes to be allocated, - * which has started to kill the test browser as the size of the test suite - * increases. This is a deep-rooted problem that will take some serious - * work to fix. - * - * CI sharding is a short-term solution that splits the top-level describe - * calls into different "shards" and instructs karma to only run one shard - * at a time, reloading the browser in between each shard and forcing the - * memory from the previous shard to be released. - * - * ## how - * - * Rather than modify the bundling process to produce multiple testing - * bundles, top-level describe calls are sharded by their first argument, - * the suite name. - * - * The number of shards to create is controlled with the TOTAL_CI_SHARDS - * constant defined at the top of this file. - * - * ## controlling sharding - * - * To control sharding in a specific karma configuration, the total number - * of shards to create (?shards=X), and the current shard number - * (&shard_num=Y), are added to the testing bundle url and read by the - * test_harness/setup_test_sharding[1] module. This allows us to use a - * different number of shards in different scenarios (ie. running - * `yarn test:karma` runs the tests in a single shard, effectively - * disabling sharding) - * - * These same parameters can also be defined in the URL/query string of the - * karma debug page (started when you run `yarn test:karma:debug`). - * - * ## debugging - * - * It is *possible* that some tests will only pass if run after/with certain - * other suites. To debug this, make sure that your tests pass in isolation - * (by clicking the suite name on the karma debug page) and that it runs - * correctly in it's given shard (using the `?shards=X&shard_num=Y` query - * string params on the karma debug page). You can spot the shard number - * a test is running in by searching for the "ready to load tests for shard X" - * log message. - * - * [1]: src/legacy/ui/public/test_harness/test_sharding/setup_test_sharding.js - */ - times(TOTAL_CI_SHARDS, (i) => { - const n = i + 1; - config[`ciShard-${n}`] = { - singleRun: true, - options: { - files: getKarmaFiles(n), - }, - }; - }); - - return config; -}; From 3b4616054381487dae669395c40f065052987030 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 31 Jul 2020 12:31:59 -0400 Subject: [PATCH 18/22] Attempt to speed up report_failed_tests --- vars/kibanaPipeline.groovy | 51 +++++++++++++++++++++++++++----------- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index a67865aae0f64..29d62b32b27db 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -2,8 +2,15 @@ def withPostBuildReporting(Closure closure) { try { closure() } finally { + def parallelWorkspaces = [] + try { + parallelWorkspaces = getParallelWorkspaces() + } catch(ex) { + print ex + } + catchErrors { - runErrorReporter() + runErrorReporter([pwd()] + parallelWorkspaces) } catchErrors { @@ -16,21 +23,15 @@ def withPostBuildReporting(Closure closure) { dir(parallelWorkspace) { def workspaceTasks = [:] - // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace - findFiles(glob: '*/kibana/package.json') - .collect { - // get the paths to the kibana directories for the parallel workspaces - return it.path.tokenize('/').dropRight(1).join('/') - } - .each { workspaceDir -> - workspaceTasks[workspaceDir] = { - dir(workspaceDir) { - catchErrors { - runbld.junit() - } + parallelWorkspaces.each { workspaceDir -> + workspaceTasks[workspaceDir] = { + dir(workspaceDir) { + catchErrors { + runbld.junit() } } } + } if (workspaceTasks) { parallel(workspaceTasks) @@ -41,6 +42,22 @@ def withPostBuildReporting(Closure closure) { } } +def getParallelWorkspaces() { + def parallelWorkspace = "${env.WORKSPACE}/parallel" + if (fileExists(parallelWorkspace)) { + dir(parallelWorkspace) { + // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace + return findFiles(glob: '*/kibana/package.json') + .collect { + // get the paths to the kibana directories for the parallel workspaces + return it.path.tokenize('/').dropRight(1).join('/') + } + } + } + + return [] +} + def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) { // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER @@ -263,13 +280,19 @@ def buildXpack(maxWorkers = '') { } def runErrorReporter() { + return runErrorReporter([pwd()]) +} + +def runErrorReporter(workspaces) { def status = buildUtils.getBuildStatus() def dryRun = status != "ABORTED" ? "" : "--no-github-update" + def globs = workspaces.collect { "'${it}/target/junit/**/*.xml'" }.join(" ") + bash( """ source src/dev/ci_setup/setup_env.sh - node scripts/report_failed_tests ${dryRun} 'target/junit/**/*.xml' '../parallel/*/kibana/target/junit/**/*.xml' + node scripts/report_failed_tests ${dryRun} ${globs} """, "Report failed tests, if necessary" ) From 5abd3d26aac23b69c82f868d65b0d1032ab795ae Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 31 Jul 2020 14:32:18 -0400 Subject: [PATCH 19/22] Fix scope problem --- vars/kibanaPipeline.groovy | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 29d62b32b27db..982b3def80b88 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -43,11 +43,12 @@ def withPostBuildReporting(Closure closure) { } def getParallelWorkspaces() { + def workspaces = [] def parallelWorkspace = "${env.WORKSPACE}/parallel" if (fileExists(parallelWorkspace)) { dir(parallelWorkspace) { // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace - return findFiles(glob: '*/kibana/package.json') + workspaces = findFiles(glob: '*/kibana/package.json') .collect { // get the paths to the kibana directories for the parallel workspaces return it.path.tokenize('/').dropRight(1).join('/') @@ -55,7 +56,7 @@ def getParallelWorkspaces() { } } - return [] + return workspaces } def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) { From 6fe212672637e78cef100b298aca92e20887af8f Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 31 Jul 2020 16:22:51 -0400 Subject: [PATCH 20/22] Fix workspace path --- vars/kibanaPipeline.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 982b3def80b88..b78f029afc669 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -51,7 +51,7 @@ def getParallelWorkspaces() { workspaces = findFiles(glob: '*/kibana/package.json') .collect { // get the paths to the kibana directories for the parallel workspaces - return it.path.tokenize('/').dropRight(1).join('/') + return parallelWorkspace + it.path.tokenize('/').dropRight(1).join('/') } } } From 5c43045d22b6f19e7732d2c78c9ed337f2d94e96 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 31 Jul 2020 20:55:03 -0400 Subject: [PATCH 21/22] Missed a slash --- vars/kibanaPipeline.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index b78f029afc669..45fe538077da2 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -51,7 +51,7 @@ def getParallelWorkspaces() { workspaces = findFiles(glob: '*/kibana/package.json') .collect { // get the paths to the kibana directories for the parallel workspaces - return parallelWorkspace + it.path.tokenize('/').dropRight(1).join('/') + return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/') } } } From e46af8c58a6d6e1ab96c0fcac5e6000754b97adc Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 3 Aug 2020 14:39:05 -0400 Subject: [PATCH 22/22] Address some PR feedback --- .ci/Dockerfile | 3 +++ test/scripts/jenkins_test_setup.sh | 2 ++ vars/kibanaPipeline.groovy | 2 +- 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.ci/Dockerfile b/.ci/Dockerfile index 201e17b93c116..d90d9f4710b5b 100644 --- a/.ci/Dockerfile +++ b/.ci/Dockerfile @@ -1,3 +1,6 @@ +# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable. +# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts + ARG NODE_VERSION=10.21.0 FROM node:${NODE_VERSION} AS base diff --git a/test/scripts/jenkins_test_setup.sh b/test/scripts/jenkins_test_setup.sh index 7cced76eb650f..05b88aa2dd0a2 100755 --- a/test/scripts/jenkins_test_setup.sh +++ b/test/scripts/jenkins_test_setup.sh @@ -15,6 +15,8 @@ export TEST_BROWSER_HEADLESS=1 source src/dev/ci_setup/setup_env.sh +# For parallel workspaces, we should copy the .es directory from the root, because it should already have downloaded snapshots in it +# This isn't part of jenkins_setup_parallel_workspace.sh just because not all tasks require ES if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then cp -R $WORKSPACE/kibana/.es ./ fi diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy index 5dd79ca600310..5f3e8d1a0660d 100644 --- a/vars/kibanaPipeline.groovy +++ b/vars/kibanaPipeline.groovy @@ -51,7 +51,7 @@ def getParallelWorkspaces() { workspaces = findFiles(glob: '*/kibana/package.json') .collect { // get the paths to the kibana directories for the parallel workspaces - return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/') + return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/') } } }