diff --git a/.ci/Dockerfile b/.ci/Dockerfile
deleted file mode 100644
index 201e17b93c116b..00000000000000
--- a/.ci/Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-ARG NODE_VERSION=10.21.0
-
-FROM node:${NODE_VERSION} AS base
-
-RUN apt-get update && \
-  apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
-  libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
-  libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
-  libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
-  libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \
-  rm -rf /var/lib/apt/lists/*
-
-RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
-  && sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
-  && apt-get update \
-  && apt-get install -y rsync jq bsdtar google-chrome-stable \
-  --no-install-recommends \
-  && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-
-RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
-  && curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
-  && unzip vault.zip \
-  && rm vault.zip \
-  && chmod +x vault \
-  && mv vault /usr/local/bin/vault
-
-RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana
-
-COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
-RUN chmod +x /usr/local/bin/bash_standard_lib.sh
-
-COPY ./runbld /usr/local/bin/runbld
-RUN chmod +x /usr/local/bin/runbld
-
-USER kibana
diff --git a/.ci/runbld_no_junit.yml b/.ci/runbld_no_junit.yml
index 1bcb7e22a26480..67b5002c1c4377 100644
--- a/.ci/runbld_no_junit.yml
+++ b/.ci/runbld_no_junit.yml
@@ -3,4 +3,4 @@
 profiles:
 - ".*": # Match any job
     tests:
-      junit-filename-pattern: false
+      junit-filename-pattern: "8d8bd494-d909-4e67-a052-7e8b5aaeb5e4" # A bogus path that should never exist
diff --git a/.gitignore b/.gitignore
index 25a8c369bb704d..32377ec0f1ffe8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,8 +47,6 @@ npm-debug.log*
 .tern-project
 .nyc_output
 .ci/pipeline-library/build/
-.ci/runbld
-.ci/bash_standard_lib.sh
 .gradle
 
 # apm plugin
diff --git a/Jenkinsfile b/Jenkinsfile
index 491a1e386deb18..f6f77ccae8427a 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -8,7 +8,50 @@ kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true)
     ciStats.trackBuild {
       catchError {
         retryable.enable()
-        kibanaPipeline.allCiTasks()
+        parallel([
+          'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
+          'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
+          'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
+            'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
+            'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
+            'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
+            'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
+            'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
+            'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
+            'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
+            'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
+            'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
+            'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
+            'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
+            'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
+            'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
+            'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
+            // 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
+          ]),
+          'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
+            'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
+            'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
+            'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
+            'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
+            'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
+            'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
+            'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
+            'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
+            'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
+            'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
+            'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
+            'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
+            'xpack-savedObjectsFieldMetrics': kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'),
+            // 'xpack-pageLoadMetrics': kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh'),
+            'xpack-securitySolutionCypress': { processNumber ->
+              whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/']) {
+                kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')(processNumber)
+              }
+            },
+
+            // 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
+          ]),
+        ])
       }
     }
   }
diff --git a/src/dev/ci_setup/checkout_sibling_es.sh b/src/dev/ci_setup/checkout_sibling_es.sh
index 3832ec9b4076a7..915759d4214f9a 100755
--- a/src/dev/ci_setup/checkout_sibling_es.sh
+++ b/src/dev/ci_setup/checkout_sibling_es.sh
@@ -7,11 +7,10 @@ function checkout_sibling {
   targetDir=$2
   useExistingParamName=$3
   useExisting="$(eval "echo "\$$useExistingParamName"")"
-  repoAddress="https://github.com/"
 
   if [ -z ${useExisting:+x} ]; then
     if [ -d "$targetDir" ]; then
-      echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$WORKSPACE]!"
+      echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$PARENT_DIR]!"
       echo
       echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling."
       exit 1
@@ -22,9 +21,8 @@ function checkout_sibling {
     cloneBranch=""
 
     function clone_target_is_valid {
-      echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}"
-      if [[ -n "$(git ls-remote --heads "${repoAddress}${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
+      if [[ -n "$(git ls-remote --heads "git@github.com:${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
         return 0
       else
         return 1
@@ -73,7 +71,7 @@ function checkout_sibling {
    fi
 
    echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..."
-   git clone -b "$cloneBranch" "${repoAddress}${cloneAuthor}/${project}.git" "$targetDir" --depth=1
+   git clone -b "$cloneBranch" "git@github.com:${cloneAuthor}/${project}.git" "$targetDir" --depth=1
    echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)"
    echo
 }
@@ -89,12 +87,12 @@ function checkout_sibling {
   fi
 }
 
-checkout_sibling "elasticsearch" "${WORKSPACE}/elasticsearch" "USE_EXISTING_ES"
+checkout_sibling "elasticsearch" "${PARENT_DIR}/elasticsearch" "USE_EXISTING_ES"
 export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}
 
 # Set the JAVA_HOME based on the Java property file in the ES repo
 # This assumes the naming convention used on CI (ex: ~/.java/java10)
-ES_DIR="$WORKSPACE/elasticsearch"
+ES_DIR="$PARENT_DIR/elasticsearch"
 ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
diff --git a/src/dev/ci_setup/setup_env.sh b/src/dev/ci_setup/setup_env.sh
index f96a2240917e25..343ff471993754 100644
--- a/src/dev/ci_setup/setup_env.sh
+++ b/src/dev/ci_setup/setup_env.sh
@@ -53,8 +53,6 @@ export PARENT_DIR="$parentDir"
 kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
 export KIBANA_PKG_BRANCH="$kbnBranch"
 
-export WORKSPACE="${WORKSPACE:-$PARENT_DIR}"
-
 ###
 ### download node
 ###
@@ -163,7 +161,7 @@ export -f checks-reporter-with-killswitch
 
 source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh"
 
-ES_DIR="$WORKSPACE/elasticsearch"
+ES_DIR="$PARENT_DIR/elasticsearch"
 ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
 
 if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then
diff --git a/src/dev/notice/generate_notice_from_source.ts b/src/dev/notice/generate_notice_from_source.ts
index a2b05c6dc8a4ea..fb74bed0f26f4b 100644
--- a/src/dev/notice/generate_notice_from_source.ts
+++ b/src/dev/notice/generate_notice_from_source.ts
@@ -49,10 +49,8 @@ export async function generateNoticeFromSource({ productName, directory, log }:
     ignore: [
       '{node_modules,build,target,dist,data,built_assets}/**',
       'packages/*/{node_modules,build,target,dist}/**',
-      'src/plugins/*/{node_modules,build,target,dist}/**',
       'x-pack/{node_modules,build,target,dist,data}/**',
       'x-pack/packages/*/{node_modules,build,target,dist}/**',
-      'x-pack/plugins/*/{node_modules,build,target,dist}/**',
     ],
   };
 
diff --git a/tasks/config/karma.js b/tasks/config/karma.js
index 7c4f75bea8801c..fa4bdc8ed22662 100644
--- a/tasks/config/karma.js
+++ b/tasks/config/karma.js
@@ -110,7 +110,7 @@ module.exports = function (grunt) {
       customLaunchers: {
         Chrome_Headless: {
           base: 'Chrome',
-          flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222', '--no-sandbox'],
+          flags: ['--headless', '--disable-gpu', '--remote-debugging-port=9222'],
         },
       },
 
diff --git a/tasks/test_jest.js b/tasks/test_jest.js
index 810ed423248400..d8f51806e8ddc8 100644
--- a/tasks/test_jest.js
+++ b/tasks/test_jest.js
@@ -22,7 +22,7 @@ const { resolve } = require('path');
 module.exports = function (grunt) {
   grunt.registerTask('test:jest', function () {
     const done = this.async();
-    runJest(resolve(__dirname, '../scripts/jest.js'), ['--maxWorkers=10']).then(done, done);
+    runJest(resolve(__dirname, '../scripts/jest.js')).then(done, done);
   });
 
   grunt.registerTask('test:jest_integration', function () {
@@ -30,10 +30,10 @@ module.exports = function (grunt) {
     runJest(resolve(__dirname, '../scripts/jest_integration.js')).then(done, done);
   });
 
-  function runJest(jestScript, args = []) {
+  function runJest(jestScript) {
     const serverCmd = {
       cmd: 'node',
-      args: [jestScript, '--ci', ...args],
+      args: [jestScript, '--ci'],
       opts: { stdio: 'inherit' },
     };
 
diff --git a/test/scripts/checks/doc_api_changes.sh b/test/scripts/checks/doc_api_changes.sh
deleted file mode 100755
index 503d12b2f6d73d..00000000000000
--- a/test/scripts/checks/doc_api_changes.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:checkDocApiChanges
diff --git a/test/scripts/checks/file_casing.sh b/test/scripts/checks/file_casing.sh
deleted file mode 100755
index 513664263791b9..00000000000000
--- a/test/scripts/checks/file_casing.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:checkFileCasing
diff --git a/test/scripts/checks/i18n.sh b/test/scripts/checks/i18n.sh
deleted file mode 100755
index 7a6fd46c46c769..00000000000000
--- a/test/scripts/checks/i18n.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:i18nCheck
diff --git a/test/scripts/checks/licenses.sh b/test/scripts/checks/licenses.sh
deleted file mode 100755
index a08d7d07a24a13..00000000000000
--- a/test/scripts/checks/licenses.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:licenses
diff --git a/test/scripts/checks/lock_file_symlinks.sh b/test/scripts/checks/lock_file_symlinks.sh
deleted file mode 100755
index 1d43d32c9feb82..00000000000000
--- a/test/scripts/checks/lock_file_symlinks.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:checkLockfileSymlinks
diff --git a/test/scripts/checks/test_hardening.sh b/test/scripts/checks/test_hardening.sh
deleted file mode 100755
index 9184758577654a..00000000000000
--- a/test/scripts/checks/test_hardening.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:test_hardening
diff --git a/test/scripts/checks/test_projects.sh b/test/scripts/checks/test_projects.sh
deleted file mode 100755
index 5f9aafe80e10e6..00000000000000
--- a/test/scripts/checks/test_projects.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:test_projects
diff --git a/test/scripts/checks/ts_projects.sh b/test/scripts/checks/ts_projects.sh
deleted file mode 100755
index d667c753baec23..00000000000000
--- a/test/scripts/checks/ts_projects.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:checkTsProjects
diff --git a/test/scripts/checks/type_check.sh b/test/scripts/checks/type_check.sh
deleted file mode 100755
index 07c49638134be1..00000000000000
--- a/test/scripts/checks/type_check.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:typeCheck
diff --git a/test/scripts/checks/verify_dependency_versions.sh b/test/scripts/checks/verify_dependency_versions.sh
deleted file mode 100755
index b73a71e7ff7fd5..00000000000000
--- a/test/scripts/checks/verify_dependency_versions.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:verifyDependencyVersions
diff --git a/test/scripts/checks/verify_notice.sh b/test/scripts/checks/verify_notice.sh
deleted file mode 100755
index 9f8343e5408615..00000000000000
--- a/test/scripts/checks/verify_notice.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:verifyNotice
diff --git a/test/scripts/jenkins_build_kbn_sample_panel_action.sh b/test/scripts/jenkins_build_kbn_sample_panel_action.sh
old mode 100755
new mode 100644
diff --git a/test/scripts/jenkins_build_kibana.sh b/test/scripts/jenkins_build_kibana.sh
index f449986713f97d..3e49edc8e6ae5f 100755
--- a/test/scripts/jenkins_build_kibana.sh
+++ b/test/scripts/jenkins_build_kibana.sh
@@ -2,9 +2,19 @@
 
 source src/dev/ci_setup/setup_env.sh
 
-if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
-  ./test/scripts/jenkins_build_plugins.sh
-fi
+echo " -> building examples separate from test plugins"
+node scripts/build_kibana_platform_plugins \
+  --oss \
+  --examples \
+  --verbose;
+
+echo " -> building test plugins"
+node scripts/build_kibana_platform_plugins \
+  --oss \
+  --no-examples \
+  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
+  --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
+  --verbose;
 
 # doesn't persist, also set in kibanaPipeline.groovy
 export KBN_NP_PLUGINS_BUILT=true
@@ -16,7 +26,4 @@ yarn run grunt functionalTests:ensureAllTestsInCiGroup;
 if [[ -z "$CODE_COVERAGE" ]] ; then
   echo " -> building and extracting OSS Kibana distributable for use in functional tests"
   node scripts/build --debug --oss
-
-  mkdir -p "$WORKSPACE/kibana-build-oss"
-  cp -pR build/oss/kibana-*-SNAPSHOT-linux-x86_64/. $WORKSPACE/kibana-build-oss/
 fi
diff --git a/test/scripts/jenkins_build_plugins.sh b/test/scripts/jenkins_build_plugins.sh
deleted file mode 100755
index 32b3942074b346..00000000000000
--- a/test/scripts/jenkins_build_plugins.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-echo " -> building examples separate from test plugins"
-node scripts/build_kibana_platform_plugins \
-  --oss \
-  --examples \
-  --workers 6 \
-  --verbose
-
-echo " -> building kibana platform plugins"
-node scripts/build_kibana_platform_plugins \
-  --oss \
-  --no-examples \
-  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
-  --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
-  --workers 6 \
-  --verbose
diff --git a/test/scripts/jenkins_ci_group.sh b/test/scripts/jenkins_ci_group.sh
index 2542d7032e83bf..60d7f0406f4c9f 100755
--- a/test/scripts/jenkins_ci_group.sh
+++ b/test/scripts/jenkins_ci_group.sh
@@ -5,7 +5,7 @@ source test/scripts/jenkins_test_setup_oss.sh
 if [[ -z "$CODE_COVERAGE" ]]; then
   checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}";
 
-  if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then
+  if [ "$CI_GROUP" == "1" ]; then
     source test/scripts/jenkins_build_kbn_sample_panel_action.sh
     yarn run grunt run:pluginFunctionalTestsRelease --from=source;
     yarn run grunt run:exampleFunctionalTestsRelease --from=source;
diff --git a/test/scripts/jenkins_plugin_functional.sh b/test/scripts/jenkins_plugin_functional.sh
deleted file mode 100755
index 1d691d98982dea..00000000000000
--- a/test/scripts/jenkins_plugin_functional.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-cd test/plugin_functional/plugins/kbn_sample_panel_action;
-if [[ ! -d "target" ]]; then
-  yarn build;
-fi
-cd -;
-
-pwd
-
-yarn run grunt run:pluginFunctionalTestsRelease --from=source;
-yarn run grunt run:exampleFunctionalTestsRelease --from=source;
-yarn run grunt run:interpreterFunctionalTestsRelease;
diff --git a/test/scripts/jenkins_security_solution_cypress.sh b/test/scripts/jenkins_security_solution_cypress.sh
old mode 100755
new mode 100644
index a5a1a2103801fa..204911a3eedaa6
--- a/test/scripts/jenkins_security_solution_cypress.sh
+++ b/test/scripts/jenkins_security_solution_cypress.sh
@@ -1,6 +1,12 @@
 #!/usr/bin/env bash
 
-source test/scripts/jenkins_test_setup_xpack.sh
+source test/scripts/jenkins_test_setup.sh
+
+installDir="$PARENT_DIR/install/kibana"
+destDir="${installDir}-${CI_WORKER_NUMBER}"
+cp -R "$installDir" "$destDir"
+
+export KIBANA_INSTALL_DIR="$destDir"
 
 echo " -> Running security solution cypress tests"
 cd "$XPACK_DIR"
diff --git a/test/scripts/jenkins_setup_parallel_workspace.sh b/test/scripts/jenkins_setup_parallel_workspace.sh
deleted file mode 100755
index 5274d05572e713..00000000000000
--- a/test/scripts/jenkins_setup_parallel_workspace.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-CURRENT_DIR=$(pwd)
-
-# Copy everything except node_modules into the current workspace
-rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules
-rsync -a ${WORKSPACE}/kibana/.??* .
-
-# Symlink all non-root, non-fixture node_modules into our new workspace
-cd ${WORKSPACE}/kibana
-find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
-find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
-cd "${CURRENT_DIR}"
-
-# Symlink all of the individual root-level node_modules into the node_modules/ directory
-mkdir -p node_modules
-ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/
-ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/
-
-# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked
-unlink node_modules/@kbn
-unlink node_modules/css-loader
-unlink node_modules/style-loader
-
-# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink
-unlink node_modules/val-loader
-
-cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/
-cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/
-cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/
-cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/
diff --git a/test/scripts/jenkins_test_setup.sh b/test/scripts/jenkins_test_setup.sh
old mode 100755
new mode 100644
index 7cced76eb650f5..49ee8a6b526ca5
--- a/test/scripts/jenkins_test_setup.sh
+++ b/test/scripts/jenkins_test_setup.sh
@@ -14,7 +14,3 @@ trap 'post_work' EXIT
 export TEST_BROWSER_HEADLESS=1
 
 source src/dev/ci_setup/setup_env.sh
-
-if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then
-  cp -R $WORKSPACE/kibana/.es ./
-fi
diff --git a/test/scripts/jenkins_test_setup_oss.sh b/test/scripts/jenkins_test_setup_oss.sh
old mode 100755
new mode 100644
index b7eac33f351768..7bbb8675263843
--- a/test/scripts/jenkins_test_setup_oss.sh
+++ b/test/scripts/jenkins_test_setup_oss.sh
@@ -2,17 +2,10 @@
 
 source test/scripts/jenkins_test_setup.sh
 
-if [[ -z "$CODE_COVERAGE" ]]; then
-
-  destDir="build/kibana-build-oss"
-  if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
-    destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
-  fi
-
-  if [[ ! -d $destDir ]]; then
-    mkdir -p $destDir
-    cp -pR "$WORKSPACE/kibana-build-oss/." $destDir/
-  fi
+if [[ -z "$CODE_COVERAGE" ]] ; then
+  installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
+  destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER}
+  cp -R "$installDir" "$destDir"
 
   export KIBANA_INSTALL_DIR="$destDir"
 fi
diff --git a/test/scripts/jenkins_test_setup_xpack.sh b/test/scripts/jenkins_test_setup_xpack.sh
old mode 100755
new mode 100644
index 74a3de77e3a760..a72e9749ebbd5b
--- a/test/scripts/jenkins_test_setup_xpack.sh
+++ b/test/scripts/jenkins_test_setup_xpack.sh
@@ -3,18 +3,11 @@
 source test/scripts/jenkins_test_setup.sh
 
 if [[ -z "$CODE_COVERAGE" ]]; then
+  installDir="$PARENT_DIR/install/kibana"
+  destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}"
+  cp -R "$installDir" "$destDir"
 
-  destDir="build/kibana-build-xpack"
-  if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
-    destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
-  fi
-
-  if [[ ! -d $destDir ]]; then
-    mkdir -p $destDir
-    cp -pR "$WORKSPACE/kibana-build-xpack/." $destDir/
-  fi
-
-  export KIBANA_INSTALL_DIR="$(realpath $destDir)"
+  export KIBANA_INSTALL_DIR="$destDir"
 
   cd "$XPACK_DIR"
 fi
diff --git a/test/scripts/jenkins_xpack_build_kibana.sh b/test/scripts/jenkins_xpack_build_kibana.sh
index 2452e2f5b8c58c..58ef6a42d3fe4c 100755
--- a/test/scripts/jenkins_xpack_build_kibana.sh
+++ b/test/scripts/jenkins_xpack_build_kibana.sh
@@ -3,9 +3,21 @@
 cd "$KIBANA_DIR"
 source src/dev/ci_setup/setup_env.sh
 
-if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
-  ./test/scripts/jenkins_xpack_build_plugins.sh
-fi
+echo " -> building examples separate from test plugins"
+node scripts/build_kibana_platform_plugins \
+  --examples \
+  --verbose;
+
+echo " -> building test plugins"
+node scripts/build_kibana_platform_plugins \
+  --no-examples \
+  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
+  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
+  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
+  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
+  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
+  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
+  --verbose;
 
 # doesn't persist, also set in kibanaPipeline.groovy
 export KBN_NP_PLUGINS_BUILT=true
@@ -30,10 +42,7 @@ if [[ -z "$CODE_COVERAGE" ]] ; then
   cd "$KIBANA_DIR"
   node scripts/build --debug --no-oss
   linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
-  installDir="$KIBANA_DIR/install/kibana"
+  installDir="$PARENT_DIR/install/kibana"
   mkdir -p "$installDir"
   tar -xzf "$linuxBuild" -C "$installDir" --strip=1
-
-  mkdir -p "$WORKSPACE/kibana-build-xpack"
-  cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/
 fi
diff --git a/test/scripts/jenkins_xpack_build_plugins.sh b/test/scripts/jenkins_xpack_build_plugins.sh
deleted file mode 100755
index fea30c547bd5fd..00000000000000
--- a/test/scripts/jenkins_xpack_build_plugins.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-echo " -> building examples separate from test plugins"
-node scripts/build_kibana_platform_plugins \
-  --workers 12 \
-  --examples \
-  --verbose
-
-echo " -> building kibana platform plugins"
-node scripts/build_kibana_platform_plugins \
-  --no-examples \
-  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
-  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
-  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
-  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
-  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
-  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
-  --workers 12 \
-  --verbose
diff --git a/test/scripts/jenkins_xpack_page_load_metrics.sh b/test/scripts/jenkins_xpack_page_load_metrics.sh
old mode 100755
new mode 100644
diff --git a/test/scripts/lint/eslint.sh b/test/scripts/lint/eslint.sh
deleted file mode 100755
index c3211300b96c54..00000000000000
--- a/test/scripts/lint/eslint.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:eslint
diff --git a/test/scripts/lint/sasslint.sh b/test/scripts/lint/sasslint.sh
deleted file mode 100755
index b9c683bcb049e1..00000000000000
--- a/test/scripts/lint/sasslint.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:sasslint
diff --git a/test/scripts/test/api_integration.sh b/test/scripts/test/api_integration.sh
deleted file mode 100755
index 152c97a3ca7df7..00000000000000
--- a/test/scripts/test/api_integration.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:apiIntegrationTests
diff --git a/test/scripts/test/jest_integration.sh b/test/scripts/test/jest_integration.sh
deleted file mode 100755
index 73dbbddfb38f63..00000000000000
--- a/test/scripts/test/jest_integration.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:test_jest_integration
diff --git a/test/scripts/test/jest_unit.sh b/test/scripts/test/jest_unit.sh
deleted file mode 100755
index e25452698cebc8..00000000000000
--- a/test/scripts/test/jest_unit.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:test_jest
diff --git a/test/scripts/test/karma_ci.sh b/test/scripts/test/karma_ci.sh
deleted file mode 100755
index e9985300ba19d4..00000000000000
--- a/test/scripts/test/karma_ci.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:test_karma_ci
diff --git a/test/scripts/test/mocha.sh b/test/scripts/test/mocha.sh
deleted file mode 100755
index 43c00f0a09dcf7..00000000000000
--- a/test/scripts/test/mocha.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn run grunt run:mocha
diff --git a/test/scripts/test/xpack_jest_unit.sh b/test/scripts/test/xpack_jest_unit.sh
deleted file mode 100755
index 93d70ec3553910..00000000000000
--- a/test/scripts/test/xpack_jest_unit.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-cd x-pack
-checks-reporter-with-killswitch "X-Pack Jest" node --max-old-space-size=6144 scripts/jest --ci --verbose --maxWorkers=10
diff --git a/test/scripts/test/xpack_karma.sh b/test/scripts/test/xpack_karma.sh
deleted file mode 100755
index 9078f01f1b870f..00000000000000
--- a/test/scripts/test/xpack_karma.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-cd x-pack
-checks-reporter-with-killswitch "X-Pack Karma Tests" yarn test:karma
diff --git a/test/scripts/test/xpack_list_cyclic_dependency.sh b/test/scripts/test/xpack_list_cyclic_dependency.sh
deleted file mode 100755
index 493fe9f58d322e..00000000000000
--- a/test/scripts/test/xpack_list_cyclic_dependency.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-cd x-pack
-checks-reporter-with-killswitch "X-Pack List cyclic dependency test" node plugins/lists/scripts/check_circular_deps
diff --git a/test/scripts/test/xpack_siem_cyclic_dependency.sh b/test/scripts/test/xpack_siem_cyclic_dependency.sh
deleted file mode 100755
index b21301f25ad087..00000000000000
--- a/test/scripts/test/xpack_siem_cyclic_dependency.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-cd x-pack
-checks-reporter-with-killswitch "X-Pack SIEM cyclic dependency test" node plugins/security_solution/scripts/check_circular_deps
diff --git a/vars/catchErrors.groovy b/vars/catchErrors.groovy
index 2a1b55d832606c..460a90b8ec0c04 100644
--- a/vars/catchErrors.groovy
+++ b/vars/catchErrors.groovy
@@ -1,15 +1,8 @@
 // Basically, this is a shortcut for catchError(catchInterruptions: false) {}
 // By default, catchError will swallow aborts/timeouts, which we almost never want
-// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps
 def call(Map params = [:], Closure closure) {
-  try {
-    closure()
-  } catch (ex) {
-    params.catchInterruptions = false
-    catchError(params) {
-      throw ex
-    }
-  }
+  params.catchInterruptions = false
+  return catchError(params, closure)
 }
 
 return this
diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy
index 0f112043114511..f3fc5f84583c9c 100644
--- a/vars/kibanaPipeline.groovy
+++ b/vars/kibanaPipeline.groovy
@@ -16,34 +16,27 @@ def withPostBuildReporting(Closure closure) {
   }
 }
 
-def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) {
-  // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue
-  def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER
-
-  def kibanaPort = "61${parallelId}1"
-  def esPort = "61${parallelId}2"
-  def esTransportPort = "61${parallelId}3"
-  def ingestManagementPackageRegistryPort = "61${parallelId}4"
-
-  withEnv([
-    "CI_GROUP=${parallelId}",
-    "REMOVE_KIBANA_INSTALL_DIR=1",
-    "CI_PARALLEL_PROCESS_NUMBER=${parallelId}",
-    "TEST_KIBANA_HOST=localhost",
-    "TEST_KIBANA_PORT=${kibanaPort}",
-    "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
-    "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
-    "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
-    "KBN_NP_PLUGINS_BUILT=true",
-    "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
-  ] + additionalEnvs) {
-    closure()
-  }
-}
-
 def functionalTestProcess(String name, Closure closure) {
-  return {
-    withFunctionalTestEnv(["JOB=${name}"], closure)
+  return { processNumber ->
+    def kibanaPort = "61${processNumber}1"
+    def esPort = "61${processNumber}2"
+    def esTransportPort = "61${processNumber}3"
+    def ingestManagementPackageRegistryPort = "61${processNumber}4"
+
+    withEnv([
+      "CI_PARALLEL_PROCESS_NUMBER=${processNumber}",
+      "TEST_KIBANA_HOST=localhost",
+      "TEST_KIBANA_PORT=${kibanaPort}",
+      "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
+      "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
+      "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
+      "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
+      "IS_PIPELINE_JOB=1",
+      "JOB=${name}",
+      "KBN_NP_PLUGINS_BUILT=true",
+    ]) {
+      closure()
+    }
   }
 }
 
@@ -107,17 +100,11 @@ def withGcsArtifactUpload(workerName, closure) {
   def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}"
   def ARTIFACT_PATTERNS = [
     'target/kibana-*',
-    'target/test-metrics/*',
    'target/kibana-security-solution/**/*.png',
     'target/junit/**/*',
-    'target/test-suites-ci-plan.json',
-    'test/**/screenshots/session/*.png',
-    'test/**/screenshots/failure/*.png',
-    'test/**/screenshots/diff/*.png',
+    'test/**/screenshots/**/*.png',
     'test/functional/failure_debug/html/*.html',
-    'x-pack/test/**/screenshots/session/*.png',
-    'x-pack/test/**/screenshots/failure/*.png',
-    'x-pack/test/**/screenshots/diff/*.png',
+    'x-pack/test/**/screenshots/**/*.png',
     'x-pack/test/functional/failure_debug/html/*.html',
     'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
   ]
@@ -132,12 +119,6 @@ def withGcsArtifactUpload(workerName, closure) {
           ARTIFACT_PATTERNS.each { pattern ->
             uploadGcsArtifact(uploadPrefix, pattern)
           }
-
-          dir(env.WORKSPACE) {
-            ARTIFACT_PATTERNS.each { pattern ->
-              uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}")
-            }
-          }
         }
       }
     })
@@ -150,11 +131,6 @@ def withGcsArtifactUpload(workerName, closure) {
 
 def publishJunit() {
   junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
-
-  // junit() is weird about paths for security reasons, so we need to actually change to an upper directory first
-  dir(env.WORKSPACE) {
-    junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
-  }
 }
 
 def sendMail() {
@@ -218,16 +194,12 @@ def doSetup() {
   }
 }
 
-def buildOss(maxWorkers = '') {
-  withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
-    runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
-  }
+def buildOss() {
+  runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
 }
 
-def buildXpack(maxWorkers = '') {
-  withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
-    runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
-  }
+def buildXpack() {
+  runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
 }
 
 def runErrorReporter() {
@@ -276,100 +248,6 @@ def call(Map params = [:], Closure closure) {
   }
 }
 
-// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace
-// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh
-def withCiTaskQueue(Map options = [:], Closure closure) {
-  def setupClosure = {
-    // This can't use runbld, because it expects the source to be there, which isn't yet
-    bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process")
-  }
-
-  def config = [parallel: 24, setup: setupClosure] + options
-
-  withTaskQueue(config) {
-    closure.call()
-  }
-}
-
-def scriptTask(description, script) {
-  return {
-    withFunctionalTestEnv {
-      runbld(script, description)
-    }
-  }
-}
-
-def scriptTaskDocker(description, script) {
-  return {
-    withDocker(scriptTask(description, script))
-  }
-}
-
-def buildDocker() {
-  sh(
-    script: """
-      cp /usr/local/bin/runbld .ci/
-      cp /usr/local/bin/bash_standard_lib.sh .ci/
-      cd .ci
-      docker build -t kibana-ci -f ./Dockerfile .
-    """,
-    label: 'Build CI Docker image'
-  )
-}
-
-def withDocker(Closure closure) {
-  docker
-    .image('kibana-ci')
-    .inside(
-      "-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4",
-      closure
-    )
-}
-
-def buildOssPlugins() {
-  runbld('./test/scripts/jenkins_build_plugins.sh', 'Build OSS Plugins')
-}
-
-def buildXpackPlugins() {
-  runbld('./test/scripts/jenkins_xpack_build_plugins.sh', 'Build X-Pack Plugins')
-}
-
-def withTasks(Map params = [worker: [:]], Closure closure) {
-  catchErrors {
-    def config = [name: 'ci-worker', size: 'xxl', ramDisk: true] + (params.worker ?: [:])
-
-    workers.ci(config) {
-      withCiTaskQueue(parallel: 24) {
-        parallel([
-          docker: {
-            retry(2) {
-              buildDocker()
-            }
-          },
-
-          // There are integration tests etc that require the plugins to be built first, so let's go ahead and build them before set up the parallel workspaces
-          ossPlugins: { buildOssPlugins() },
-          xpackPlugins: { buildXpackPlugins() },
-        ])
-
-        catchErrors {
-          closure()
-        }
-      }
-    }
-  }
-}
-
-def allCiTasks() {
-  withTasks {
-    tasks.check()
-    tasks.lint()
-    tasks.test()
-    tasks.functionalOss()
-    tasks.functionalXpack()
-  }
-}
-
 def pipelineLibraryTests() {
   whenChanged(['vars/', '.ci/pipeline-library/']) {
     workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
@@ -380,4 +258,5 @@
     }
   }
 }
+
 return this
diff --git a/vars/task.groovy b/vars/task.groovy
deleted file mode 100644
index 0c07b519b6fefc..00000000000000
--- a/vars/task.groovy
+++ /dev/null
@@ -1,5 +0,0 @@
-def call(Closure closure) {
-  withTaskQueue.addTask(closure)
-}
-
-return this
diff --git a/vars/tasks.groovy b/vars/tasks.groovy
deleted file mode 100644
index 3ff9a7b4850ae9..00000000000000
--- a/vars/tasks.groovy
+++ /dev/null
@@ -1,124 +0,0 @@
-def call(List closures) {
-  withTaskQueue.addTasks(closures)
-}
-
-def check() {
-  tasks([
-    kibanaPipeline.scriptTask('Check TypeScript Projects', 'test/scripts/checks/ts_projects.sh'),
-    kibanaPipeline.scriptTask('Check Doc API Changes', 'test/scripts/checks/doc_api_changes.sh'),
-    kibanaPipeline.scriptTask('Check Types', 'test/scripts/checks/type_check.sh'),
-    kibanaPipeline.scriptTask('Check i18n', 'test/scripts/checks/i18n.sh'),
-    kibanaPipeline.scriptTask('Check File Casing', 'test/scripts/checks/file_casing.sh'),
-    kibanaPipeline.scriptTask('Check Lockfile Symlinks', 'test/scripts/checks/lock_file_symlinks.sh'),
-    kibanaPipeline.scriptTask('Check Licenses', 'test/scripts/checks/licenses.sh'),
-    kibanaPipeline.scriptTask('Verify Dependency Versions', 'test/scripts/checks/verify_dependency_versions.sh'),
-    kibanaPipeline.scriptTask('Verify NOTICE', 'test/scripts/checks/verify_notice.sh'),
-    kibanaPipeline.scriptTask('Test Projects', 'test/scripts/checks/test_projects.sh'),
-    kibanaPipeline.scriptTask('Test Hardening', 'test/scripts/checks/test_hardening.sh'),
-  ])
-}
-
-def lint() {
-  tasks([
-    kibanaPipeline.scriptTask('Lint: eslint', 'test/scripts/lint/eslint.sh'),
-    kibanaPipeline.scriptTask('Lint: sasslint', 'test/scripts/lint/sasslint.sh'),
-  ])
-}
-
-def test() {
-  tasks([
-    // These 4 tasks require isolation because of hard-coded, conflicting ports and such, so let's use Docker here
-    kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh'),
-    kibanaPipeline.scriptTaskDocker('Mocha Tests', 'test/scripts/test/mocha.sh'),
-    kibanaPipeline.scriptTaskDocker('Karma CI Tests', 'test/scripts/test/karma_ci.sh'),
-    kibanaPipeline.scriptTaskDocker('X-Pack Karma Tests', 'test/scripts/test/xpack_karma.sh'),
-
-    kibanaPipeline.scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh'),
-    kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'),
-    kibanaPipeline.scriptTask('X-Pack SIEM cyclic dependency', 'test/scripts/test/xpack_siem_cyclic_dependency.sh'),
-    kibanaPipeline.scriptTask('X-Pack List cyclic dependency', 'test/scripts/test/xpack_list_cyclic_dependency.sh'),
-    kibanaPipeline.scriptTask('X-Pack Jest Unit Tests', 'test/scripts/test/xpack_jest_unit.sh'),
-  ])
-}
-
-def functionalOss(Map params = [:]) {
-  def config = params ?: [
-    ciGroups: true,
-    firefox: !githubPr.isPr(),
-    accessibility: true,
-    pluginFunctional: true,
-    visualRegression: false
-  ]
-
-  task {
-    kibanaPipeline.buildOss(6)
-
-    if (config.ciGroups) {
-      def ciGroups = 1..12
-      tasks(ciGroups.collect { kibanaPipeline.ossCiGroupProcess(it) })
-    }
-
-    if (config.firefox) {
-      task(kibanaPipeline.functionalTestProcess('oss-firefox', './test/scripts/jenkins_firefox_smoke.sh'))
-    }
-
-    if (config.accessibility) {
-      task(kibanaPipeline.functionalTestProcess('oss-accessibility', './test/scripts/jenkins_accessibility.sh'))
-    }
-
-    if (config.pluginFunctional) {
-      task(kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh'))
-    }
-
-    if (config.visualRegression) {
-      task(kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh'))
-    }
-  }
-}
-
-def functionalXpack(Map params = [:]) {
-  def config = params ?: [
-    ciGroups: true,
-    firefox: !githubPr.isPr(),
-    accessibility: true,
-    pluginFunctional: true,
-    savedObjectsFieldMetrics: true,
-    pageLoadMetrics: false,
-    visualRegression: false,
-  ]
-
-  task {
-    kibanaPipeline.buildXpack(10)
-
-    if (config.ciGroups) {
-      def ciGroups = 1..10
-      tasks(ciGroups.collect { kibanaPipeline.xpackCiGroupProcess(it) })
-    }
-
-    if (config.firefox) {
-      task(kibanaPipeline.functionalTestProcess('xpack-firefox', './test/scripts/jenkins_xpack_firefox_smoke.sh'))
-    }
-
-    if (config.accessibility) {
-      task(kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'))
-    }
-
-    if (config.visualRegression) {
-      task(kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'))
-    }
-
-    if (config.pageLoadMetrics) {
-      task(kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh'))
-    }
-
-    if (config.savedObjectsFieldMetrics) {
-      task(kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'))
-    }
-
-    whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/']) {
-      task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh'))
-    }
-  }
-}
-
-return this
diff --git a/vars/withTaskQueue.groovy b/vars/withTaskQueue.groovy
deleted file mode 100644
index 8132d6264744f2..00000000000000
--- a/vars/withTaskQueue.groovy
+++ /dev/null
@@ -1,154 +0,0 @@
-import groovy.transform.Field
-
-public static @Field TASK_QUEUES = [:]
-public static @Field TASK_QUEUES_COUNTER = 0
-
-/**
-  withTaskQueue creates a queue of "tasks" (just plain closures to execute), and executes them with your desired level of concurrency.
-  This way, you can define, for example, 40 things that need to execute, then only allow 10 of them to execute at once.
-
-  Each "process" will execute in a separate, unique, empty directory.
-  If you want each process to have a bootstrapped kibana repo, check out kibanaPipeline.withCiTaskQueue
-
-  Using the queue currently requires an agent/worker.
-
-  Usage:
-
-  withTaskQueue(parallel: 10) {
-    task { print "This is a task" }
-
-    // This is the same as calling task() multiple times
-    tasks([ { print "Another task" }, { print "And another task" } ])
-
-    // Tasks can queue up subsequent tasks
-    task {
-      buildThing()
-      task { print "I depend on buildThing()" }
-    }
-  }
-
-  You can also define a setup task that each process should execute one time before executing tasks:
-  withTaskQueue(parallel: 10, setup: { sh "my-setup-scrupt.sh" }) {
-    ...
-  }
-
-*/
-def call(Map options = [:], Closure closure) {
-  def config = [ parallel: 10 ] + options
-  def counter = ++TASK_QUEUES_COUNTER
-
-  // We're basically abusing withEnv() to create a "scope" for all steps inside of a withTaskQueue block
-  // This way, we could have multiple task queue instances in the same pipeline
-  withEnv(["TASK_QUEUE_ID=${counter}"]) {
-    withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID] = [
-      tasks: [],
-      tmpFile: sh(script: 'mktemp', returnStdout: true).trim()
-    ]
-
-    closure.call()
-
-    def processesExecuting = 0
-    def processes = [:]
-    def iterationId = 0
-
-    for(def i = 1; i <= config.parallel; i++) {
-      def j = i
-      processes["task-queue-process-${j}"] = {
-        catchErrors {
-          withEnv([
-            "TASK_QUEUE_PROCESS_ID=${j}",
-            "TASK_QUEUE_ITERATION_ID=${++iterationId}"
-          ]) {
-            dir("${WORKSPACE}/parallel/${j}/kibana") {
-              if (config.setup) {
-                config.setup.call(j)
-              }
-
-              def isDone = false
-              while(!isDone) { // TODO some kind of timeout?
-                catchErrors {
-                  if (!getTasks().isEmpty()) {
-                    processesExecuting++
-                    catchErrors {
-                      def task
-                      try {
-                        task = getTasks().pop()
-                      } catch (java.util.NoSuchElementException ex) {
-                        return
-                      }
-
-                      task.call()
-                    }
-                    processesExecuting--
-                    // If a task finishes, and no new tasks were queued up, and nothing else is executing
-                    // Then all of the processes should wake up and exit
-                    if (processesExecuting < 1 && getTasks().isEmpty()) {
-                      taskNotify()
-                    }
-                    return
-                  }
-
-                  if (processesExecuting > 0) {
-                    taskSleep()
-                    return
-                  }
-
-                  // Queue is empty, no processes are executing
-                  isDone = true
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-    parallel(processes)
-  }
-}
-
-// If we sleep in a loop using Groovy code, Pipeline Steps is flooded with Sleep steps
-// So, instead, we just watch a file and `touch` it whenever something happens that could modify the queue
-// There's a 20 minute timeout just in case something goes wrong,
-//    in which case this method will get called again if the process is actually supposed to be waiting.
-def taskSleep() {
-  sh(script: """#!/bin/bash
-    TIMESTAMP=\$(date '+%s' -d "0 seconds ago")
-    for (( i=1; i<=240; i++ ))
-    do
-      if [ "\$(stat -c %Y '${getTmpFile()}')" -ge "\$TIMESTAMP" ]
-      then
-        break
-      else
-        sleep 5
-        if [[ \$i == 240 ]]; then
-          echo "Waited for new tasks for 20 minutes, exiting in case something went wrong"
-        fi
-      fi
-    done
-  """, label: "Waiting for new tasks...")
-}
-
-// Used to let the task queue processes know that either a new task has been queued up, or work is complete
-def taskNotify() {
-  sh "touch '${getTmpFile()}'"
-}
-
-def getTasks() {
-  return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tasks
-}
-
-def getTmpFile() {
-  return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tmpFile
-}
-
-def addTask(Closure closure) {
-  getTasks() << closure
-  taskNotify()
-}
-
-def addTasks(List closures) {
-  closures.reverse().each {
-    getTasks() << it
-  }
-  taskNotify()
-}
diff --git a/vars/workers.groovy b/vars/workers.groovy
index 2e94ce12f34c07..8b7e8525a7ce3b 100644
--- a/vars/workers.groovy
+++ b/vars/workers.groovy
@@ -13,8 +13,6 @@ def label(size) {
       return 'docker && tests-l'
     case 'xl':
       return 'docker && tests-xl'
-    case 'xl-highmem':
-      return 'docker && tests-xl-highmem'
     case 'xxl':
      return 'docker && tests-xxl'
   }
@@ -57,11 +55,6 @@ def base(Map params, Closure closure) {
       }
     }
 
-    sh(
-      script: "mkdir -p ${env.WORKSPACE}/tmp",
-      label: "Create custom temp directory"
-    )
-
     def checkoutInfo = [:]
 
     if (config.scm) {
@@ -96,7 +89,6 @@ def base(Map params, Closure closure) {
       "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
       "TEST_BROWSER_HEADLESS=1",
       "GIT_BRANCH=${checkoutInfo.branch}",
-      "TMPDIR=${env.WORKSPACE}/tmp", // For Chrome and anything else that respects it
     ]) {
       withCredentials([
         string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
@@ -175,9 +167,7 @@ def parallelProcesses(Map params) {
         sleep(delay)
       }
 
-      withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}"]) {
-        processClosure()
-      }
+      processClosure(processNumber)
     }
   }
 
diff --git a/x-pack/plugins/canvas/.storybook/storyshots.test.js b/x-pack/plugins/canvas/.storybook/storyshots.test.js
index e3217ad4dbe58c..b9fe0914b36987 100644
--- a/x-pack/plugins/canvas/.storybook/storyshots.test.js
+++ b/x-pack/plugins/canvas/.storybook/storyshots.test.js
@@ -4,7 +4,6 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import fs from 'fs';
 import path from 'path';
 import moment from 'moment';
 import 'moment-timezone';
@@ -77,12 +76,6 @@ import { RenderedElement } from '../shareable_runtime/components/rendered_elemen
 jest.mock('../shareable_runtime/components/rendered_element');
 RenderedElement.mockImplementation(() => 'RenderedElement');
 
-// Some of the code requires that this directory exists, but the tests don't actually require any css to be present
-const cssDir = path.resolve(__dirname, '../../../../built_assets/css');
-if (!fs.existsSync(cssDir)) {
-  fs.mkdirSync(cssDir, { recursive: true });
-}
-
 addSerializer(styleSheetSerializer);
 
 // Initialize Storyshots and build the Jest Snapshots