diff --git a/.ci/Jenkinsfile_coverage b/.ci/Jenkinsfile_coverage
index 650ef94e1d3da..d6600256bab7b 100644
--- a/.ci/Jenkinsfile_coverage
+++ b/.ci/Jenkinsfile_coverage
@@ -28,7 +28,7 @@
 def handleIngestion(timestamp) {
   kibanaCoverage.collectVcsInfo("### Collect VCS Info")
   kibanaCoverage.generateReports("### Merge coverage reports")
   kibanaCoverage.uploadCombinedReports()
-  kibanaCoverage.ingest(timestamp, '### Injest && Upload')
+  kibanaCoverage.ingest(env.JOB_NAME, BUILD_NUMBER, BUILD_URL, timestamp, '### Ingest && Upload')
   kibanaCoverage.uploadCoverageStaticSite(timestamp)
 }
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 47f9942162f75..a94180e60e05e 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -132,6 +132,7 @@
 
 # Quality Assurance
 /src/dev/code_coverage @elastic/kibana-qa
+/vars/*Coverage.groovy @elastic/kibana-qa
 /test/functional/services/common @elastic/kibana-qa
 /test/functional/services/lib @elastic/kibana-qa
 /test/functional/services/remote @elastic/kibana-qa
diff --git a/src/dev/code_coverage/ingest_coverage/__tests__/ingest.test.js b/src/dev/code_coverage/ingest_coverage/__tests__/ingest.test.js
deleted file mode 100644
index ad5b4da0873b9..0000000000000
--- a/src/dev/code_coverage/ingest_coverage/__tests__/ingest.test.js
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import expect from '@kbn/expect';
-import { maybeTeamAssign } from '../ingest';
-import { COVERAGE_INDEX, TOTALS_INDEX } from '../constants';
-
-describe(`Ingest fns`, () => {
-  describe(`maybeTeamAssign fn`, () => {
-    describe(`against the coverage index`, () => {
-      it(`should have the pipeline prop`, () => {
-        expect(maybeTeamAssign(COVERAGE_INDEX, {})).to.have.property('pipeline');
-      });
-    });
-    describe(`against the totals index`, () => {
-      it(`should not have the pipeline prop`, () => {
-        expect(maybeTeamAssign(TOTALS_INDEX, {})).not.to.have.property('pipeline');
-      });
-    });
-  });
-});
diff --git a/src/dev/code_coverage/ingest_coverage/__tests__/ingest_helpers.test.js b/src/dev/code_coverage/ingest_coverage/__tests__/ingest_helpers.test.js
new file mode 100644
index 0000000000000..7ca7279e0d64c
--- /dev/null
+++ b/src/dev/code_coverage/ingest_coverage/__tests__/ingest_helpers.test.js
@@ -0,0 +1,75 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import expect from '@kbn/expect';
+import { maybeTeamAssign, whichIndex } from '../ingest_helpers';
+import {
+  TOTALS_INDEX,
+  RESEARCH_TOTALS_INDEX,
+  RESEARCH_COVERAGE_INDEX,
+  // COVERAGE_INDEX,
+} from '../constants';
+
+describe(`Ingest Helper fns`, () => {
+  describe(`whichIndex`, () => {
+    describe(`against the research job`, () => {
+      const whichIndexAgainstResearchJob = whichIndex(true);
+      describe(`against the totals index`, () => {
+        const isTotal = true;
+        it(`should return the Research Totals Index`, () => {
+          const actual = whichIndexAgainstResearchJob(isTotal);
+          expect(actual).to.be(RESEARCH_TOTALS_INDEX);
+        });
+      });
+      describe(`against the coverage index`, () => {
+        it(`should return the Research Coverage Index`, () => {
+          const isTotal = false;
+          const actual = whichIndexAgainstResearchJob(isTotal);
+          expect(actual).to.be(RESEARCH_COVERAGE_INDEX);
+        });
+      });
+    });
+    describe(`against the "prod" job`, () => {
+      const whichIndexAgainstProdJob = whichIndex(false);
+      describe(`against the totals index`, () => {
+        const isTotal = true;
+        it(`should return the "Prod" Totals Index`, () => {
+          const actual = whichIndexAgainstProdJob(isTotal);
+          expect(actual).to.be(TOTALS_INDEX);
+        });
+      });
+    });
+  });
+  describe(`maybeTeamAssign`, () => {
+    describe(`against a coverage index`, () => {
+      it(`should have the pipeline prop`, () => {
+        const actual = maybeTeamAssign(true, { a: 'blah' });
+        expect(actual).to.have.property('pipeline');
+      });
+    });
+    describe(`against a totals index`, () => {
+      describe(`for "prod"`, () => {
+        it(`should not have the pipeline prop`, () => {
+          const actual = maybeTeamAssign(false, { b: 'blah' });
+          expect(actual).not.to.have.property('pipeline');
+        });
+      });
+    });
+  });
+});
diff --git a/src/dev/code_coverage/ingest_coverage/__tests__/transforms.test.js b/src/dev/code_coverage/ingest_coverage/__tests__/transforms.test.js
index 8c982b792ed3b..2fd1d5cbe8d48 100644
--- a/src/dev/code_coverage/ingest_coverage/__tests__/transforms.test.js
+++ b/src/dev/code_coverage/ingest_coverage/__tests__/transforms.test.js
@@ -32,17 +32,33 @@ describe(`Transform fn`, () => {
     });
   });
   describe(`coveredFilePath`, () => {
-    it(`should remove the jenkins workspace path`, () => {
-      const obj = {
-        staticSiteUrl:
-          '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
-        COVERAGE_INGESTION_KIBANA_ROOT:
-          '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana',
-      };
-      expect(coveredFilePath(obj)).to.have.property(
-        'coveredFilePath',
-        'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
-      );
+    describe(`in the code-coverage job`, () => {
+      it(`should remove the jenkins workspace path`, () => {
+        const obj = {
+          staticSiteUrl:
+            '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
+          COVERAGE_INGESTION_KIBANA_ROOT:
+            '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana',
+        };
+        expect(coveredFilePath(obj)).to.have.property(
+          'coveredFilePath',
+          'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
+        );
+      });
+    });
+    describe(`in the qa research job`, () => {
+      it(`should remove the jenkins workspace path`, () => {
+        const obj = {
+          staticSiteUrl:
+            '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
+          COVERAGE_INGESTION_KIBANA_ROOT:
+            '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana',
+        };
+        expect(coveredFilePath(obj)).to.have.property(
+          'coveredFilePath',
+          'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
+        );
+      });
     });
   });
   describe(`itemizeVcs`, () => {
diff --git a/src/dev/code_coverage/ingest_coverage/constants.js b/src/dev/code_coverage/ingest_coverage/constants.js
index a7303f0778d1c..ddee7106f4490 100644
--- a/src/dev/code_coverage/ingest_coverage/constants.js
+++ b/src/dev/code_coverage/ingest_coverage/constants.js
@@ -18,4 +18,17 @@
  */
 
 export const COVERAGE_INDEX = process.env.COVERAGE_INDEX || 'kibana_code_coverage';
+
 export const TOTALS_INDEX = process.env.TOTALS_INDEX || `kibana_total_code_coverage`;
+
+export const RESEARCH_COVERAGE_INDEX =
+  process.env.RESEARCH_COVERAGE_INDEX || 'qa_research_code_coverage';
+
+export const RESEARCH_TOTALS_INDEX =
+  process.env.RESEARCH_TOTALS_INDEX || `qa_research_total_code_coverage`;
+
+export const TEAM_ASSIGNMENT_PIPELINE_NAME = process.env.PIPELINE_NAME || 'team_assignment';
+
+export const CODE_COVERAGE_CI_JOB_NAME = 'elastic+kibana+code-coverage';
+export const RESEARCH_CI_JOB_NAME = 'elastic+kibana+qa-research';
+export const CI_JOB_NAME = process.env.COVERAGE_JOB_NAME || RESEARCH_CI_JOB_NAME;
diff --git a/src/dev/code_coverage/ingest_coverage/ingest.js b/src/dev/code_coverage/ingest_coverage/ingest.js
index d6c55a9a655b8..43f0663ad0359 100644
--- a/src/dev/code_coverage/ingest_coverage/ingest.js
+++ b/src/dev/code_coverage/ingest_coverage/ingest.js
@@ -19,40 +19,77 @@
 const { Client } = require('@elastic/elasticsearch');
 import { createFailError } from '@kbn/dev-utils';
-import { COVERAGE_INDEX, TOTALS_INDEX } from './constants';
-import { errMsg, redact } from './ingest_helpers';
-import { noop } from './utils';
+import { RESEARCH_CI_JOB_NAME, TEAM_ASSIGNMENT_PIPELINE_NAME } from './constants';
+import { errMsg, redact, whichIndex } from './ingest_helpers';
+import { pretty, green } from './utils';
 import { right, left } from './either';
 
 const node = process.env.ES_HOST || 'http://localhost:9200';
+
 const client = new Client({ node });
-const pipeline = process.env.PIPELINE_NAME || 'team_assignment';
-const redacted = redact(node);
+const redactedEsHostUrl = redact(node);
+const parse = JSON.parse.bind(null);
+const isResearchJob = process.env.COVERAGE_JOB_NAME === RESEARCH_CI_JOB_NAME ? true : false;
 
 export const ingest = (log) => async (body) => {
-  const index = body.isTotal ? TOTALS_INDEX : COVERAGE_INDEX;
-  const maybeWithPipeline = maybeTeamAssign(index, body);
-  const withIndex = { index, body: maybeWithPipeline };
-  const dontSend = noop;
-
-  log.verbose(withIndex);
-
-  process.env.NODE_ENV === 'integration_test'
-    ? left(null)
-    : right(withIndex).fold(dontSend, async function doSend(finalPayload) {
-        await send(index, redacted, finalPayload);
-      });
+  const isTotal = !!body.isTotal;
+  const index = whichIndex(isResearchJob)(isTotal);
+  const isACoverageIndex = isTotal ? false : true;
+
+  const stringified = pretty(body);
+  const pipeline = TEAM_ASSIGNMENT_PIPELINE_NAME;
+
+  const finalPayload = isACoverageIndex
+    ? { index, body: stringified, pipeline }
+    : { index, body: stringified };
+
+  const justLog = dontSendButLog(log);
+  const doSendToIndex = doSend(index);
+  const doSendRedacted = doSendToIndex(redactedEsHostUrl)(log)(client);
+
+  eitherSendOrNot(finalPayload).fold(justLog, doSendRedacted);
 };
 
-async function send(idx, redacted, requestBody) {
+function doSend(index) {
+  return (redactedEsHostUrl) => (log) => (client) => async (payload) => {
+    const logF = logSend(true)(redactedEsHostUrl)(log);
+    await send(logF, index, redactedEsHostUrl, client, payload);
+  };
+}
+
+function dontSendButLog(log) {
+  return (payload) => {
+    logSend(false)(null)(log)(payload);
+  };
+}
+
+async function send(logF, idx, redactedEsHostUrl, client, requestBody) {
   try {
     await client.index(requestBody);
+    logF(requestBody);
   } catch (e) {
-    throw createFailError(errMsg(idx, redacted, requestBody, e));
+    const { body } = requestBody;
+    const parsed = parse(body);
+    throw createFailError(errMsg(idx, redactedEsHostUrl, parsed, e));
   }
 }
 
-export function maybeTeamAssign(index, body) {
-  const payload = index === TOTALS_INDEX ? body : { ...body, pipeline };
-  return payload;
+const sendMsg = (actuallySent, redactedEsHostUrl, payload) => {
+  const { index, body } = payload;
+  return `### ${actuallySent ? 'Sent' : 'Fake Sent'}:
+${redactedEsHostUrl ? `\t### ES Host: ${redactedEsHostUrl}` : ''}
+\t### Index: ${green(index)}
+\t### payload.body: ${body}
+${process.env.NODE_ENV === 'integration_test' ? `ingest-pipe=>${payload.pipeline}` : ''}
+`;
+};
+
+function logSend(actuallySent) {
+  return (redactedEsHostUrl) => (log) => (payload) => {
+    log.verbose(sendMsg(actuallySent, redactedEsHostUrl, payload));
+  };
+}
+
+function eitherSendOrNot(payload) {
+  return process.env.NODE_ENV === 'integration_test' ? left(payload) : right(payload);
 }
diff --git a/src/dev/code_coverage/ingest_coverage/ingest_helpers.js b/src/dev/code_coverage/ingest_coverage/ingest_helpers.js
index 11e5755bb0282..86bcf03977082 100644
--- a/src/dev/code_coverage/ingest_coverage/ingest_helpers.js
+++ b/src/dev/code_coverage/ingest_coverage/ingest_helpers.js
@@ -20,6 +20,13 @@
 import { always, pretty } from './utils';
 import chalk from 'chalk';
 import { fromNullable } from './either';
+import {
+  COVERAGE_INDEX,
+  RESEARCH_COVERAGE_INDEX,
+  RESEARCH_TOTALS_INDEX,
+  TEAM_ASSIGNMENT_PIPELINE_NAME,
+  TOTALS_INDEX,
+} from './constants';
 
 export function errMsg(index, redacted, body, e) {
   const orig = fromNullable(e.body).fold(
@@ -38,6 +45,9 @@ ${orig}
 
 ### Troubleshooting Hint:
 ${red('Perhaps the coverage data was not merged properly?\n')}
+
+### Error.meta (stringified):
+${pretty(e.meta)}
 `;
 }
 
@@ -59,3 +69,21 @@ function color(whichColor) {
     return chalk[whichColor].bgWhiteBright(x);
   };
 }
+
+export function maybeTeamAssign(isACoverageIndex, body) {
+  const doAddTeam = isACoverageIndex ? true : false;
+  const payload = doAddTeam ? { ...body, pipeline: TEAM_ASSIGNMENT_PIPELINE_NAME } : body;
+  return payload;
+}
+
+export function whichIndex(isResearchJob) {
+  return (isTotal) =>
+    isTotal ? whichTotalsIndex(isResearchJob) : whichCoverageIndex(isResearchJob);
+}
+function whichTotalsIndex(isResearchJob) {
+  return isResearchJob ? RESEARCH_TOTALS_INDEX : TOTALS_INDEX;
+}
+
+function whichCoverageIndex(isResearchJob) {
+  return isResearchJob ? RESEARCH_COVERAGE_INDEX : COVERAGE_INDEX;
+}
diff --git a/src/dev/code_coverage/ingest_coverage/integration_tests/ingest_coverage.test.js b/src/dev/code_coverage/ingest_coverage/integration_tests/ingest_coverage.test.js
index 013adc8b6b0af..2a65839f85ac3 100644
--- a/src/dev/code_coverage/ingest_coverage/integration_tests/ingest_coverage.test.js
+++ b/src/dev/code_coverage/ingest_coverage/integration_tests/ingest_coverage.test.js
@@ -47,7 +47,7 @@ describe('Ingesting coverage', () => {
   describe(`staticSiteUrl`, () => {
     let actualUrl = '';
-    const siteUrlRegex = /staticSiteUrl:\s*(.+,)/;
+    const siteUrlRegex = /"staticSiteUrl":\s*(.+,)/;
 
     beforeAll(async () => {
       const opts = [...verboseArgs, resolved];
@@ -70,8 +70,8 @@ describe('Ingesting coverage', () => {
     });
   });
   describe(`vcsInfo`, () => {
-    let vcsInfo;
     describe(`without a commit msg in the vcs info file`, () => {
+      let vcsInfo;
       const args = [
         'scripts/ingest_coverage.js',
         '--verbose',
@@ -93,9 +93,6 @@
     });
   });
   describe(`team assignment`, () => {
-    let shouldNotHavePipelineOut = '';
-    let shouldIndeedHavePipelineOut = '';
-
     const args = [
       'scripts/ingest_coverage.js',
       '--verbose',
       '--vcsInfoPath',
       './src/dev/code_coverage/ingest_coverage/integration_tests/mocks/VCS_INFO.txt',
       '--path',
     ];
 
-    const teamAssignRE = /pipeline:/;
-
-    beforeAll(async () => {
-      const summaryPath = 'jest-combined/coverage-summary-just-total.json';
-      const resolved = resolve(MOCKS_DIR, summaryPath);
-      const opts = [...args, resolved];
-      const { stdout } = await execa(process.execPath, opts, { cwd: ROOT_DIR, env });
-      shouldNotHavePipelineOut = stdout;
-    });
-    beforeAll(async () => {
-      const summaryPath = 'jest-combined/coverage-summary-manual-mix.json';
-      const resolved = resolve(MOCKS_DIR, summaryPath);
-      const opts = [...args, resolved];
-      const { stdout } = await execa(process.execPath, opts, { cwd: ROOT_DIR, env });
-      shouldIndeedHavePipelineOut = stdout;
-    });
-
-    it(`should not occur when going to the totals index`, () => {
-      expect(teamAssignRE.test(shouldNotHavePipelineOut)).to.not.be.ok();
+    it(`should not occur when going to the totals index`, async () => {
+      const teamAssignRE = /"pipeline":/;
+      const shouldNotHavePipelineOut = await prokJustTotalOrNot(true, args);
+      const actual = teamAssignRE.test(shouldNotHavePipelineOut);
+      expect(actual).to.not.be.ok();
     });
-    it(`should indeed occur when going to the coverage index`, () => {
-      expect(teamAssignRE.test(shouldIndeedHavePipelineOut)).to.be.ok();
+    it(`should indeed occur when going to the coverage index`, async () => {
+      const shouldIndeedHavePipelineOut = await prokJustTotalOrNot(false, args);
+      const onlyForTestingRe = /ingest-pipe=>team_assignment/;
+      const actual = onlyForTestingRe.test(shouldIndeedHavePipelineOut);
+      expect(actual).to.be.ok();
     });
   });
 });
+
+async function prokJustTotalOrNot(isTotal, args) {
+  const justTotalPath = 'jest-combined/coverage-summary-just-total.json';
+  const notJustTotalPath = 'jest-combined/coverage-summary-manual-mix.json';
+
+  const resolved = resolve(MOCKS_DIR, isTotal ? justTotalPath : notJustTotalPath);
+  const opts = [...args, resolved];
+  const { stdout } = await execa(process.execPath, opts, { cwd: ROOT_DIR, env });
+  return stdout;
+}
diff --git a/src/dev/code_coverage/ingest_coverage/integration_tests/mocks/jest-combined/coverage-summary-NO-total.json b/src/dev/code_coverage/ingest_coverage/integration_tests/mocks/jest-combined/coverage-summary-NO-total.json
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/dev/code_coverage/ingest_coverage/process.js b/src/dev/code_coverage/ingest_coverage/process.js
index 6b9c8f09febfe..85a42cfffa6e2 100644
--- a/src/dev/code_coverage/ingest_coverage/process.js
+++ b/src/dev/code_coverage/ingest_coverage/process.js
@@ -36,13 +36,17 @@ import {
 import { resolve } from 'path';
 import { createReadStream } from 'fs';
 import readline from 'readline';
+import * as moment from 'moment';
 
 const ROOT = '../../../..';
 const COVERAGE_INGESTION_KIBANA_ROOT =
   process.env.COVERAGE_INGESTION_KIBANA_ROOT || resolve(__dirname, ROOT);
 const ms = process.env.DELAY || 0;
 const staticSiteUrlBase = process.env.STATIC_SITE_URL_BASE || 'https://kibana-coverage.elastic.dev';
-const addPrePopulatedTimeStamp = addTimeStamp(process.env.TIME_STAMP);
+const format = 'YYYY-MM-DDTHH:mm:SS';
+// eslint-disable-next-line import/namespace
+const formatted = `${moment.utc().format(format)}Z`;
+const addPrePopulatedTimeStamp = addTimeStamp(process.env.TIME_STAMP || formatted);
 const preamble = pipe(statsAndstaticSiteUrl, rootDirAndOrigPath, buildId, addPrePopulatedTimeStamp);
 const addTestRunnerAndStaticSiteUrl = pipe(testRunner, staticSite(staticSiteUrlBase));
diff --git a/src/dev/code_coverage/shell_scripts/ingest_coverage.sh b/src/dev/code_coverage/shell_scripts/ingest_coverage.sh
index 2dae75484d68f..d3cf31fc0f427 100644
--- a/src/dev/code_coverage/shell_scripts/ingest_coverage.sh
+++ b/src/dev/code_coverage/shell_scripts/ingest_coverage.sh
@@ -3,11 +3,14 @@
 echo "### Ingesting Code Coverage"
 echo ""
 
+COVERAGE_JOB_NAME=$1
+export COVERAGE_JOB_NAME
+echo "### debug COVERAGE_JOB_NAME: ${COVERAGE_JOB_NAME}"
 
-BUILD_ID=$1
+BUILD_ID=$2
 export BUILD_ID
 
-CI_RUN_URL=$2
+CI_RUN_URL=$3
 export CI_RUN_URL
 echo "### debug CI_RUN_URL: ${CI_RUN_URL}"
 
@@ -17,6 +20,9 @@ export ES_HOST
 
 STATIC_SITE_URL_BASE='https://kibana-coverage.elastic.dev'
 export STATIC_SITE_URL_BASE
 
+DELAY=100
+export DELAY
+
 for x in jest functional; do
   echo "### Ingesting coverage for ${x}"
diff --git a/vars/kibanaCoverage.groovy b/vars/kibanaCoverage.groovy
index 66b16566418b5..e511d7a8fc15e 100644
--- a/vars/kibanaCoverage.groovy
+++ b/vars/kibanaCoverage.groovy
@@ -125,31 +125,31 @@ def uploadCombinedReports() {
   )
 }
 
-def ingestData(buildNum, buildUrl, title) {
+def ingestData(jobName, buildNum, buildUrl, title) {
   kibanaPipeline.bash("""
     source src/dev/ci_setup/setup_env.sh
     yarn kbn bootstrap --prefer-offline
 
    # Using existing target/kibana-coverage folder
-    . src/dev/code_coverage/shell_scripts/ingest_coverage.sh ${buildNum} ${buildUrl}
+    . src/dev/code_coverage/shell_scripts/ingest_coverage.sh '${jobName}' ${buildNum} '${buildUrl}'
  """, title)
 }
 
-def ingestWithVault(buildNum, buildUrl, title) {
+def ingestWithVault(jobName, buildNum, buildUrl, title) {
   def vaultSecret = 'secret/kibana-issues/prod/coverage/elasticsearch'
   withVaultSecret(secret: vaultSecret, secret_field: 'host', variable_name: 'HOST_FROM_VAULT') {
     withVaultSecret(secret: vaultSecret, secret_field: 'username', variable_name: 'USER_FROM_VAULT') {
       withVaultSecret(secret: vaultSecret, secret_field: 'password', variable_name: 'PASS_FROM_VAULT') {
-        ingestData(buildNum, buildUrl, title)
+        ingestData(jobName, buildNum, buildUrl, title)
      }
    }
  }
 }
 
-def ingest(timestamp, title) {
+def ingest(jobName, buildNumber, buildUrl, timestamp, title) {
   withEnv([
     "TIME_STAMP=${timestamp}",
   ]) {
-    ingestWithVault(BUILD_NUMBER, BUILD_URL, title)
+    ingestWithVault(jobName, buildNumber, buildUrl, title)
   }
 }
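
Note (illustrative only, not part of the diff): the curried index-selection and team-assignment helpers added in ingest_helpers.js compose roughly as sketched below. The constants are inlined here with their default values from constants.js above, and the sample document fields are made up.

// Sketch of how whichIndex and maybeTeamAssign are expected to combine during ingestion.
const COVERAGE_INDEX = 'kibana_code_coverage';
const TOTALS_INDEX = 'kibana_total_code_coverage';
const RESEARCH_COVERAGE_INDEX = 'qa_research_code_coverage';
const RESEARCH_TOTALS_INDEX = 'qa_research_total_code_coverage';
const TEAM_ASSIGNMENT_PIPELINE_NAME = 'team_assignment';

// Curried: pick the job flavor (research vs "prod") first, then the document type.
const whichIndex = (isResearchJob) => (isTotal) =>
  isTotal
    ? (isResearchJob ? RESEARCH_TOTALS_INDEX : TOTALS_INDEX)
    : (isResearchJob ? RESEARCH_COVERAGE_INDEX : COVERAGE_INDEX);

// Only per-file coverage documents are routed through the team-assignment pipeline.
const maybeTeamAssign = (isACoverageIndex, body) =>
  isACoverageIndex ? { ...body, pipeline: TEAM_ASSIGNMENT_PIPELINE_NAME } : body;

// Example: a per-file coverage document produced by the research job.
const index = whichIndex(true)(false); // 'qa_research_code_coverage'
const payload = maybeTeamAssign(true, { coveredFilePath: 'x-pack/plugins/foo/bar.js' });
console.log(index, payload.pipeline); // qa_research_code_coverage team_assignment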