From 0f4e99949a9c5eaa639f076758e1d081f349678e Mon Sep 17 00:00:00 2001
From: Tim Sullivan
Date: Thu, 24 Jun 2021 12:14:40 -0700
Subject: [PATCH] Reporting: Task Manager integration for 7.x (#101339)

* Reporting: Task Manager integration for 7.x
* update test assertions, use more explicit types
* update comment
* Update x-pack/plugins/reporting/server/lib/store/store.ts
Co-authored-by: Vadim Dalecky
* fix field mapping
* Update x-pack/plugins/reporting/server/lib/store/store.ts
Co-authored-by: Jean-Louis Leysens
* Report also implements ReportDocumentHead
* the actual ID of the task is prefixed with `task:`
* remove pointless update to the report instance after failing
* comment clarification
* sync with 8.0

Co-authored-by: Vadim Dalecky
Co-authored-by: Jean-Louis Leysens
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
---
 .../reporting/common/schema_utils.test.ts     |   16 +
 .../plugins/reporting/common/schema_utils.ts  |    7 +
 x-pack/plugins/reporting/common/types.ts      |    3 +-
 x-pack/plugins/reporting/kibana.json          |    1 +
 x-pack/plugins/reporting/server/core.ts       |   44 +-
 .../server/export_types/csv/create_job.ts     |    5 +-
 .../export_types/csv/execute_job.test.ts      |    5 +-
 .../server/export_types/csv/execute_job.ts    |    4 +-
 .../export_types/png/create_job/index.ts      |    4 +-
 .../printable_pdf/create_job/index.ts         |    4 +-
 .../reporting/server/lib/create_queue.ts      |   75 --
 .../server/lib/create_worker.test.ts          |  107 --
 .../reporting/server/lib/create_worker.ts     |   77 --
 .../reporting/server/lib/enqueue_job.ts       |   44 +-
 .../server/lib/esqueue/__fixtures__/job.js    |   24 -
 .../server/lib/esqueue/__fixtures__/queue.js  |   18 -
 .../server/lib/esqueue/__fixtures__/worker.js |   23 -
 .../lib/esqueue/constants/default_settings.js |   17 -
 .../server/lib/esqueue/constants/events.js    |   21 -
 .../server/lib/esqueue/constants/index.js     |   16 -
 .../lib/esqueue/create_tagged_logger.ts       |   27 -
 .../lib/esqueue/helpers/create_index.js       |  115 --
 .../server/lib/esqueue/helpers/errors.js      |   27 -
 .../server/lib/esqueue/helpers/errors.test.js |   57 -
 .../reporting/server/lib/esqueue/index.js     |   55 -
 .../server/lib/esqueue/index.test.js          |  158 ---
 .../reporting/server/lib/esqueue/worker.js    |  444 -------
 .../server/lib/esqueue/worker.test.js         | 1133 -----------------
 x-pack/plugins/reporting/server/lib/index.ts  |    2 -
 .../plugins/reporting/server/lib/statuses.ts  |    5 +-
 .../reporting/server/lib/store/mapping.ts     |   18 +-
 .../reporting/server/lib/store/report.test.ts |   72 +-
 .../reporting/server/lib/store/report.ts      |   85 +-
 .../reporting/server/lib/store/store.test.ts  |  210 +--
 .../reporting/server/lib/store/store.ts       |  271 +++-
 .../server/lib/tasks/error_logger.test.ts     |   57 +
 .../server/lib/tasks/error_logger.ts          |   44 +
 .../server/lib/tasks/execute_report.test.ts   |   85 ++
 .../server/lib/tasks/execute_report.ts        |  431 +++++++
 .../reporting/server/lib/tasks/index.ts       |   25 +-
 .../server/lib/tasks/monitor_report.test.ts   |   65 +
 .../server/lib/tasks/monitor_reports.ts       |  165 +++
 .../plugins/reporting/server/plugin.test.ts   |    6 +-
 x-pack/plugins/reporting/server/plugin.ts     |   15 +-
 .../reporting/server/routes/generation.ts     |    4 +-
 .../create_mock_reportingplugin.ts            |    7 +-
 x-pack/plugins/reporting/server/types.ts      |    3 +
 .../reporting_without_security/job_apis.ts    |    9 +-
 48 files changed, 1460 insertions(+), 2650 deletions(-)
 create mode 100644 x-pack/plugins/reporting/common/schema_utils.test.ts
 delete mode 100644 x-pack/plugins/reporting/server/lib/create_queue.ts
 delete mode 100644 x-pack/plugins/reporting/server/lib/create_worker.test.ts
 delete mode 100644 x-pack/plugins/reporting/server/lib/create_worker.ts
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/job.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/queue.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/worker.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/constants/default_settings.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/constants/events.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/constants/index.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/create_tagged_logger.ts
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/helpers/create_index.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.test.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/index.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/index.test.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/worker.js
 delete mode 100644 x-pack/plugins/reporting/server/lib/esqueue/worker.test.js
 create mode 100644 x-pack/plugins/reporting/server/lib/tasks/error_logger.test.ts
 create mode 100644 x-pack/plugins/reporting/server/lib/tasks/error_logger.ts
 create mode 100644 x-pack/plugins/reporting/server/lib/tasks/execute_report.test.ts
 create mode 100644 x-pack/plugins/reporting/server/lib/tasks/execute_report.ts
 create mode 100644 x-pack/plugins/reporting/server/lib/tasks/monitor_report.test.ts
 create mode 100644 x-pack/plugins/reporting/server/lib/tasks/monitor_reports.ts

diff --git a/x-pack/plugins/reporting/common/schema_utils.test.ts b/x-pack/plugins/reporting/common/schema_utils.test.ts
new file mode 100644
index 00000000000000..6e9bb2db754375
--- /dev/null
+++ b/x-pack/plugins/reporting/common/schema_utils.test.ts
@@ -0,0 +1,16 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +import moment from 'moment'; +import { numberToDuration } from './schema_utils'; + +describe('Schema Utils', () => { + it('numberToDuration converts a number/Duration into a Duration object', () => { + expect(numberToDuration(500)).toMatchInlineSnapshot(`"PT0.5S"`); + expect(numberToDuration(moment.duration(1, 'hour'))).toMatchInlineSnapshot(`"PT1H"`); + }); +}); diff --git a/x-pack/plugins/reporting/common/schema_utils.ts b/x-pack/plugins/reporting/common/schema_utils.ts index 6119a2f8582f18..798440bfbb69c4 100644 --- a/x-pack/plugins/reporting/common/schema_utils.ts +++ b/x-pack/plugins/reporting/common/schema_utils.ts @@ -20,6 +20,13 @@ export const durationToNumber = (value: number | moment.Duration): number => { return value.asMilliseconds(); }; +export const numberToDuration = (value: number | moment.Duration): moment.Duration => { + if (typeof value === 'number') { + return moment.duration(value, 'milliseconds'); + } + return value; +}; + export const byteSizeValueToNumber = (value: number | ByteSizeValue) => { if (typeof value === 'number') { return value; diff --git a/x-pack/plugins/reporting/common/types.ts b/x-pack/plugins/reporting/common/types.ts index 2ce6f4ee63fbd4..dff513427c57a1 100644 --- a/x-pack/plugins/reporting/common/types.ts +++ b/x-pack/plugins/reporting/common/types.ts @@ -68,6 +68,7 @@ export interface ReportSource { }; meta: { objectType: string; layout?: string }; browser_type: string; + migration_version: string; max_attempts: number; timeout: number; @@ -77,7 +78,7 @@ export interface ReportSource { started_at?: string; completed_at?: string; created_at: string; - process_expiration?: string; + process_expiration?: string | null; // must be set to null to clear the expiration } /* diff --git a/x-pack/plugins/reporting/kibana.json b/x-pack/plugins/reporting/kibana.json index 3d443cc2441fe0..ddba61e9a0b8db 100644 --- a/x-pack/plugins/reporting/kibana.json +++ b/x-pack/plugins/reporting/kibana.json @@ -10,6 +10,7 @@ "management", "licensing", "uiActions", + "taskManager", "embeddable", "screenshotMode", "share", diff --git a/x-pack/plugins/reporting/server/core.ts b/x-pack/plugins/reporting/server/core.ts index f996ca2a126147..b7f3ebe9dcfa8b 100644 --- a/x-pack/plugins/reporting/server/core.ts +++ b/x-pack/plugins/reporting/server/core.ts @@ -24,13 +24,14 @@ import { LicensingPluginSetup } from '../../licensing/server'; import { SecurityPluginSetup } from '../../security/server'; import { DEFAULT_SPACE_ID } from '../../spaces/common/constants'; import { SpacesPluginSetup } from '../../spaces/server'; +import { TaskManagerSetupContract, TaskManagerStartContract } from '../../task_manager/server'; import { ReportingConfig, ReportingSetup } from './'; import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory'; import { ReportingConfigType } from './config'; import { checkLicense, getExportTypesRegistry, LevelLogger } from './lib'; -import { ESQueueInstance } from './lib/create_queue'; import { screenshotsObservableFactory, ScreenshotsObservableFn } from './lib/screenshots'; import { ReportingStore } from './lib/store'; +import { ExecuteReportTask, MonitorReportsTask, ReportTaskParams } from './lib/tasks'; import { ReportingPluginRouter } from './types'; export interface ReportingInternalSetup { @@ -40,6 +41,7 @@ export interface ReportingInternalSetup { licensing: LicensingPluginSetup; security?: SecurityPluginSetup; spaces?: SpacesPluginSetup; + taskManager: TaskManagerSetupContract; screenshotMode: ScreenshotModePluginSetup; 
logger: LevelLogger; } @@ -51,7 +53,7 @@ export interface ReportingInternalStart { uiSettings: UiSettingsServiceStart; esClient: IClusterClient; data: DataPluginStart; - esqueue: ESQueueInstance; + taskManager: TaskManagerStartContract; logger: LevelLogger; } @@ -62,17 +64,24 @@ export class ReportingCore { private readonly pluginStart$ = new Rx.ReplaySubject(); // observe async background startDeps private deprecatedAllowedRoles: string[] | false = false; // DEPRECATED. If `false`, the deprecated features have been disableed private exportTypesRegistry = getExportTypesRegistry(); - private config?: ReportingConfig; + private executeTask: ExecuteReportTask; + private monitorTask: MonitorReportsTask; + private config?: ReportingConfig; // final config, includes dynamic values based on OS type + private executing: Set; public getContract: () => ReportingSetup; constructor(private logger: LevelLogger, context: PluginInitializerContext) { const syncConfig = context.config.get(); this.deprecatedAllowedRoles = syncConfig.roles.enabled ? syncConfig.roles.allow : false; + this.executeTask = new ExecuteReportTask(this, syncConfig, this.logger); + this.monitorTask = new MonitorReportsTask(this, syncConfig, this.logger); this.getContract = () => ({ usesUiCapabilities: () => syncConfig.roles.enabled === false, }); + + this.executing = new Set(); } /* @@ -81,14 +90,25 @@ export class ReportingCore { public pluginSetup(setupDeps: ReportingInternalSetup) { this.pluginSetup$.next(true); // trigger the observer this.pluginSetupDeps = setupDeps; // cache + + const { executeTask, monitorTask } = this; + setupDeps.taskManager.registerTaskDefinitions({ + [executeTask.TYPE]: executeTask.getTaskDefinition(), + [monitorTask.TYPE]: monitorTask.getTaskDefinition(), + }); } /* * Register startDeps */ - public pluginStart(startDeps: ReportingInternalStart) { + public async pluginStart(startDeps: ReportingInternalStart) { this.pluginStart$.next(startDeps); // trigger the observer this.pluginStartDeps = startDeps; // cache + + const { taskManager } = startDeps; + const { executeTask, monitorTask } = this; + // enable this instance to generate reports and to monitor for pending reports + await Promise.all([executeTask.init(taskManager), monitorTask.init(taskManager)]); } /* @@ -193,8 +213,8 @@ export class ReportingCore { return this.exportTypesRegistry; } - public async getEsqueue() { - return (await this.getPluginStartDeps()).esqueue; + public async scheduleTask(report: ReportTaskParams) { + return await this.executeTask.scheduleTask(report); } public async getStore() { @@ -296,4 +316,16 @@ export class ReportingCore { const startDeps = await this.getPluginStartDeps(); return startDeps.esClient; } + + public trackReport(reportId: string) { + this.executing.add(reportId); + } + + public untrackReport(reportId: string) { + this.executing.delete(reportId); + } + + public countConcurrentReports(): number { + return this.executing.size; + } } diff --git a/x-pack/plugins/reporting/server/export_types/csv/create_job.ts b/x-pack/plugins/reporting/server/export_types/csv/create_job.ts index d0182d47e479de..876d190c9eee84 100644 --- a/x-pack/plugins/reporting/server/export_types/csv/create_job.ts +++ b/x-pack/plugins/reporting/server/export_types/csv/create_job.ts @@ -5,7 +5,6 @@ * 2.0. 
*/ -import { CSV_JOB_TYPE_DEPRECATED } from '../../../common/constants'; import { cryptoFactory } from '../../lib'; import { CreateJobFn, CreateJobFnFactory } from '../../types'; import { @@ -16,9 +15,7 @@ import { export const createJobFnFactory: CreateJobFnFactory< CreateJobFn -> = function createJobFactoryFn(reporting, parentLogger) { - const logger = parentLogger.clone([CSV_JOB_TYPE_DEPRECATED, 'create-job']); - +> = function createJobFactoryFn(reporting, logger) { const config = reporting.getConfig(); const crypto = cryptoFactory(config.get('encryptionKey')); diff --git a/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts b/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts index 90d34acf28ea93..32b5370371cced 100644 --- a/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts +++ b/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts @@ -676,8 +676,7 @@ describe('CSV Execute Job', function () { }); }); - // FLAKY: https://github.com/elastic/kibana/issues/43069 - describe.skip('cancellation', function () { + describe('cancellation', function () { const scrollId = getRandomScrollId(); beforeEach(function () { @@ -709,7 +708,7 @@ describe('CSV Execute Job', function () { cancellationToken ); - await delay(100); + await delay(250); expect(mockEsClient.search).toHaveBeenCalled(); expect(mockEsClient.scroll).toHaveBeenCalled(); diff --git a/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts b/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts index 32007f88e80afb..57559d136ff3e5 100644 --- a/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts +++ b/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { CONTENT_TYPE_CSV, CSV_JOB_TYPE_DEPRECATED } from '../../../common/constants'; +import { CONTENT_TYPE_CSV } from '../../../common/constants'; import { RunTaskFn, RunTaskFnFactory } from '../../types'; import { decryptJobHeaders } from '../common'; import { createGenerateCsv } from './generate_csv'; @@ -18,7 +18,7 @@ export const runTaskFnFactory: RunTaskFnFactory< return async function runTask(jobId, job, cancellationToken) { const elasticsearch = await reporting.getEsClient(); - const logger = parentLogger.clone([CSV_JOB_TYPE_DEPRECATED, 'execute-job', jobId]); + const logger = parentLogger.clone([jobId]); const generateCsv = createGenerateCsv(logger); const encryptionKey = config.get('encryptionKey'); diff --git a/x-pack/plugins/reporting/server/export_types/png/create_job/index.ts b/x-pack/plugins/reporting/server/export_types/png/create_job/index.ts index 62af9a9b80120f..488a339e3ef4ba 100644 --- a/x-pack/plugins/reporting/server/export_types/png/create_job/index.ts +++ b/x-pack/plugins/reporting/server/export_types/png/create_job/index.ts @@ -5,7 +5,6 @@ * 2.0. 
*/ -import { PNG_JOB_TYPE } from '../../../../common/constants'; import { cryptoFactory } from '../../../lib'; import { CreateJobFn, CreateJobFnFactory } from '../../../types'; import { validateUrls } from '../../common'; @@ -13,8 +12,7 @@ import { JobParamsPNG, TaskPayloadPNG } from '../types'; export const createJobFnFactory: CreateJobFnFactory< CreateJobFn -> = function createJobFactoryFn(reporting, parentLogger) { - const logger = parentLogger.clone([PNG_JOB_TYPE, 'execute-job']); +> = function createJobFactoryFn(reporting, logger) { const config = reporting.getConfig(); const crypto = cryptoFactory(config.get('encryptionKey')); diff --git a/x-pack/plugins/reporting/server/export_types/printable_pdf/create_job/index.ts b/x-pack/plugins/reporting/server/export_types/printable_pdf/create_job/index.ts index 001818b9ee85a3..d2174569beb70c 100644 --- a/x-pack/plugins/reporting/server/export_types/printable_pdf/create_job/index.ts +++ b/x-pack/plugins/reporting/server/export_types/printable_pdf/create_job/index.ts @@ -6,7 +6,6 @@ */ import { KibanaRequest, RequestHandlerContext } from 'src/core/server'; -import { PDF_JOB_TYPE } from '../../../../common/constants'; import { cryptoFactory } from '../../../lib'; import { CreateJobFn, CreateJobFnFactory } from '../../../types'; import { validateUrls } from '../../common'; @@ -16,10 +15,9 @@ import { compatibilityShimFactory } from './compatibility_shim'; export const createJobFnFactory: CreateJobFnFactory< CreateJobFn -> = function createJobFactoryFn(reporting, parentLogger) { +> = function createJobFactoryFn(reporting, logger) { const config = reporting.getConfig(); const crypto = cryptoFactory(config.get('encryptionKey')); - const logger = parentLogger.clone([PDF_JOB_TYPE, 'create-job']); const compatibilityShim = compatibilityShimFactory(logger); // 7.x and below only diff --git a/x-pack/plugins/reporting/server/lib/create_queue.ts b/x-pack/plugins/reporting/server/lib/create_queue.ts deleted file mode 100644 index f3549836b90218..00000000000000 --- a/x-pack/plugins/reporting/server/lib/create_queue.ts +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { ElasticsearchClient } from 'kibana/server'; -import { ReportingCore } from '../core'; -import { createWorkerFactory } from './create_worker'; -// @ts-ignore -import { Esqueue } from './esqueue'; -import { createTaggedLogger } from './esqueue/create_tagged_logger'; -import { LevelLogger } from './level_logger'; -import { ReportDocument, ReportingStore } from './store'; -import { TaskRunResult } from './tasks'; - -interface ESQueueWorker { - on: (event: string, handler: any) => void; -} - -export interface ESQueueInstance { - registerWorker: ( - pluginId: string, - workerFn: GenericWorkerFn, - workerOptions: { - kibanaName: string; - kibanaId: string; - interval: number; - intervalErrorMultiplier: number; - } - ) => ESQueueWorker; -} - -// GenericWorkerFn is a generic for ImmediateExecuteFn | ESQueueWorkerExecuteFn, -type GenericWorkerFn = ( - jobSource: ReportDocument, - ...workerRestArgs: any[] -) => void | Promise; - -export async function createQueueFactory( - reporting: ReportingCore, - store: ReportingStore, - logger: LevelLogger, - client: ElasticsearchClient -): Promise { - const config = reporting.getConfig(); - - // esqueue-related - const queueTimeout = config.get('queue', 'timeout'); - const isPollingEnabled = config.get('queue', 'pollEnabled'); - const queueOptions = { - client, - timeout: queueTimeout, - logger: createTaggedLogger(logger, ['esqueue', 'queue-worker']), - }; - - const queue: ESQueueInstance = new Esqueue(store, queueOptions); - - if (isPollingEnabled) { - // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(reporting, logger); - await createWorker(queue); - } else { - logger.info( - 'xpack.reporting.queue.pollEnabled is set to false. This Kibana instance ' + - 'will not poll for idle jobs to claim and execute. Make sure another ' + - 'Kibana instance with polling enabled is running in this cluster so ' + - 'reporting jobs can complete.', - ['create_queue'] - ); - } - - return queue; -} diff --git a/x-pack/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/plugins/reporting/server/lib/create_worker.test.ts deleted file mode 100644 index 448e797acd59d1..00000000000000 --- a/x-pack/plugins/reporting/server/lib/create_worker.test.ts +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import type { DeeplyMockedKeys } from '@kbn/utility-types/jest'; -import { ElasticsearchClient } from 'kibana/server'; -import * as sinon from 'sinon'; -import { elasticsearchServiceMock } from 'src/core/server/mocks'; -import { ReportingCore } from '../../server'; -import { - createMockConfigSchema, - createMockLevelLogger, - createMockReportingCore, -} from '../test_helpers'; -import { createWorkerFactory } from './create_worker'; -// @ts-ignore -import { Esqueue } from './esqueue'; -import { ExportTypesRegistry } from './export_types_registry'; - -const logger = createMockLevelLogger(); -const reportingConfig = { - queue: { pollInterval: 3300, pollIntervalErrorMultiplier: 10 }, - server: { name: 'test-server-123', uuid: 'g9ymiujthvy6v8yrh7567g6fwzgzftzfr' }, -}; - -const executeJobFactoryStub = sinon.stub(); - -const getMockExportTypesRegistry = ( - exportTypes: any[] = [{ runTaskFnFactory: executeJobFactoryStub }] -) => - ({ - getAll: () => exportTypes, - } as ExportTypesRegistry); - -describe('Create Worker', () => { - let mockReporting: ReportingCore; - let queue: Esqueue; - let client: DeeplyMockedKeys; - - beforeEach(async () => { - mockReporting = await createMockReportingCore(createMockConfigSchema(reportingConfig)); - mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); - - ({ asInternalUser: client } = elasticsearchServiceMock.createClusterClient()); - queue = new Esqueue('reporting-queue', { client }); - executeJobFactoryStub.reset(); - }); - - test('Creates a single Esqueue worker for Reporting', async () => { - const createWorker = createWorkerFactory(mockReporting, logger); - const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); - - await createWorker(queue); - - sinon.assert.callCount(executeJobFactoryStub, 1); - sinon.assert.callCount(registerWorkerSpy, 1); - - const { firstCall } = registerWorkerSpy; - const [workerName, workerFn, workerOpts] = firstCall.args; - - expect(workerName).toBe('reporting'); - expect(workerFn).toMatchInlineSnapshot(`[Function]`); - expect(workerOpts).toMatchInlineSnapshot(` -Object { - "interval": 3300, - "intervalErrorMultiplier": 10, - "kibanaId": "g9ymiujthvy6v8yrh7567g6fwzgzftzfr", - "kibanaName": "test-server-123", -} -`); - }); - - test('Creates a single Esqueue worker for Reporting, even if there are multiple export types', async () => { - const exportTypesRegistry = getMockExportTypesRegistry([ - { runTaskFnFactory: executeJobFactoryStub }, - { runTaskFnFactory: executeJobFactoryStub }, - { runTaskFnFactory: executeJobFactoryStub }, - { runTaskFnFactory: executeJobFactoryStub }, - { runTaskFnFactory: executeJobFactoryStub }, - ]); - mockReporting.getExportTypesRegistry = () => exportTypesRegistry; - const createWorker = createWorkerFactory(mockReporting, logger); - const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); - - await createWorker(queue); - - sinon.assert.callCount(executeJobFactoryStub, 5); - sinon.assert.callCount(registerWorkerSpy, 1); - - const { firstCall } = registerWorkerSpy; - const [workerName, workerFn, workerOpts] = firstCall.args; - - expect(workerName).toBe('reporting'); - expect(workerFn).toMatchInlineSnapshot(`[Function]`); - expect(workerOpts).toMatchInlineSnapshot(` -Object { - "interval": 3300, - "intervalErrorMultiplier": 10, - "kibanaId": "g9ymiujthvy6v8yrh7567g6fwzgzftzfr", - "kibanaName": "test-server-123", -} -`); - }); -}); diff --git a/x-pack/plugins/reporting/server/lib/create_worker.ts b/x-pack/plugins/reporting/server/lib/create_worker.ts deleted file 
mode 100644 index 6bbfd674604d90..00000000000000 --- a/x-pack/plugins/reporting/server/lib/create_worker.ts +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { CancellationToken } from '../../common'; -import { PLUGIN_ID } from '../../common/constants'; -import { durationToNumber } from '../../common/schema_utils'; -import { ReportingCore } from '../../server'; -import { LevelLogger } from '../../server/lib'; -import { RunTaskFn } from '../../server/types'; -import { ESQueueInstance } from './create_queue'; -// @ts-ignore untyped dependency -import { events as esqueueEvents } from './esqueue'; -import { ReportDocument } from './store'; -import { ReportTaskParams } from './tasks'; - -export function createWorkerFactory(reporting: ReportingCore, logger: LevelLogger) { - const config = reporting.getConfig(); - const queueConfig = config.get('queue'); - const kibanaName = config.kbnConfig.get('server', 'name'); - const kibanaId = config.kbnConfig.get('server', 'uuid'); - - // Once more document types are added, this will need to be passed in - return async function createWorker(queue: ESQueueInstance) { - // export type / execute job map - const jobExecutors: Map = new Map(); - - for (const exportType of reporting.getExportTypesRegistry().getAll()) { - const jobExecutor = exportType.runTaskFnFactory(reporting, logger); - jobExecutors.set(exportType.jobType, jobExecutor); - } - - const workerFn = ( - jobSource: ReportDocument, - payload: ReportTaskParams['payload'], - cancellationToken: CancellationToken - ) => { - const { - _id: jobId, - _source: { jobtype: jobType }, - } = jobSource; - - if (!jobId) { - throw new Error(`Claimed job is missing an ID!: ${JSON.stringify(jobSource)}`); - } - - const jobTypeExecutor = jobExecutors.get(jobType); - if (!jobTypeExecutor) { - throw new Error(`Unable to find a job executor for the claimed job: [${jobId}]`); - } - - // pass the work to the jobExecutor - return jobTypeExecutor(jobId, payload, cancellationToken); - }; - - const workerOptions = { - kibanaName, - kibanaId, - interval: durationToNumber(queueConfig.pollInterval), - intervalErrorMultiplier: queueConfig.pollIntervalErrorMultiplier, - }; - const worker = queue.registerWorker(PLUGIN_ID, workerFn, workerOptions); - - worker.on(esqueueEvents.EVENT_WORKER_COMPLETE, (res: any) => { - logger.debug(`Worker completed: (${res.job.id})`); - }); - worker.on(esqueueEvents.EVENT_WORKER_JOB_EXECUTION_ERROR, (res: any) => { - logger.debug(`Worker error: (${res.job.id})`); - }); - worker.on(esqueueEvents.EVENT_WORKER_JOB_TIMEOUT, (res: any) => { - logger.debug(`Job timeout exceeded: (${res.job.id})`); - }); - }; -} diff --git a/x-pack/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/plugins/reporting/server/lib/enqueue_job.ts index b409e5f8d9ac87..70492b415f961d 100644 --- a/x-pack/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/plugins/reporting/server/lib/enqueue_job.ts @@ -7,7 +7,6 @@ import { KibanaRequest } from 'src/core/server'; import { ReportingCore } from '../'; -import { durationToNumber } from '../../common/schema_utils'; import { BaseParams, ReportingUser } from '../types'; import { LevelLogger } from './'; import { Report } from './store'; @@ -25,15 +24,7 @@ export function enqueueJobFactory( reporting: ReportingCore, parentLogger: 
LevelLogger ): EnqueueJobFn { - const logger = parentLogger.clone(['queue-job']); - const config = reporting.getConfig(); - const jobSettings = { - timeout: durationToNumber(config.get('queue', 'timeout')), - browser_type: config.get('capture', 'browser', 'type'), - max_attempts: config.get('capture', 'maxAttempts'), - priority: 10, // unused - }; - + const logger = parentLogger.clone(['createJob']); return async function enqueueJob( exportTypeId: string, jobParams: BaseParams, @@ -56,22 +47,29 @@ export function enqueueJobFactory( reporting.getStore(), ]); + const config = reporting.getConfig(); const job = await createJob!(jobParams, context, request); - const pendingReport = new Report({ - jobtype: exportType.jobType, - created_by: user ? user.username : false, - payload: job, - meta: { - objectType: jobParams.objectType, - layout: jobParams.layout?.id, - }, - ...jobSettings, - }); - // store the pending report, puts it in the Reporting Management UI table - const report = await store.addReport(pendingReport); + // 1. Add the report to ReportingStore to show as pending + const report = await store.addReport( + new Report({ + jobtype: exportType.jobType, + created_by: user ? user.username : false, + max_attempts: config.get('capture', 'maxAttempts'), // NOTE: since max attempts is stored in the document, changing the capture.maxAttempts setting does not affect existing pending reports + payload: job, + meta: { + objectType: jobParams.objectType, + layout: jobParams.layout?.id, + }, + }) + ); + logger.debug(`Successfully stored pending job: ${report._index}/${report._id}`); - logger.info(`Queued ${exportType.name} report: ${report._id}`); + // 2. Schedule the report with Task Manager + const task = await reporting.scheduleTask(report.toReportTaskJSON()); + logger.info( + `Scheduled ${exportType.name} reporting task. Task ID: task:${task.id}. Report ID: ${report._id}` + ); return report; }; diff --git a/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/job.js b/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/job.js deleted file mode 100644 index 32f3d7dcaf7063..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/job.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import events from 'events'; - -export class JobMock extends events.EventEmitter { - constructor(queue, index, type, payload, options = {}) { - super(); - - this.queue = queue; - this.index = index; - this.jobType = type; - this.payload = payload; - this.options = options; - } - - getProp(name) { - return this[name]; - } -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/queue.js b/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/queue.js deleted file mode 100644 index 765ee0f56cb8c9..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/queue.js +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import events from 'events'; - -export class QueueMock extends events.EventEmitter { - constructor() { - super(); - } - - setClient(client) { - this.client = client; - } -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/worker.js b/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/worker.js deleted file mode 100644 index 2c9c547d607350..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/__fixtures__/worker.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import events from 'events'; - -export class WorkerMock extends events.EventEmitter { - constructor(queue, type, workerFn, opts = {}) { - super(); - - this.queue = queue; - this.type = type; - this.workerFn = workerFn; - this.options = opts; - } - - getProp(name) { - return this[name]; - } -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/constants/default_settings.js b/x-pack/plugins/reporting/server/lib/esqueue/constants/default_settings.js deleted file mode 100644 index 64460735625536..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/constants/default_settings.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -export const defaultSettings = { - DEFAULT_SETTING_TIMEOUT: 10000, - DEFAULT_SETTING_DATE_SEPARATOR: '-', - DEFAULT_SETTING_INTERVAL: 'week', - DEFAULT_SETTING_INDEX_SETTINGS: { - number_of_shards: 1, - auto_expand_replicas: '0-1', - }, - DEFAULT_WORKER_CHECK_SIZE: 1, -}; diff --git a/x-pack/plugins/reporting/server/lib/esqueue/constants/events.js b/x-pack/plugins/reporting/server/lib/esqueue/constants/events.js deleted file mode 100644 index 2c83408b0f88e2..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/constants/events.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -export const events = { - EVENT_QUEUE_ERROR: 'queue:error', - EVENT_JOB_ERROR: 'job:error', - EVENT_JOB_CREATED: 'job:created', - EVENT_JOB_CREATE_ERROR: 'job:creation error', - EVENT_WORKER_COMPLETE: 'worker:job complete', - EVENT_WORKER_JOB_CLAIM_ERROR: 'worker:claim job error', - EVENT_WORKER_JOB_SEARCH_ERROR: 'worker:pending jobs error', - EVENT_WORKER_JOB_UPDATE_ERROR: 'worker:update job error', - EVENT_WORKER_JOB_FAIL: 'worker:job failed', - EVENT_WORKER_JOB_FAIL_ERROR: 'worker:failed job update error', - EVENT_WORKER_JOB_EXECUTION_ERROR: 'worker:job execution error', - EVENT_WORKER_JOB_TIMEOUT: 'worker:job timeout', -}; diff --git a/x-pack/plugins/reporting/server/lib/esqueue/constants/index.js b/x-pack/plugins/reporting/server/lib/esqueue/constants/index.js deleted file mode 100644 index 87ff1e354a7add..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/constants/index.js +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { statuses } from '../../statuses'; -import { defaultSettings } from './default_settings'; -import { events } from './events'; - -export const constants = { - ...events, - ...statuses, - ...defaultSettings, -}; diff --git a/x-pack/plugins/reporting/server/lib/esqueue/create_tagged_logger.ts b/x-pack/plugins/reporting/server/lib/esqueue/create_tagged_logger.ts deleted file mode 100644 index 1bb30b4bc7cf0b..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/create_tagged_logger.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { LevelLogger } from '../level_logger'; - -export function createTaggedLogger(logger: LevelLogger, tags: string[]) { - return (msg: string, additionalTags = []) => { - const allTags = [...tags, ...additionalTags]; - - if (allTags.includes('info')) { - const newTags = allTags.filter((t) => t !== 'info'); // Ensure 'info' is not included twice - logger.info(msg, newTags); - } else if (allTags.includes('debug')) { - const newTags = allTags.filter((t) => t !== 'debug'); - logger.debug(msg, newTags); - } else if (allTags.includes('warn') || allTags.includes('warning')) { - const newTags = allTags.filter((t) => t !== 'warn' && t !== 'warning'); - logger.warn(msg, newTags); - } else { - logger.error(msg, allTags); - } - }; -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/helpers/create_index.js b/x-pack/plugins/reporting/server/lib/esqueue/helpers/create_index.js deleted file mode 100644 index b78551909a527d..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/helpers/create_index.js +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { constants } from '../constants'; - -const schema = { - meta: { - // We are indexing these properties with both text and keyword fields because that's what will be auto generated - // when an index already exists. This schema is only used when a reporting index doesn't exist. This way existing - // reporting indexes and new reporting indexes will look the same and the data can be queried in the same - // manner. - properties: { - /** - * Type of object that is triggering this report. Should be either search, visualization or dashboard. - * Used for phone home stats only. - */ - objectType: { - type: 'text', - fields: { - keyword: { - type: 'keyword', - ignore_above: 256, - }, - }, - }, - /** - * Can be either preserve_layout, print or none (in the case of csv export). - * Used for phone home stats only. 
- */ - layout: { - type: 'text', - fields: { - keyword: { - type: 'keyword', - ignore_above: 256, - }, - }, - }, - }, - }, - browser_type: { type: 'keyword' }, - jobtype: { type: 'keyword' }, - payload: { type: 'object', enabled: false }, - priority: { type: 'byte' }, - timeout: { type: 'long' }, - process_expiration: { type: 'date' }, - created_by: { type: 'keyword' }, - created_at: { type: 'date' }, - started_at: { type: 'date' }, - completed_at: { type: 'date' }, - attempts: { type: 'short' }, - max_attempts: { type: 'short' }, - kibana_name: { type: 'keyword' }, - kibana_id: { type: 'keyword' }, - status: { type: 'keyword' }, - output: { - type: 'object', - properties: { - content_type: { type: 'keyword' }, - size: { type: 'long' }, - content: { type: 'object', enabled: false }, - }, - }, -}; - -export function createIndex(client, indexName, indexSettings = {}) { - const body = { - settings: { - ...constants.DEFAULT_SETTING_INDEX_SETTINGS, - ...indexSettings, - }, - mappings: { - properties: schema, - }, - }; - - return client - .callAsInternalUser('indices.exists', { - index: indexName, - }) - .then((exists) => { - if (!exists) { - return client - .callAsInternalUser('indices.create', { - index: indexName, - body: body, - }) - .then(() => true) - .catch((err) => { - /* FIXME creating the index will fail if there were multiple jobs staged in parallel. - * Each staged job checks `client.indices.exists` and could each get `false` as a response. - * Only the first job in line can successfully create it though. - * The problem might only happen in automated tests, where the indices are deleted after each test run. - * This catch block is in place to not fail a job if the job runner hits this race condition. - * Unfortunately we don't have a logger in scope to log a warning. - */ - const isIndexExistsError = - err && - err.body && - err.body.error && - err.body.error.type === 'resource_already_exists_exception'; - if (isIndexExistsError) { - return true; - } - - throw err; - }); - } - return exists; - }); -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.js b/x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.js deleted file mode 100644 index ffe04839c42e5b..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.js +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -export function WorkerTimeoutError(message, props = {}) { - this.name = 'WorkerTimeoutError'; - this.message = message; - this.timeout = props.timeout; - this.jobId = props.jobId; - - if ('captureStackTrace' in Error) Error.captureStackTrace(this, WorkerTimeoutError); - else this.stack = new Error().stack; -} -WorkerTimeoutError.prototype = Object.create(Error.prototype); - -export function UnspecifiedWorkerError(message, props = {}) { - this.name = 'UnspecifiedWorkerError'; - this.message = message; - this.jobId = props.jobId; - - if ('captureStackTrace' in Error) Error.captureStackTrace(this, UnspecifiedWorkerError); - else this.stack = new Error().stack; -} -UnspecifiedWorkerError.prototype = Object.create(Error.prototype); diff --git a/x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.test.js b/x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.test.js deleted file mode 100644 index 01e6430e671a0c..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/helpers/errors.test.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { WorkerTimeoutError, UnspecifiedWorkerError } from './errors'; - -describe('custom errors', function () { - describe('WorkerTimeoutError', function () { - it('should be function', () => { - expect(typeof WorkerTimeoutError).toBe('function'); - }); - - it('should have a name', function () { - const err = new WorkerTimeoutError('timeout error'); - expect(err).toHaveProperty('name', 'WorkerTimeoutError'); - }); - - it('should take a jobId property', function () { - const err = new WorkerTimeoutError('timeout error', { jobId: 'il7hl34rqlo8ro' }); - expect(err).toHaveProperty('jobId', 'il7hl34rqlo8ro'); - }); - - it('should take a timeout property', function () { - const err = new WorkerTimeoutError('timeout error', { timeout: 15000 }); - expect(err).toHaveProperty('timeout', 15000); - }); - - it('should be stringifyable', function () { - const err = new WorkerTimeoutError('timeout error'); - expect(`${err}`).toEqual('WorkerTimeoutError: timeout error'); - }); - }); - - describe('UnspecifiedWorkerError', function () { - it('should be function', () => { - expect(typeof UnspecifiedWorkerError).toBe('function'); - }); - - it('should have a name', function () { - const err = new UnspecifiedWorkerError('unspecified error'); - expect(err).toHaveProperty('name', 'UnspecifiedWorkerError'); - }); - - it('should take a jobId property', function () { - const err = new UnspecifiedWorkerError('unspecified error', { jobId: 'il7hl34rqlo8ro' }); - expect(err).toHaveProperty('jobId', 'il7hl34rqlo8ro'); - }); - - it('should be stringifyable', function () { - const err = new UnspecifiedWorkerError('unspecified error'); - expect(`${err}`).toEqual('UnspecifiedWorkerError: unspecified error'); - }); - }); -}); diff --git a/x-pack/plugins/reporting/server/lib/esqueue/index.js b/x-pack/plugins/reporting/server/lib/esqueue/index.js deleted file mode 100644 index 1a8787c5990691..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/index.js +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { EventEmitter } from 'events'; -import { Worker } from './worker'; -import { constants } from './constants'; -import { omit } from 'lodash'; - -export { events } from './constants/events'; - -export class Esqueue extends EventEmitter { - constructor(store, options = {}) { - super(); - this.store = store; // for updating jobs in ES - this.index = this.store.indexPrefix; // for polling for pending jobs - this.settings = { - interval: constants.DEFAULT_SETTING_INTERVAL, - timeout: constants.DEFAULT_SETTING_TIMEOUT, - dateSeparator: constants.DEFAULT_SETTING_DATE_SEPARATOR, - ...omit(options, ['client']), - }; - this.client = options.client; - this._logger = options.logger || function () {}; - this._workers = []; - this._initTasks().catch((err) => this.emit(constants.EVENT_QUEUE_ERROR, err)); - } - - _initTasks() { - const initTasks = [this.client.ping()]; - - return Promise.all(initTasks).catch((err) => { - this._logger(['initTasks', 'error'], err); - throw err; - }); - } - - registerWorker(type, workerFn, opts) { - const worker = new Worker(this, type, workerFn, { ...opts, logger: this._logger }); - this._workers.push(worker); - return worker; - } - - getWorkers() { - return this._workers.map((fn) => fn); - } - - destroy() { - const workers = this._workers.filter((worker) => worker.destroy()); - this._workers = workers; - } -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/index.test.js b/x-pack/plugins/reporting/server/lib/esqueue/index.test.js deleted file mode 100644 index 1ed5219ffd8e4e..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/index.test.js +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import events from 'events'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import proxyquire from 'proxyquire'; -import { noop, times } from 'lodash'; -import { constants } from './constants'; -import { JobMock } from './__fixtures__/job'; -import { WorkerMock } from './__fixtures__/worker'; - -const { Esqueue } = proxyquire.noPreserveCache()('./index', { - './job': { Job: JobMock }, - './worker': { Worker: WorkerMock }, -}); - -// TODO: tests were not running and are not up to date -describe.skip('Esqueue class', function () { - let client; - - beforeEach(function () { - client = { ping: () => {} }; - }); - - it('should be an event emitter', function () { - const queue = new Esqueue('esqueue', { client }); - expect(queue).to.be.an(events.EventEmitter); - }); - - describe('Option validation', function () { - it('should throw without an index', function () { - const init = () => new Esqueue(); - expect(init).to.throwException(/must.+specify.+index/i); - }); - }); - - describe('Queue construction', function () { - it('should ping the ES server', function () { - const pingSpy = sinon.spy(client, 'ping'); - sinon.assert.calledOnce(pingSpy); - }); - }); - - describe('Adding jobs', function () { - let indexName; - let jobType; - let payload; - let queue; - - beforeEach(function () { - indexName = 'esqueue-index'; - jobType = 'test-test'; - payload = { payload: true }; - queue = new Esqueue(indexName, { client }); - }); - - it('should throw with invalid dateSeparator setting', function () { - queue = new Esqueue(indexName, { client, dateSeparator: 'a' }); - const fn = () => queue.addJob(jobType, payload); - expect(fn).to.throwException(); - }); - - it('should pass queue instance, index name, type and payload', function () { - const job = queue.addJob(jobType, payload); - expect(job.getProp('queue')).to.equal(queue); - expect(job.getProp('index')).to.match(new RegExp(indexName)); - expect(job.getProp('jobType')).to.equal(jobType); - expect(job.getProp('payload')).to.equal(payload); - }); - - it('should pass default settings', function () { - const job = queue.addJob(jobType, payload); - const options = job.getProp('options'); - expect(options).to.have.property('timeout', constants.DEFAULT_SETTING_TIMEOUT); - }); - - it('should pass queue index settings', function () { - const indexSettings = { - index: { - number_of_shards: 1, - }, - }; - - queue = new Esqueue(indexName, { client, indexSettings }); - const job = queue.addJob(jobType, payload); - expect(job.getProp('options')).to.have.property('indexSettings', indexSettings); - }); - - it('should pass headers from options', function () { - const options = { - headers: { - authorization: 'Basic cXdlcnR5', - }, - }; - const job = queue.addJob(jobType, payload, options); - expect(job.getProp('options')).to.have.property('headers', options.headers); - }); - }); - - describe('Registering workers', function () { - let queue; - - beforeEach(function () { - queue = new Esqueue('esqueue', { client }); - }); - - it('should keep track of workers', function () { - expect(queue.getWorkers()).to.eql([]); - expect(queue.getWorkers()).to.have.length(0); - - queue.registerWorker('test', noop); - queue.registerWorker('test', noop); - queue.registerWorker('test2', noop); - expect(queue.getWorkers()).to.have.length(3); - }); - - it('should pass instance of queue, type, and worker function', function () { - const workerType = 'test-worker'; - const workerFn = () => true; - - const worker = queue.registerWorker(workerType, workerFn); - 
expect(worker.getProp('queue')).to.equal(queue); - expect(worker.getProp('type')).to.equal(workerType); - expect(worker.getProp('workerFn')).to.equal(workerFn); - }); - - it('should pass worker options', function () { - const workerOptions = { - size: 12, - }; - - queue = new Esqueue('esqueue', { client }); - const worker = queue.registerWorker('type', noop, workerOptions); - const options = worker.getProp('options'); - expect(options.size).to.equal(workerOptions.size); - }); - }); - - describe('Destroy', function () { - it('should destroy workers', function () { - const queue = new Esqueue('esqueue', { client }); - const stubs = times(3, () => { - return { destroy: sinon.stub() }; - }); - stubs.forEach((stub) => queue._workers.push(stub)); - expect(queue.getWorkers()).to.have.length(3); - - queue.destroy(); - stubs.forEach((stub) => sinon.assert.calledOnce(stub.destroy)); - expect(queue.getWorkers()).to.have.length(0); - }); - }); -}); diff --git a/x-pack/plugins/reporting/server/lib/esqueue/worker.js b/x-pack/plugins/reporting/server/lib/esqueue/worker.js deleted file mode 100644 index 0b3a4d48557d9d..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/worker.js +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import events from 'events'; -import moment from 'moment'; -import Puid from 'puid'; -import { CancellationToken, Poller } from '../../../common'; -import { constants } from './constants'; -import { UnspecifiedWorkerError, WorkerTimeoutError } from './helpers/errors'; - -const puid = new Puid(); - -export function formatJobObject(job) { - return { - index: job._index, - id: job._id, - }; -} - -export function getUpdatedDocPath(response) { - const { _index: ind, _type: type = '_doc', _id: id } = response; - return `/${ind}/${type}/${id}`; -} - -const MAX_PARTIAL_ERROR_LENGTH = 1000; // 1000 of beginning, 1000 of end -const ERROR_PARTIAL_SEPARATOR = '...'; -const MAX_ERROR_LENGTH = MAX_PARTIAL_ERROR_LENGTH * 2 + ERROR_PARTIAL_SEPARATOR.length; - -function getLogger(opts, id, logLevel) { - return (msg, err) => { - /* - * This does not get the logger instance from queue.registerWorker in the createWorker function. - * The logger instance in the Equeue lib comes from createTaggedLogger, so logLevel tags are passed differently - */ - const logger = opts.logger || function () {}; - const message = `${id} - ${msg}`; - const tags = [logLevel]; - - if (err) { - // The error message string could be very long if it contains the request - // body of a request that was too large for Elasticsearch. - // This takes a partial version of the error message without scanning - // every character of the string, which would block Node. - const errString = `${message}: ${err.stack ? err.stack : err}`; - const errLength = errString.length; - const subStr = String.prototype.substring.bind(errString); - if (errLength > MAX_ERROR_LENGTH) { - const partialError = - subStr(0, MAX_PARTIAL_ERROR_LENGTH) + - ERROR_PARTIAL_SEPARATOR + - subStr(errLength - MAX_PARTIAL_ERROR_LENGTH); - - logger(partialError, tags); - logger( - `A partial version of the entire error message was logged. 
` + - `The entire error message length is: ${errLength} characters.`, - tags - ); - } else { - logger(errString, tags); - } - return; - } - - logger(message, tags); - }; -} - -export class Worker extends events.EventEmitter { - constructor(queue, type, workerFn, opts) { - if (typeof type !== 'string') throw new Error('type must be a string'); - if (typeof workerFn !== 'function') throw new Error('workerFn must be a function'); - if (typeof opts !== 'object') throw new Error('opts must be an object'); - if (typeof opts.interval !== 'number') throw new Error('opts.interval must be a number'); - if (typeof opts.intervalErrorMultiplier !== 'number') - throw new Error('opts.intervalErrorMultiplier must be a number'); - - super(); - - this.id = puid.generate(); - this.kibanaId = opts.kibanaId; - this.kibanaName = opts.kibanaName; - this.queue = queue; - this._client = this.queue.client; - this.jobtype = type; - this.workerFn = workerFn; - - this.debug = getLogger(opts, this.id, 'debug'); - this.warn = getLogger(opts, this.id, 'warning'); - this.error = getLogger(opts, this.id, 'error'); - this.info = getLogger(opts, this.id, 'info'); - - this._running = true; - this.debug(`Created worker for ${this.jobtype} jobs`); - - this._poller = new Poller({ - functionToPoll: () => { - return this._processPendingJobs(); - }, - pollFrequencyInMillis: opts.interval, - trailing: true, - continuePollingOnError: true, - pollFrequencyErrorMultiplier: opts.intervalErrorMultiplier, - }); - this._startJobPolling(); - } - - destroy() { - this._running = false; - this._stopJobPolling(); - } - - toJSON() { - return { - id: this.id, - index: this.queue.index, - jobType: this.jobType, - }; - } - - emit(name, ...args) { - super.emit(name, ...args); - this.queue.emit(name, ...args); - } - - _formatErrorParams(err, job) { - const response = { - error: err, - worker: this.toJSON(), - }; - - if (job) response.job = formatJobObject(job); - return response; - } - - _claimJob(job) { - const m = moment(); - const startTime = m.toISOString(); - const expirationTime = m.add(job._source.timeout).toISOString(); - const attempts = job._source.attempts + 1; - - if (attempts > job._source.max_attempts) { - const msg = !job._source.output - ? 
`Max attempts reached (${job._source.max_attempts})` - : false; - return this._failJob(job, msg).then(() => false); - } - - const doc = { - attempts: attempts, - started_at: startTime, - process_expiration: expirationTime, - status: constants.JOB_STATUS_PROCESSING, - kibana_id: this.kibanaId, - kibana_name: this.kibanaName, - }; - - return this.queue.store.setReportClaimed(job, doc).then((response) => { - this.info(`Job marked as claimed: ${getUpdatedDocPath(response)}`); - const updatedJob = { - ...job, - ...response, - }; - updatedJob._source = { - ...job._source, - ...doc, - }; - return updatedJob; - }); - } - - _failJob(job, output = false) { - this.warn(`Failing job ${job._id}`); - - const completedTime = moment().toISOString(); - const docOutput = this._formatOutput(output); - const doc = { - status: constants.JOB_STATUS_FAILED, - completed_at: completedTime, - output: docOutput, - }; - - this.emit(constants.EVENT_WORKER_JOB_FAIL, { - job: formatJobObject(job), - worker: this.toJSON(), - output: docOutput, - }); - - return this.queue.store - .setReportFailed(job, doc) - .then((response) => { - this.info(`Job marked as failed: ${getUpdatedDocPath(response)}`); - }) - .catch((err) => { - if (err.statusCode === 409) return true; - this.error(`_failJob failed to update job ${job._id}`, err); - this.emit(constants.EVENT_WORKER_FAIL_UPDATE_ERROR, this._formatErrorParams(err, job)); - return false; - }); - } - - _formatOutput(output) { - const unknownMime = false; - const defaultOutput = null; - const docOutput = {}; - - if (typeof output === 'object' && output.content) { - docOutput.content = output.content; - docOutput.content_type = output.content_type || unknownMime; - docOutput.max_size_reached = output.max_size_reached; - docOutput.csv_contains_formulas = output.csv_contains_formulas; - docOutput.size = output.size; - docOutput.warnings = - output.warnings && output.warnings.length > 0 ? output.warnings : undefined; - } else { - docOutput.content = output || defaultOutput; - docOutput.content_type = unknownMime; - } - - return docOutput; - } - - _performJob(job) { - this.info(`Starting job`); - - const workerOutput = new Promise((resolve, reject) => { - // run the worker's workerFn - let isResolved = false; - const cancellationToken = new CancellationToken(); - const jobSource = job._source; - - Promise.resolve(this.workerFn.call(null, job, jobSource.payload, cancellationToken)) - .then((res) => { - // job execution was successful - if (res && res.warnings && res.warnings.length > 0) { - this.warn(`Job execution completed with warnings`); - } else { - this.info(`Job execution completed successfully`); - } - - isResolved = true; - resolve(res); - }) - .catch((err) => { - isResolved = true; - reject(err); - }); - - // fail if workerFn doesn't finish before timeout - const { timeout } = jobSource; - setTimeout(() => { - if (isResolved) return; - - cancellationToken.cancel(); - this.warn(`Timeout processing job ${job._id}`); - reject( - new WorkerTimeoutError(`Worker timed out, timeout = ${timeout}`, { - jobId: job._id, - timeout, - }) - ); - }, timeout); - }); - - return workerOutput.then( - (output) => { - const completedTime = moment().toISOString(); - const docOutput = this._formatOutput(output); - - const status = - output && output.warnings && output.warnings.length > 0 - ? 
constants.JOB_STATUS_WARNINGS - : constants.JOB_STATUS_COMPLETED; - const doc = { - status, - completed_at: completedTime, - output: docOutput, - }; - - return this.queue.store - .setReportCompleted(job, doc) - .then((response) => { - const eventOutput = { - job: formatJobObject(job), - output: docOutput, - }; - this.emit(constants.EVENT_WORKER_COMPLETE, eventOutput); - - this.info(`Job data saved successfully: ${getUpdatedDocPath(response)}`); - }) - .catch((err) => { - if (err.statusCode === 409) return false; - this.error(`Failure saving job output ${job._id}`, err); - this.emit(constants.EVENT_WORKER_JOB_UPDATE_ERROR, this._formatErrorParams(err, job)); - return this._failJob(job, err.message ? err.message : false); - }); - }, - (jobErr) => { - if (!jobErr) { - jobErr = new UnspecifiedWorkerError('Unspecified worker error', { - jobId: job._id, - }); - } - - // job execution failed - if (jobErr.name === 'WorkerTimeoutError') { - this.warn(`Timeout on job ${job._id}`); - this.emit(constants.EVENT_WORKER_JOB_TIMEOUT, this._formatErrorParams(jobErr, job)); - return; - - // append the jobId to the error - } else { - try { - Object.assign(jobErr, { jobId: job._id }); - } catch (e) { - // do nothing if jobId can not be appended - } - } - - this.error(`Failure occurred on job ${job._id}`, jobErr); - this.emit(constants.EVENT_WORKER_JOB_EXECUTION_ERROR, this._formatErrorParams(jobErr, job)); - return this._failJob(job, jobErr.toString ? jobErr.toString() : false); - } - ); - } - - _startJobPolling() { - if (!this._running) { - return; - } - - this._poller.start(); - } - - _stopJobPolling() { - this._poller.stop(); - } - - _processPendingJobs() { - return this._getPendingJobs().then((jobs) => { - return this._claimPendingJobs(jobs); - }); - } - - _claimPendingJobs(jobs) { - if (!jobs || jobs.length === 0) return; - - let claimed = false; - - // claim a single job, stopping after first successful claim - return jobs - .reduce((chain, job) => { - return chain.then((claimedJob) => { - // short-circuit the promise chain if a job has been claimed - if (claimed) return claimedJob; - - return this._claimJob(job) - .then((claimResult) => { - claimed = true; - return claimResult; - }) - .catch((err) => { - if (err.statusCode === 409) { - this.warn( - `_claimPendingJobs encountered a version conflict on updating pending job ${job._id}`, - err - ); - return; // continue reducing and looking for a different job to claim - } - this.emit(constants.EVENT_WORKER_JOB_CLAIM_ERROR, this._formatErrorParams(err, job)); - return Promise.reject(err); - }); - }); - }, Promise.resolve()) - .then((claimedJob) => { - if (!claimedJob) { - this.debug(`Found no claimable jobs out of ${jobs.length} total`); - return; - } - return this._performJob(claimedJob); - }) - .catch((err) => { - this.error('Error claiming jobs', err); - return Promise.reject(err); - }); - } - - _getPendingJobs() { - const nowTime = moment().toISOString(); - const query = { - seq_no_primary_term: true, - _source: { - excludes: ['output.content'], - }, - query: { - bool: { - filter: { - bool: { - minimum_should_match: 1, - should: [ - { term: { status: 'pending' } }, - { - bool: { - must: [ - { term: { status: 'processing' } }, - { range: { process_expiration: { lte: nowTime } } }, - ], - }, - }, - ], - }, - }, - }, - }, - sort: [{ priority: { order: 'asc' } }, { created_at: { order: 'asc' } }], - size: constants.DEFAULT_WORKER_CHECK_SIZE, - }; - - return this._client - .search({ - index: `${this.queue.index}-*`, - body: query, - }) - .then((results) => 
{ - const jobs = results.body.hits.hits; - if (jobs.length > 0) { - this.debug(`${jobs.length} outstanding jobs returned`); - } - return jobs; - }) - .catch((err) => { - // ignore missing indices errors - if (err && err.status === 404) return []; - - this.error('job querying failed', err); - this.emit(constants.EVENT_WORKER_JOB_SEARCH_ERROR, this._formatErrorParams(err)); - throw err; - }); - } -} diff --git a/x-pack/plugins/reporting/server/lib/esqueue/worker.test.js b/x-pack/plugins/reporting/server/lib/esqueue/worker.test.js deleted file mode 100644 index d3ebe08aed8172..00000000000000 --- a/x-pack/plugins/reporting/server/lib/esqueue/worker.test.js +++ /dev/null @@ -1,1133 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import moment from 'moment'; -import { noop, random, get, find, identity } from 'lodash'; -import { QueueMock } from './__fixtures__/queue'; -import { formatJobObject, getUpdatedDocPath, Worker } from './worker'; -import { constants } from './constants'; - -const anchor = '2016-04-02T01:02:03.456'; // saturday -const defaults = { - timeout: 10000, - size: 1, - unknownMime: false, - contentBody: null, -}; - -const defaultWorkerOptions = { - interval: 3000, - intervalErrorMultiplier: 10, -}; - -// TODO: tests were not running and are not up to date -describe.skip('Worker class', function () { - // some of these tests might be a little slow, give them a little extra time - jest.setTimeout(10000); - - let anchorMoment; - let clock; - let client; - let mockQueue; - let worker; - let worker2; - - // Allowing the Poller to poll requires intimate knowledge of the inner workings of the Poller. - // We have to ensure that the Promises internal to the `_poll` method are resolved to queue up - // the next setTimeout before incrementing the clock. 
- const allowPoll = async (interval) => { - clock.tick(interval); - await Promise.resolve(); - await Promise.resolve(); - }; - - beforeEach(function () { - client = { search: () => {} }; - mockQueue = new QueueMock(); - mockQueue.setClient(client); - }); - - afterEach(function () { - [worker, worker2].forEach((actualWorker) => { - if (actualWorker) { - actualWorker.destroy(); - } - }); - }); - - describe('invalid construction', function () { - it('should throw without a type', function () { - const init = () => new Worker(mockQueue); - expect(init).to.throwException(/type.+string/i); - }); - - it('should throw without an invalid type', function () { - const init = () => new Worker(mockQueue, { string: false }); - expect(init).to.throwException(/type.+string/i); - }); - - it('should throw without a workerFn', function () { - const init = () => new Worker(mockQueue, 'test'); - expect(init).to.throwException(/workerFn.+function/i); - }); - - it('should throw with an invalid workerFn', function () { - const init = () => new Worker(mockQueue, 'test', { function: false }); - expect(init).to.throwException(/workerFn.+function/i); - }); - - it('should throw without an opts', function () { - const init = () => new Worker(mockQueue, 'test', noop); - expect(init).to.throwException(/opts.+object/i); - }); - - it('should throw with an invalid opts.interval', function () { - const init = () => new Worker(mockQueue, 'test', noop, {}); - expect(init).to.throwException(/opts\.interval.+number/i); - }); - - it('should throw with an invalid opts.intervalErrorMultiplier', function () { - const init = () => new Worker(mockQueue, 'test', noop, { interval: 1 }); - expect(init).to.throwException(/opts\.intervalErrorMultiplier.+number/i); - }); - }); - - describe('construction', function () { - it('should assign internal properties', function () { - const jobtype = 'testjob'; - const workerFn = noop; - worker = new Worker(mockQueue, jobtype, workerFn, defaultWorkerOptions); - expect(worker).to.have.property('id'); - expect(worker).to.have.property('queue', mockQueue); - expect(worker).to.have.property('jobtype', jobtype); - expect(worker).to.have.property('workerFn', workerFn); - }); - - it('should have a unique ID', function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - expect(worker.id).to.be.a('string'); - - worker2 = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - expect(worker2.id).to.be.a('string'); - - expect(worker.id).to.not.equal(worker2.id); - }); - }); - - describe('event emitting', function () { - beforeEach(function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - }); - - it('should trigger events on the queue instance', function (done) { - const eventName = 'test event'; - const payload1 = { - test: true, - deep: { object: 'ok' }, - }; - const payload2 = 'two'; - const payload3 = new Error('test error'); - - mockQueue.on(eventName, (...args) => { - try { - expect(args[0]).to.equal(payload1); - expect(args[1]).to.equal(payload2); - expect(args[2]).to.equal(payload3); - done(); - } catch (e) { - done(e); - } - }); - - worker.emit(eventName, payload1, payload2, payload3); - }); - }); - - describe('output formatting', function () { - let f; - - beforeEach(function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - f = (output) => worker._formatOutput(output); - }); - - it('should handle primitives', function () { - const primitives = ['test', true, 1234, { one: 1 }, [5, 6, 7, 8]]; - - 
primitives.forEach((val) => { - expect(f(val)).to.have.property('content_type', defaults.unknownMime); - expect(f(val)).to.have.property('content', val); - }); - }); - - it('should accept content object without type', function () { - const output = { - content: 'test output', - }; - - expect(f(output)).to.have.property('content_type', defaults.unknownMime); - expect(f(output)).to.have.property('content', output.content); - }); - - it('should accept a content type', function () { - const output = { - content_type: 'test type', - content: 'test output', - }; - - expect(f(output)).to.have.property('content_type', output.content_type); - expect(f(output)).to.have.property('content', output.content); - }); - - it('should work with no input', function () { - expect(f()).to.have.property('content_type', defaults.unknownMime); - expect(f()).to.have.property('content', defaults.contentBody); - }); - }); - - describe('polling for jobs', function () { - beforeEach(() => { - anchorMoment = moment(anchor); - clock = sinon.useFakeTimers(anchorMoment.valueOf()); - }); - - afterEach(() => { - clock.restore(); - }); - - it('should start polling for jobs after interval', async function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - const processPendingJobsStub = sinon - .stub(worker, '_processPendingJobs') - .callsFake(() => Promise.resolve()); - sinon.assert.notCalled(processPendingJobsStub); - await allowPoll(defaultWorkerOptions.interval); - sinon.assert.calledOnce(processPendingJobsStub); - }); - - it('should use interval option to control polling', async function () { - const interval = 567; - worker = new Worker(mockQueue, 'test', noop, { ...defaultWorkerOptions, interval }); - const processPendingJobsStub = sinon - .stub(worker, '_processPendingJobs') - .callsFake(() => Promise.resolve()); - - sinon.assert.notCalled(processPendingJobsStub); - await allowPoll(interval); - sinon.assert.calledOnce(processPendingJobsStub); - }); - - it('should not poll once destroyed', async function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - - const processPendingJobsStub = sinon - .stub(worker, '_processPendingJobs') - .callsFake(() => Promise.resolve()); - - // move the clock a couple times, test for searches each time - sinon.assert.notCalled(processPendingJobsStub); - await allowPoll(defaultWorkerOptions.interval); - sinon.assert.calledOnce(processPendingJobsStub); - await allowPoll(defaultWorkerOptions.interval); - sinon.assert.calledTwice(processPendingJobsStub); - - // destroy the worker, move the clock, make sure another search doesn't happen - worker.destroy(); - await allowPoll(defaultWorkerOptions.interval); - sinon.assert.calledTwice(processPendingJobsStub); - - // manually call job poller, move the clock, make sure another search doesn't happen - worker._startJobPolling(); - await allowPoll(defaultWorkerOptions.interval); - sinon.assert.calledTwice(processPendingJobsStub); - }); - - it('should use error multiplier when processPendingJobs rejects the Promise', async function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - - const processPendingJobsStub = sinon - .stub(worker, '_processPendingJobs') - .rejects(new Error('test error')); - - await allowPoll(defaultWorkerOptions.interval); - expect(processPendingJobsStub.callCount).to.be(1); - await allowPoll(defaultWorkerOptions.interval); - expect(processPendingJobsStub.callCount).to.be(1); - await allowPoll(defaultWorkerOptions.interval * 
defaultWorkerOptions.intervalErrorMultiplier); - expect(processPendingJobsStub.callCount).to.be(2); - }); - - it('should not use error multiplier when processPendingJobs resolved the Promise', async function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - - const processPendingJobsStub = sinon - .stub(worker, '_processPendingJobs') - .callsFake(() => Promise.resolve()); - - await allowPoll(defaultWorkerOptions.interval); - expect(processPendingJobsStub.callCount).to.be(1); - await allowPoll(defaultWorkerOptions.interval); - expect(processPendingJobsStub.callCount).to.be(2); - }); - }); - - describe('query for pending jobs', function () { - let searchStub; - - function getSearchParams(jobtype = 'test', params = {}) { - worker = new Worker(mockQueue, jobtype, noop, { ...defaultWorkerOptions, ...params }); - worker._getPendingJobs(); - return searchStub.firstCall.args[1]; - } - - describe('error handling', function () { - it('should pass search errors', function (done) { - searchStub = sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('search') - .callsFake(() => Promise.reject()); - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - worker - ._getPendingJobs() - .then(() => done(new Error('should not resolve'))) - .catch(() => { - done(); - }); - }); - - describe('missing index', function () { - it('should swallow error', function (done) { - searchStub = sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('search') - .callsFake(() => Promise.reject({ status: 404 })); - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - worker - ._getPendingJobs() - .then(() => { - done(); - }) - .catch(() => done(new Error('should not reject'))); - }); - - it('should return an empty array', function (done) { - searchStub = sinon - .stub(mockQueue.client, 'search') - .callsFake(() => Promise.reject({ status: 404 })); - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - worker - ._getPendingJobs() - .then((res) => { - try { - expect(res).to.be.an(Array); - expect(res).to.have.length(0); - done(); - } catch (e) { - done(e); - } - }) - .catch(() => done(new Error('should not reject'))); - }); - }); - }); - - describe('query body', function () { - const conditionPath = 'query.bool.filter.bool'; - const jobtype = 'test_jobtype'; - - beforeEach(() => { - searchStub = sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('search') - .callsFake(() => Promise.resolve({ hits: { hits: [] } })); - anchorMoment = moment(anchor); - clock = sinon.useFakeTimers(anchorMoment.valueOf()); - }); - - afterEach(() => { - clock.restore(); - }); - - it('should query with seq_no_primary_term', function () { - const { body } = getSearchParams(jobtype); - expect(body).to.have.property('seq_no_primary_term', true); - }); - - it('should filter unwanted source data', function () { - const excludedFields = ['output.content']; - const { body } = getSearchParams(jobtype); - expect(body).to.have.property('_source'); - expect(body._source).to.eql({ excludes: excludedFields }); - }); - - it('should search for pending or expired jobs', function () { - const { body } = getSearchParams(jobtype); - const conditions = get(body, conditionPath); - expect(conditions).to.have.property('should'); - - // this works because we are stopping the clock, so all times match - const nowTime = moment().toISOString(); - const pending = { term: { status: 'pending' } }; - const expired = { - bool: { - must: [ - { term: { status: 
'processing' } }, - { range: { process_expiration: { lte: nowTime } } }, - ], - }, - }; - - const pendingMatch = find(conditions.should, pending); - expect(pendingMatch).to.not.be(undefined); - - const expiredMatch = find(conditions.should, expired); - expect(expiredMatch).to.not.be(undefined); - }); - - it('specify that there should be at least one match', function () { - const { body } = getSearchParams(jobtype); - const conditions = get(body, conditionPath); - expect(conditions).to.have.property('minimum_should_match', 1); - }); - - it('should use default size', function () { - const { body } = getSearchParams(jobtype); - expect(body).to.have.property('size', defaults.size); - }); - }); - }); - - describe('claiming a job', function () { - let params; - let job; - let updateSpy; - - beforeEach(function () { - anchorMoment = moment(anchor); - clock = sinon.useFakeTimers(anchorMoment.valueOf()); - - params = { - index: 'myIndex', - type: 'test', - id: 12345, - }; - return mockQueue.client.callAsInternalUser('get', params).then((jobDoc) => { - job = jobDoc; - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - updateSpy = sinon.spy(mockQueue.client, 'callAsInternalUser').withArgs('update'); - }); - }); - - afterEach(() => { - clock.restore(); - }); - - it('should use seqNo and primaryTerm on update', function () { - worker._claimJob(job); - const query = updateSpy.firstCall.args[1]; - expect(query).to.have.property('index', job._index); - expect(query).to.have.property('id', job._id); - expect(query).to.have.property('if_seq_no', job._seq_no); - expect(query).to.have.property('if_primary_term', job._primary_term); - }); - - it('should increment the job attempts', function () { - worker._claimJob(job); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('attempts', job._source.attempts + 1); - }); - - it('should update the job status', function () { - worker._claimJob(job); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('status', constants.JOB_STATUS_PROCESSING); - }); - - it('should set job expiration time', function () { - worker._claimJob(job); - const doc = updateSpy.firstCall.args[1].body.doc; - const expiration = anchorMoment.add(defaults.timeout).toISOString(); - expect(doc).to.have.property('process_expiration', expiration); - }); - - it('should fail job if max_attempts are hit', function () { - const failSpy = sinon.spy(worker, '_failJob'); - job._source.attempts = job._source.max_attempts; - worker._claimJob(job); - sinon.assert.calledOnce(failSpy); - }); - - it('should append error message if no existing content', function () { - const failSpy = sinon.spy(worker, '_failJob'); - job._source.attempts = job._source.max_attempts; - expect(job._source.output).to.be(undefined); - worker._claimJob(job); - const msg = failSpy.firstCall.args[1]; - expect(msg).to.contain('Max attempts reached'); - expect(msg).to.contain(job._source.max_attempts); - }); - - it('should not append message if existing output', function () { - const failSpy = sinon.spy(worker, '_failJob'); - job._source.attempts = job._source.max_attempts; - job._source.output = 'i have some output'; - worker._claimJob(job); - const msg = failSpy.firstCall.args[1]; - expect(msg).to.equal(false); - }); - - it('should reject the promise on conflict errors', function () { - mockQueue.client.callAsInternalUser.restore(); - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .returns(Promise.reject({ statusCode: 409 })); - 
return worker._claimJob(job).catch((err) => { - expect(err).to.eql({ statusCode: 409 }); - }); - }); - - it('should reject the promise on other errors', function () { - mockQueue.client.callAsInternalUser.restore(); - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .returns(Promise.reject({ statusCode: 401 })); - return worker._claimJob(job).catch((err) => { - expect(err).to.eql({ statusCode: 401 }); - }); - }); - }); - - describe('find a pending job to claim', function () { - const getMockJobs = (status = 'pending') => [ - { - _index: 'myIndex', - _id: 12345, - _seq_no: 3, - _primary_term: 3, - found: true, - _source: { - jobtype: 'jobtype', - created_by: false, - payload: { id: 'sample-job-1', now: 'Mon Apr 25 2016 14:13:04 GMT-0700 (MST)' }, - priority: 10, - timeout: 10000, - created_at: '2016-04-25T21:13:04.738Z', - attempts: 0, - max_attempts: 3, - status, - }, - }, - ]; - - beforeEach(function () { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - }); - - afterEach(() => { - mockQueue.client.callAsInternalUser.restore(); - }); - - it('should emit for errors from claiming job', function (done) { - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .rejects({ statusCode: 401 }); - - worker.once(constants.EVENT_WORKER_JOB_CLAIM_ERROR, function (err) { - try { - expect(err).to.have.property('error'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - expect(err.error).to.have.property('statusCode', 401); - done(); - } catch (e) { - done(e); - } - }); - - worker._claimPendingJobs(getMockJobs()).catch(() => {}); - }); - - it('should reject the promise if an error claiming the job', function () { - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .rejects({ statusCode: 409 }); - return worker._claimPendingJobs(getMockJobs()).catch((err) => { - expect(err).to.eql({ statusCode: 409 }); - }); - }); - - it('should get the pending job', function () { - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .resolves({ test: 'cool' }); - sinon.stub(worker, '_performJob').callsFake(identity); - return worker._claimPendingJobs(getMockJobs()).then((claimedJob) => { - expect(claimedJob._index).to.be('myIndex'); - expect(claimedJob._source.jobtype).to.be('jobtype'); - expect(claimedJob._source.status).to.be('processing'); - expect(claimedJob.test).to.be('cool'); - worker._performJob.restore(); - }); - }); - }); - - describe('failing a job', function () { - let job; - let updateSpy; - - beforeEach(function () { - anchorMoment = moment(anchor); - clock = sinon.useFakeTimers(anchorMoment.valueOf()); - - return mockQueue.client.callAsInternalUser('get').then((jobDoc) => { - job = jobDoc; - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - updateSpy = sinon.spy(mockQueue.client, 'callAsInternalUser').withArgs('update'); - }); - }); - - afterEach(() => { - clock.restore(); - }); - - it('should use _seq_no and _primary_term on update', function () { - worker._failJob(job); - const query = updateSpy.firstCall.args[1]; - expect(query).to.have.property('index', job._index); - expect(query).to.have.property('id', job._id); - expect(query).to.have.property('if_seq_no', job._seq_no); - expect(query).to.have.property('if_primary_term', job._primary_term); - }); - - it('should set status to failed', function () { - worker._failJob(job); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('status', 
constants.JOB_STATUS_FAILED); - }); - - it('should append error message if supplied', function () { - const msg = 'test message'; - worker._failJob(job, msg); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('output'); - expect(doc.output).to.have.property('content', msg); - }); - - it('should return true on conflict errors', function () { - mockQueue.client.callAsInternalUser.restore(); - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .rejects({ statusCode: 409 }); - return worker._failJob(job).then((res) => expect(res).to.equal(true)); - }); - - it('should return false on other document update errors', function () { - mockQueue.client.callAsInternalUser.restore(); - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .rejects({ statusCode: 401 }); - return worker._failJob(job).then((res) => expect(res).to.equal(false)); - }); - - it('should set completed time and status to failure', function () { - const startTime = moment().valueOf(); - const msg = 'test message'; - clock.tick(100); - - worker._failJob(job, msg); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('output'); - expect(doc).to.have.property('status', constants.JOB_STATUS_FAILED); - expect(doc).to.have.property('completed_at'); - const completedTimestamp = moment(doc.completed_at).valueOf(); - expect(completedTimestamp).to.be.greaterThan(startTime); - }); - - it('should emit worker failure event', function (done) { - worker.on(constants.EVENT_WORKER_JOB_FAIL, (err) => { - try { - expect(err).to.have.property('output'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - done(); - } catch (e) { - done(e); - } - }); - - return worker._failJob(job); - }); - - it('should emit on other document update errors', function (done) { - mockQueue.client.callAsInternalUser.restore(); - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .rejects({ statusCode: 401 }); - - worker.on(constants.EVENT_WORKER_FAIL_UPDATE_ERROR, function (err) { - try { - expect(err).to.have.property('error'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - expect(err.error).to.have.property('statusCode', 401); - done(); - } catch (e) { - done(e); - } - }); - worker._failJob(job); - }); - }); - - describe('performing a job', function () { - let job; - let payload; - let updateSpy; - - beforeEach(function () { - payload = { - value: random(0, 100, true), - }; - - return mockQueue.client.callAsInternalUser('get', {}, { payload }).then((jobDoc) => { - job = jobDoc; - updateSpy = sinon.spy(mockQueue.client, 'callAsInternalUser').withArgs('update'); - }); - }); - - describe('worker success', function () { - it('should call the workerFn with the payload', function (done) { - const workerFn = function (jobPayload) { - expect(jobPayload).to.eql(payload); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - - worker._performJob(job).then(() => done()); - }); - - it('should update the job with the workerFn output', function () { - const workerFn = function (job, jobPayload) { - // eslint-disable-line no-unused-vars - expect(jobPayload).to.eql(payload); - return payload; - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(updateSpy); - const query = updateSpy.firstCall.args[1]; - - expect(query).to.have.property('index', job._index); 
- expect(query).to.have.property('id', job._id); - expect(query).to.have.property('if_seq_no', job._seq_no); - expect(query).to.have.property('if_primary_term', job._primary_term); - expect(query.body.doc).to.have.property('output'); - expect(query.body.doc.output).to.have.property('content_type', false); - expect(query.body.doc.output).to.have.property('content', payload); - }); - }); - - it('should update the job status and completed time', function () { - const startTime = moment().valueOf(); - const workerFn = function (job, jobPayload) { - // eslint-disable-line no-unused-vars - expect(jobPayload).to.eql(payload); - return new Promise(function (resolve) { - setTimeout(() => resolve(payload), 10); - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(updateSpy); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('status', constants.JOB_STATUS_COMPLETED); - expect(doc).to.have.property('completed_at'); - const completedTimestamp = moment(doc.completed_at).valueOf(); - expect(completedTimestamp).to.be.greaterThan(startTime); - }); - }); - - it('handle warnings in the output by reflecting a warning status', () => { - const workerFn = () => { - return Promise.resolve({ - ...payload, - warnings: [`Don't run with scissors!`], - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - - return worker - ._performJob({ - test: true, - ...job, - }) - .then(() => { - sinon.assert.calledOnce(updateSpy); - const doc = updateSpy.firstCall.args[1].body.doc; - expect(doc).to.have.property('status', constants.JOB_STATUS_WARNINGS); - }); - }); - - it('should emit completion event', function (done) { - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - - worker.once(constants.EVENT_WORKER_COMPLETE, (workerJob) => { - try { - expect(workerJob).to.not.have.property('_source'); - - expect(workerJob).to.have.property('job'); - expect(workerJob.job).to.have.property('id'); - expect(workerJob.job).to.have.property('index'); - - expect(workerJob).to.have.property('output'); - expect(workerJob.output).to.have.property('content'); - expect(workerJob.output).to.have.property('content_type'); - - done(); - } catch (e) { - done(e); - } - }); - - worker._performJob(job); - }); - }); - - describe('worker failure', function () { - it('should append error output to job', function () { - const workerFn = function () { - throw new Error('test error'); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - const failStub = sinon.stub(worker, '_failJob'); - - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(failStub); - sinon.assert.calledWith(failStub, job, 'Error: test error'); - }); - }); - - it('should handle async errors', function () { - const workerFn = function () { - return new Promise((resolve, reject) => { - // eslint-disable-line no-unused-vars - reject(new Error('test error')); - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - const failStub = sinon.stub(worker, '_failJob'); - - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(failStub); - sinon.assert.calledWith(failStub, job, 'Error: test error'); - }); - }); - - it('should handle rejecting with strings', function () { - const errorMessage = 'this is a string error'; - const workerFn = function () { - return new Promise((resolve, reject) => { - // eslint-disable-line 
no-unused-vars - reject(errorMessage); - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - const failStub = sinon.stub(worker, '_failJob'); - - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(failStub); - sinon.assert.calledWith(failStub, job, errorMessage); - }); - }); - - it('should handle empty rejection', function (done) { - const workerFn = function () { - return new Promise((resolve, reject) => { - // eslint-disable-line no-unused-vars - reject(); - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - - worker.once(constants.EVENT_WORKER_JOB_EXECUTION_ERROR, (err) => { - try { - expect(err).to.have.property('error'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - expect(err.error).to.have.property('name', 'UnspecifiedWorkerError'); - done(); - } catch (e) { - done(e); - } - }); - - worker._performJob(job); - }); - }); - }); - - describe('job failures', function () { - function getFailStub(workerWithFailure) { - return sinon.stub(workerWithFailure, '_failJob').resolves(); - } - - describe('saving output failure', () => { - it('should mark the job as failed if saving to ES fails', async () => { - const job = { - _id: 'shouldSucced', - _source: { - timeout: 1000, - payload: 'test', - }, - }; - - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('update') - .rejects({ statusCode: 413 }); - - const workerFn = function (jobPayload) { - return new Promise(function (resolve) { - setTimeout(() => resolve(jobPayload), 10); - }); - }; - const worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - const failStub = getFailStub(worker); - - await worker._performJob(job); - worker.destroy(); - - sinon.assert.called(failStub); - }); - }); - - describe('search failure', function () { - it('causes _processPendingJobs to reject the Promise', function () { - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('search') - .rejects(new Error('test error')); - worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - return worker._processPendingJobs().then( - () => { - expect().fail('expected rejected Promise'); - }, - (err) => { - expect(err).to.be.an(Error); - } - ); - }); - }); - - describe('timeout', function () { - let failStub; - let job; - let cancellationCallback; - - beforeEach(function () { - const timeout = 20; - cancellationCallback = function () {}; - - const workerFn = function (job, payload, cancellationToken) { - // eslint-disable-line no-unused-vars - cancellationToken.on(cancellationCallback); - return new Promise(function (resolve) { - setTimeout(() => { - resolve(); - }, timeout * 2); - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - failStub = getFailStub(worker); - - job = { - _id: 'testTimeoutJob', - _source: { - timeout: timeout, - payload: 'test', - }, - }; - }); - - it('should not fail job', function () { - // fire of the job worker - return worker._performJob(job).then(() => { - sinon.assert.notCalled(failStub); - }); - }); - - it('should emit timeout if not completed in time', function (done) { - worker.once(constants.EVENT_WORKER_JOB_TIMEOUT, (err) => { - try { - expect(err).to.have.property('error'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - expect(err.error).to.have.property('name', 'WorkerTimeoutError'); - done(); - } catch (e) { - done(e); - } - }); - - // fire of the job worker - worker._performJob(job); - }); - 
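-
-      // The timeout tests in this block exercise the cancellation contract between the worker and
-      // its job function: the job registers a callback via `cancellationToken.on(...)`, and the
-      // worker calls `cancel()` once the job's timeout elapses. A minimal sketch of that contract,
-      // for illustration only — the names below are assumptions and the plugin's actual
-      // CancellationToken implementation may differ:
-      //
-      //   class SketchCancellationToken {
-      //     private callbacks: Array<() => void> = [];
-      //     private cancelled = false;
-      //
-      //     // Register a callback; if already cancelled, invoke it immediately.
-      //     public on(callback: () => void): void {
-      //       if (this.cancelled) {
-      //         callback();
-      //         return;
-      //       }
-      //       this.callbacks.push(callback);
-      //     }
-      //
-      //     // Cancel the token and notify every registered callback once.
-      //     public cancel(): void {
-      //       if (this.cancelled) return;
-      //       this.cancelled = true;
-      //       this.callbacks.forEach((cb) => cb());
-      //     }
-      //   }
-      //
-      //   // Usage mirroring the worker: cancel the job if it outlives its timeout.
-      //   async function runWithTimeout<T>(
-      //     job: (token: SketchCancellationToken) => Promise<T>,
-      //     timeoutMs: number
-      //   ): Promise<T> {
-      //     const token = new SketchCancellationToken();
-      //     const timer = setTimeout(() => token.cancel(), timeoutMs);
-      //     try {
-      //       return await job(token);
-      //     } finally {
-      //       clearTimeout(timer);
-      //     }
-      //   }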
- it('should call cancellation token callback if not completed in time', function (done) { - let called = false; - - cancellationCallback = () => { - called = true; - }; - - worker.once(constants.EVENT_WORKER_JOB_TIMEOUT, () => { - try { - expect(called).to.be(true); - done(); - } catch (err) { - done(err); - } - }); - - // fire of the job worker - worker._performJob(job); - }); - }); - - describe('worker failure', function () { - let failStub; - - const timeout = 20; - const job = { - _id: 'testTimeoutJob', - _source: { - timeout: timeout, - payload: 'test', - }, - }; - - beforeEach(function () { - sinon - .stub(mockQueue.client, 'callAsInternalUser') - .withArgs('search') - .callsFake(() => Promise.resolve({ hits: { hits: [] } })); - }); - - describe('workerFn rejects promise', function () { - beforeEach(function () { - const workerFn = function () { - return new Promise(function (resolve, reject) { - // eslint-disable-line no-unused-vars - setTimeout(() => { - reject(); - }, timeout / 2); - }); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - failStub = getFailStub(worker); - }); - - it('should fail the job', function () { - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(failStub); - }); - }); - - it('should emit worker execution error', function (done) { - worker.on(constants.EVENT_WORKER_JOB_EXECUTION_ERROR, (err) => { - try { - expect(err).to.have.property('error'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - done(); - } catch (e) { - done(e); - } - }); - - // fire of the job worker - worker._performJob(job); - }); - }); - - describe('workerFn throws error', function () { - beforeEach(function () { - const workerFn = function () { - throw new Error('test throw'); - }; - worker = new Worker(mockQueue, 'test', workerFn, defaultWorkerOptions); - - failStub = getFailStub(worker); - }); - - it('should fail the job', function () { - return worker._performJob(job).then(() => { - sinon.assert.calledOnce(failStub); - }); - }); - - it('should emit worker execution error', function (done) { - worker.on(constants.EVENT_WORKER_JOB_EXECUTION_ERROR, (err) => { - try { - expect(err).to.have.property('error'); - expect(err).to.have.property('job'); - expect(err).to.have.property('worker'); - done(); - } catch (e) { - done(e); - } - }); - - // fire of the job worker - worker._performJob(job); - }); - }); - }); - }); -}); - -describe('Format Job Object', () => { - it('pulls index and ID', function () { - const jobMock = { - _index: 'foo', - _id: 'booId', - }; - expect(formatJobObject(jobMock)).eql({ - index: 'foo', - id: 'booId', - }); - }); -}); - -describe('Get Doc Path from ES Response', () => { - it('returns a formatted string after response of an update', function () { - const responseMock = { - _index: 'foo', - _type: '_doc', - _id: 'booId', - }; - expect(getUpdatedDocPath(responseMock)).equal('/foo/_doc/booId'); - }); - it('returns the same formatted string even if there is no _doc provided', function () { - const responseMock = { - _index: 'foo', - _id: 'booId', - }; - expect(getUpdatedDocPath(responseMock)).equal('/foo/_doc/booId'); - }); -}); diff --git a/x-pack/plugins/reporting/server/lib/index.ts b/x-pack/plugins/reporting/server/lib/index.ts index cd7cdf3a8fd0b7..e66f72f88f8ea7 100644 --- a/x-pack/plugins/reporting/server/lib/index.ts +++ b/x-pack/plugins/reporting/server/lib/index.ts @@ -6,9 +6,7 @@ */ export { checkLicense } from './check_license'; -export { createQueueFactory } from 
'./create_queue'; export { cryptoFactory } from './crypto'; -export { enqueueJobFactory } from './enqueue_job'; export { ExportTypesRegistry, getExportTypesRegistry } from './export_types_registry'; export { LevelLogger } from './level_logger'; export { statuses } from './statuses'; diff --git a/x-pack/plugins/reporting/server/lib/statuses.ts b/x-pack/plugins/reporting/server/lib/statuses.ts index 1aa6b6d5ac8ffd..2c25708078aaff 100644 --- a/x-pack/plugins/reporting/server/lib/statuses.ts +++ b/x-pack/plugins/reporting/server/lib/statuses.ts @@ -5,11 +5,12 @@ * 2.0. */ -export const statuses = { +import { JobStatus } from '../../common/types'; + +export const statuses: Record = { JOB_STATUS_PENDING: 'pending', JOB_STATUS_PROCESSING: 'processing', JOB_STATUS_COMPLETED: 'completed', JOB_STATUS_WARNINGS: 'completed_with_warnings', JOB_STATUS_FAILED: 'failed', - JOB_STATUS_CANCELLED: 'cancelled', }; diff --git a/x-pack/plugins/reporting/server/lib/store/mapping.ts b/x-pack/plugins/reporting/server/lib/store/mapping.ts index 1a4567b4aee923..69f432562ec983 100644 --- a/x-pack/plugins/reporting/server/lib/store/mapping.ts +++ b/x-pack/plugins/reporting/server/lib/store/mapping.ts @@ -7,15 +7,10 @@ export const mapping = { meta: { - // We are indexing these properties with both text and keyword fields because that's what will be auto generated - // when an index already exists. This schema is only used when a reporting index doesn't exist. This way existing - // reporting indexes and new reporting indexes will look the same and the data can be queried in the same - // manner. + // We are indexing these properties with both text and keyword fields + // because that's what will be auto generated when an index already exists. properties: { - /** - * Type of object that is triggering this report. Should be either search, visualization or dashboard. - * Used for job listing and telemetry stats only. - */ + // ID of the app this report: search, visualization or dashboard, etc objectType: { type: 'text', fields: { @@ -25,10 +20,6 @@ export const mapping = { }, }, }, - /** - * Can be either preserve_layout, print or none (in the case of csv export). - * Used for phone home stats only. 
- */ layout: { type: 'text', fields: { @@ -41,9 +32,10 @@ export const mapping = { }, }, browser_type: { type: 'keyword' }, + migration_version: { type: 'keyword' }, // new field (7.14) to distinguish reports that were scheduled with Task Manager jobtype: { type: 'keyword' }, payload: { type: 'object', enabled: false }, - priority: { type: 'byte' }, + priority: { type: 'byte' }, // TODO: remove: this is unused timeout: { type: 'long' }, process_expiration: { type: 'date' }, created_by: { type: 'keyword' }, // `null` if security is disabled diff --git a/x-pack/plugins/reporting/server/lib/store/report.test.ts b/x-pack/plugins/reporting/server/lib/store/report.test.ts index 4c5cd755f71c45..a8d14e12a738be 100644 --- a/x-pack/plugins/reporting/server/lib/store/report.test.ts +++ b/x-pack/plugins/reporting/server/lib/store/report.test.ts @@ -20,21 +20,26 @@ describe('Class Report', () => { timeout: 30000, }); - expect(report.toEsDocsJSON()).toMatchObject({ - _index: '.reporting-test-index-12345', - _source: { - attempts: 0, - browser_type: 'browser_type_test_string', - completed_at: undefined, - created_by: 'created_by_test_string', - jobtype: 'test-report', - max_attempts: 50, - meta: { objectType: 'test' }, - payload: { headers: 'payload_test_field', objectType: 'testOt' }, - started_at: undefined, - status: 'pending', - timeout: 30000, - }, + expect(report.toReportSource()).toMatchObject({ + attempts: 0, + browser_type: 'browser_type_test_string', + completed_at: undefined, + created_by: 'created_by_test_string', + jobtype: 'test-report', + max_attempts: 50, + meta: { objectType: 'test' }, + payload: { headers: 'payload_test_field', objectType: 'testOt' }, + started_at: undefined, + status: 'pending', + timeout: 30000, + }); + expect(report.toReportTaskJSON()).toMatchObject({ + attempts: 0, + created_by: 'created_by_test_string', + index: '.reporting-test-index-12345', + jobtype: 'test-report', + meta: { objectType: 'test' }, + payload: { headers: 'payload_test_field', objectType: 'testOt' }, }); expect(report.toApiJSON()).toMatchObject({ attempts: 0, @@ -72,22 +77,27 @@ describe('Class Report', () => { }; report.updateWithEsDoc(metadata); - expect(report.toEsDocsJSON()).toMatchObject({ - _id: '12342p9o387549o2345', - _index: '.reporting-test-update', - _source: { - attempts: 0, - browser_type: 'browser_type_test_string', - completed_at: undefined, - created_by: 'created_by_test_string', - jobtype: 'test-report', - max_attempts: 50, - meta: { objectType: 'stange' }, - payload: { objectType: 'testOt' }, - started_at: undefined, - status: 'pending', - timeout: 30000, - }, + expect(report.toReportSource()).toMatchObject({ + attempts: 0, + browser_type: 'browser_type_test_string', + completed_at: undefined, + created_by: 'created_by_test_string', + jobtype: 'test-report', + max_attempts: 50, + meta: { objectType: 'stange' }, + payload: { objectType: 'testOt' }, + started_at: undefined, + status: 'pending', + timeout: 30000, + }); + expect(report.toReportTaskJSON()).toMatchObject({ + attempts: 0, + created_by: 'created_by_test_string', + id: '12342p9o387549o2345', + index: '.reporting-test-update', + jobtype: 'test-report', + meta: { objectType: 'stange' }, + payload: { objectType: 'testOt' }, }); expect(report.toApiJSON()).toMatchObject({ attempts: 0, diff --git a/x-pack/plugins/reporting/server/lib/store/report.ts b/x-pack/plugins/reporting/server/lib/store/report.ts index 0d49e524ad067c..fa5b91527ccc47 100644 --- a/x-pack/plugins/reporting/server/lib/store/report.ts +++ 
b/x-pack/plugins/reporting/server/lib/store/report.ts @@ -9,11 +9,25 @@ import moment from 'moment'; // @ts-ignore no module definition import Puid from 'puid'; import { JOB_STATUSES } from '../../../common/constants'; -import { ReportApiJSON, ReportDocumentHead, ReportSource } from '../../../common/types'; +import { + ReportApiJSON, + ReportDocument, + ReportDocumentHead, + ReportSource, +} from '../../../common/types'; +import { ReportTaskParams } from '../tasks'; + +export { ReportDocument }; +export { ReportApiJSON, ReportSource }; const puid = new Puid(); +export const MIGRATION_VERSION = '7.14.0'; -export class Report implements Partial { +/* + * The public fields are a flattened version what Elasticsearch returns when you + * `GET` a document. + */ +export class Report implements Partial { public _index?: string; public _id: string; public _primary_term?: number; // set by ES @@ -35,9 +49,11 @@ export class Report implements Partial { public readonly output?: ReportSource['output']; public readonly started_at?: ReportSource['started_at']; public readonly completed_at?: ReportSource['completed_at']; - public readonly process_expiration?: ReportSource['process_expiration']; public readonly timeout?: ReportSource['timeout']; + public process_expiration?: ReportSource['process_expiration']; + public migration_version: string; + /* * Create an unsaved report * Index string is required @@ -48,6 +64,8 @@ export class Report implements Partial { this._primary_term = opts._primary_term; this._seq_no = opts._seq_no; + this.migration_version = MIGRATION_VERSION; + this.payload = opts.payload!; this.kibana_name = opts.kibana_name!; this.kibana_id = opts.kibana_id!; @@ -70,7 +88,7 @@ export class Report implements Partial { /* * Update the report with "live" storage metadata */ - updateWithEsDoc(doc: Partial) { + updateWithEsDoc(doc: Partial): void { if (doc._index == null || doc._id == null) { throw new Error(`Report object from ES has missing fields!`); } @@ -79,29 +97,52 @@ export class Report implements Partial { this._index = doc._index; this._primary_term = doc._primary_term; this._seq_no = doc._seq_no; + this.migration_version = MIGRATION_VERSION; } /* * Data structure for writing to Elasticsearch index */ - toEsDocsJSON() { + toReportSource(): ReportSource { return { - _id: this._id, - _index: this._index, - _source: { - jobtype: this.jobtype, - created_at: this.created_at, - created_by: this.created_by, - payload: this.payload, - meta: this.meta, - timeout: this.timeout, - max_attempts: this.max_attempts, - browser_type: this.browser_type, - status: this.status, - attempts: this.attempts, - started_at: this.started_at, - completed_at: this.completed_at, - }, + migration_version: MIGRATION_VERSION, + kibana_name: this.kibana_name, + kibana_id: this.kibana_id, + jobtype: this.jobtype, + created_at: this.created_at, + created_by: this.created_by, + payload: this.payload, + meta: this.meta, + timeout: this.timeout!, + max_attempts: this.max_attempts, + browser_type: this.browser_type!, + status: this.status, + attempts: this.attempts, + started_at: this.started_at, + completed_at: this.completed_at, + process_expiration: this.process_expiration, + output: this.output || null, + }; + } + + /* + * Parameters to save in a task instance + */ + toReportTaskJSON(): ReportTaskParams { + if (!this._index) { + throw new Error(`Task is missing the _index field!`); + } + + return { + id: this._id, + index: this._index, + jobtype: this.jobtype, + created_at: this.created_at, + created_by: 
this.created_by, + payload: this.payload, + meta: this.meta, + attempts: this.attempts, + max_attempts: this.max_attempts, }; } @@ -129,5 +170,3 @@ export class Report implements Partial { }; } } - -export { ReportApiJSON, ReportSource }; diff --git a/x-pack/plugins/reporting/server/lib/store/store.test.ts b/x-pack/plugins/reporting/server/lib/store/store.test.ts index e7a51fe3e60ffe..8bb5c7fb8bbf91 100644 --- a/x-pack/plugins/reporting/server/lib/store/store.test.ts +++ b/x-pack/plugins/reporting/server/lib/store/store.test.ts @@ -13,7 +13,7 @@ import { createMockLevelLogger, createMockReportingCore, } from '../../test_helpers'; -import { Report } from './report'; +import { Report, ReportDocument } from './report'; import { ReportingStore } from './store'; describe('ReportingStore', () => { @@ -174,11 +174,75 @@ describe('ReportingStore', () => { }); }); + it('findReport gets a report from ES and returns a Report object', async () => { + // setup + const mockReport: ReportDocument = { + _id: '1234-foo-78', + _index: '.reporting-test-17409', + _primary_term: 1234, + _seq_no: 5678, + _source: { + kibana_name: 'test', + kibana_id: 'test123', + migration_version: 'X.0.0', + created_at: 'some time', + created_by: 'some security person', + jobtype: 'csv', + status: 'pending', + meta: { testMeta: 'meta' } as any, + payload: { testPayload: 'payload' } as any, + browser_type: 'browser type string', + attempts: 0, + max_attempts: 1, + timeout: 30000, + output: null, + }, + }; + mockEsClient.get.mockResolvedValue({ body: mockReport } as any); + const store = new ReportingStore(mockCore, mockLogger); + const report = new Report({ + ...mockReport, + ...mockReport._source, + }); + + expect(await store.findReportFromTask(report.toReportTaskJSON())).toMatchInlineSnapshot(` + Report { + "_id": "1234-foo-78", + "_index": ".reporting-test-17409", + "_primary_term": 1234, + "_seq_no": 5678, + "attempts": 0, + "browser_type": "browser type string", + "completed_at": undefined, + "created_at": "some time", + "created_by": "some security person", + "jobtype": "csv", + "kibana_id": undefined, + "kibana_name": undefined, + "max_attempts": 1, + "meta": Object { + "testMeta": "meta", + }, + "migration_version": "7.14.0", + "output": null, + "payload": Object { + "testPayload": "payload", + }, + "process_expiration": undefined, + "started_at": undefined, + "status": "pending", + "timeout": 30000, + } + `); + }); + it('setReportClaimed sets the status of a record to processing', async () => { const store = new ReportingStore(mockCore, mockLogger); const report = new Report({ _id: 'id-of-processing', _index: '.reporting-test-index-12345', + _seq_no: 42, + _primary_term: 10002, jobtype: 'test-report', created_by: 'created_by_test_string', browser_type: 'browser_type_test_string', @@ -194,23 +258,12 @@ describe('ReportingStore', () => { await store.setReportClaimed(report, { testDoc: 'test' } as any); - const [updateCall] = mockEsClient.update.mock.calls; - expect(updateCall).toMatchInlineSnapshot(` - Array [ - Object { - "body": Object { - "doc": Object { - "status": "processing", - "testDoc": "test", - }, - }, - "id": "id-of-processing", - "if_primary_term": undefined, - "if_seq_no": undefined, - "index": ".reporting-test-index-12345", - }, - ] - `); + const [[updateCall]] = mockEsClient.update.mock.calls; + const response = updateCall.body?.doc as Report; + expect(response.migration_version).toBe(`7.14.0`); + expect(response.status).toBe(`processing`); + expect(updateCall.if_seq_no).toBe(42); + 
expect(updateCall.if_primary_term).toBe(10002); }); it('setReportFailed sets the status of a record to failed', async () => { @@ -218,6 +271,8 @@ describe('ReportingStore', () => { const report = new Report({ _id: 'id-of-failure', _index: '.reporting-test-index-12345', + _seq_no: 43, + _primary_term: 10002, jobtype: 'test-report', created_by: 'created_by_test_string', browser_type: 'browser_type_test_string', @@ -233,23 +288,12 @@ describe('ReportingStore', () => { await store.setReportFailed(report, { errors: 'yes' } as any); - const [updateCall] = mockEsClient.update.mock.calls; - expect(updateCall).toMatchInlineSnapshot(` - Array [ - Object { - "body": Object { - "doc": Object { - "errors": "yes", - "status": "failed", - }, - }, - "id": "id-of-failure", - "if_primary_term": undefined, - "if_seq_no": undefined, - "index": ".reporting-test-index-12345", - }, - ] - `); + const [[updateCall]] = mockEsClient.update.mock.calls; + const response = updateCall.body?.doc as Report; + expect(response.migration_version).toBe(`7.14.0`); + expect(response.status).toBe(`failed`); + expect(updateCall.if_seq_no).toBe(43); + expect(updateCall.if_primary_term).toBe(10002); }); it('setReportCompleted sets the status of a record to completed', async () => { @@ -257,6 +301,8 @@ describe('ReportingStore', () => { const report = new Report({ _id: 'vastly-great-report-id', _index: '.reporting-test-index-12345', + _seq_no: 44, + _primary_term: 10002, jobtype: 'test-report', created_by: 'created_by_test_string', browser_type: 'browser_type_test_string', @@ -272,30 +318,21 @@ describe('ReportingStore', () => { await store.setReportCompleted(report, { certainly_completed: 'yes' } as any); - const [updateCall] = mockEsClient.update.mock.calls; - expect(updateCall).toMatchInlineSnapshot(` - Array [ - Object { - "body": Object { - "doc": Object { - "certainly_completed": "yes", - "status": "completed", - }, - }, - "id": "vastly-great-report-id", - "if_primary_term": undefined, - "if_seq_no": undefined, - "index": ".reporting-test-index-12345", - }, - ] - `); + const [[updateCall]] = mockEsClient.update.mock.calls; + const response = updateCall.body?.doc as Report; + expect(response.migration_version).toBe(`7.14.0`); + expect(response.status).toBe(`completed`); + expect(updateCall.if_seq_no).toBe(44); + expect(updateCall.if_primary_term).toBe(10002); }); - it('setReportCompleted sets the status of a record to completed_with_warnings', async () => { + it('sets the status of a record to completed_with_warnings', async () => { const store = new ReportingStore(mockCore, mockLogger); const report = new Report({ _id: 'vastly-great-report-id', _index: '.reporting-test-index-12345', + _seq_no: 45, + _primary_term: 10002, jobtype: 'test-report', created_by: 'created_by_test_string', browser_type: 'browser_type_test_string', @@ -316,27 +353,52 @@ describe('ReportingStore', () => { }, } as any); - const [updateCall] = mockEsClient.update.mock.calls; - expect(updateCall).toMatchInlineSnapshot(` - Array [ - Object { - "body": Object { - "doc": Object { - "certainly_completed": "pretty_much", - "output": Object { - "warnings": Array [ - "those pants don't go with that shirt", - ], - }, - "status": "completed_with_warnings", - }, - }, - "id": "vastly-great-report-id", - "if_primary_term": undefined, - "if_seq_no": undefined, - "index": ".reporting-test-index-12345", - }, - ] + const [[updateCall]] = mockEsClient.update.mock.calls; + const response = updateCall.body?.doc as Report; + + 
expect(response.migration_version).toBe(`7.14.0`); + expect(response.status).toBe(`completed_with_warnings`); + expect(updateCall.if_seq_no).toBe(45); + expect(updateCall.if_primary_term).toBe(10002); + expect(response.output).toMatchInlineSnapshot(` + Object { + "warnings": Array [ + "those pants don't go with that shirt", + ], + } `); }); + + it('prepareReportForRetry resets the expiration and status on the report document', async () => { + const store = new ReportingStore(mockCore, mockLogger); + const report = new Report({ + _id: 'pretty-good-report-id', + _index: '.reporting-test-index-94058763', + _seq_no: 46, + _primary_term: 10002, + jobtype: 'test-report-2', + created_by: 'created_by_test_string', + browser_type: 'browser_type_test_string', + status: 'processing', + process_expiration: '2002', + max_attempts: 3, + payload: { + title: 'test report', + headers: 'rp_test_headers', + objectType: 'testOt', + browserTimezone: 'utc', + }, + timeout: 30000, + }); + + await store.prepareReportForRetry(report); + + const [[updateCall]] = mockEsClient.update.mock.calls; + const response = updateCall.body?.doc as Report; + + expect(response.migration_version).toBe(`7.14.0`); + expect(response.status).toBe(`pending`); + expect(updateCall.if_seq_no).toBe(46); + expect(updateCall.if_primary_term).toBe(10002); + }); }); diff --git a/x-pack/plugins/reporting/server/lib/store/store.ts b/x-pack/plugins/reporting/server/lib/store/store.ts index 5ec867812ef609..8f1e6c315a2d1e 100644 --- a/x-pack/plugins/reporting/server/lib/store/store.ts +++ b/x-pack/plugins/reporting/server/lib/store/store.ts @@ -5,18 +5,80 @@ * 2.0. */ +import { IndexResponse, UpdateResponse } from '@elastic/elasticsearch/api/types'; import { ElasticsearchClient } from 'src/core/server'; import { LevelLogger, statuses } from '../'; import { ReportingCore } from '../../'; +import { JobStatus } from '../../../common/types'; +import { ReportTaskParams } from '../tasks'; import { indexTimestamp } from './index_timestamp'; import { mapping } from './mapping'; -import { Report } from './report'; +import { MIGRATION_VERSION, Report, ReportDocument, ReportSource } from './report'; + +/* + * When an instance of Kibana claims a report job, this information tells us about that instance + */ +export type ReportProcessingFields = Required<{ + kibana_id: Report['kibana_id']; + kibana_name: Report['kibana_name']; + browser_type: Report['browser_type']; + attempts: Report['attempts']; + started_at: Report['started_at']; + timeout: Report['timeout']; + process_expiration: Report['process_expiration']; +}>; + +export type ReportFailedFields = Required<{ + completed_at: Report['completed_at']; + output: Report['output']; +}>; + +export type ReportCompletedFields = Required<{ + completed_at: Report['completed_at']; + output: Report['output']; +}>; + +/* + * When searching for long-pending reports, we get a subset of fields + */ +export interface ReportRecordTimeout { + _id: string; + _index: string; + _source: { + status: JobStatus; + process_expiration?: string; + }; +} const checkReportIsEditable = (report: Report) => { - if (!report._id || !report._index) { - throw new Error(`Report object is not synced with ES!`); + const { _id, _index, _seq_no, _primary_term } = report; + if (_id == null || _index == null) { + throw new Error(`Report is not editable: Job [${_id}] is not synced with ES!`); + } + + if (_seq_no == null || _primary_term == null) { + throw new Error( + `Report is not editable: Job [${_id}] is missing _seq_no and _primary_term fields!` 
+ ); } }; +/* + * When searching for long-pending reports, we get a subset of fields + */ +const sourceDoc = (doc: Partial): Partial => { + return { + ...doc, + migration_version: MIGRATION_VERSION, + }; +}; + +const jobDebugMessage = (report: Report) => + `${report._id} ` + + `[_index: ${report._index}] ` + + `[_seq_no: ${report._seq_no}] ` + + `[_primary_term: ${report._primary_term}]` + + `[attempts: ${report.attempts}] ` + + `[process_expiration: ${report.process_expiration}]`; /* * A class to give an interface to historical reports in the reporting.index @@ -25,8 +87,8 @@ const checkReportIsEditable = (report: Report) => { * - interface for downloading the report */ export class ReportingStore { - private readonly indexPrefix: string; - private readonly indexInterval: string; + private readonly indexPrefix: string; // config setting of index prefix in system index name + private readonly indexInterval: string; // config setting of index prefix: how often to poll for pending work private client?: ElasticsearchClient; constructor(private reportingCore: ReportingCore, private logger: LevelLogger) { @@ -34,6 +96,7 @@ export class ReportingStore { this.indexPrefix = config.get('index'); this.indexInterval = config.get('queue', 'indexInterval'); + this.logger = logger.clone(['store']); } private async getClient() { @@ -84,18 +147,20 @@ export class ReportingStore { /* * Called from addReport, which handles any errors */ - private async indexReport(report: Report) { + private async indexReport(report: Report): Promise { const doc = { index: report._index!, id: report._id, + refresh: true, body: { - ...report.toEsDocsJSON()._source, - process_expiration: new Date(0), // use epoch so the job query works - attempts: 0, - status: statuses.JOB_STATUS_PENDING, + ...report.toReportSource(), + ...sourceDoc({ + process_expiration: new Date(0).toISOString(), + attempts: 0, + status: statuses.JOB_STATUS_PENDING, + }), }, }; - const client = await this.getClient(); const { body } = await client.index(doc); @@ -121,11 +186,9 @@ export class ReportingStore { await this.createIndex(index); try { - const doc = await this.indexReport(report); - report.updateWithEsDoc(doc); + report.updateWithEsDoc(await this.indexReport(report)); await this.refreshIndex(index); - this.logger.debug(`Successfully stored pending job: ${report._index}/${report._id}`); return report; } catch (err) { @@ -135,85 +198,209 @@ export class ReportingStore { } } - public async setReportClaimed(report: Report, stats: Partial): Promise { - const doc = { - ...stats, + /* + * Search for a report from task data and return back the report + */ + public async findReportFromTask( + taskJson: Pick + ): Promise { + if (!taskJson.index) { + throw new Error('Task JSON is missing index field!'); + } + + try { + const client = await this.getClient(); + const { body: document } = await client.get({ + index: taskJson.index, + id: taskJson.id, + }); + + return new Report({ + _id: document._id, + _index: document._index, + _seq_no: document._seq_no, + _primary_term: document._primary_term, + jobtype: document._source?.jobtype, + attempts: document._source?.attempts, + browser_type: document._source?.browser_type, + created_at: document._source?.created_at, + created_by: document._source?.created_by, + max_attempts: document._source?.max_attempts, + meta: document._source?.meta, + payload: document._source?.payload, + process_expiration: document._source?.process_expiration, + status: document._source?.status, + timeout: document._source?.timeout, + }); + 
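For readers following the store changes above: every update these methods issue is guarded by the document's _seq_no and _primary_term, which is why checkReportIsEditable refuses to proceed without them. A minimal self-contained sketch of that pattern, assuming a plain @elastic/elasticsearch 7.x client and an illustrative index, id, and status value (the store itself goes through the ElasticsearchClient provided by core, so this is not the plugin's actual code):

import { Client } from '@elastic/elasticsearch';

// Sketch only: update a report document only if nobody else has modified it
// since we read it (optimistic concurrency via _seq_no / _primary_term).
async function claimIfUnchanged(client: Client, index: string, id: string): Promise<boolean> {
  // A GET returns the current _seq_no and _primary_term alongside _source.
  const { body: doc } = await client.get({ index, id });

  try {
    await client.update({
      index,
      id,
      if_seq_no: doc._seq_no,
      if_primary_term: doc._primary_term,
      refresh: true, // make the new status visible to searches right away
      body: { doc: { status: 'processing' } }, // illustrative field change
    });
    return true;
  } catch (err: any) {
    // Elasticsearch rejects the update with a version_conflict_engine_exception (409)
    // if another writer bumped _seq_no / _primary_term in the meantime.
    if (err?.meta?.statusCode === 409) {
      return false;
    }
    throw err;
  }
}

A conflicting writer causes the update to fail with a 409 version conflict instead of silently overwriting the claim, which is the behavior the execute task relies on when two Kibana instances race for the same report.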
} catch (err) { + this.logger.error( + `Error in finding the report from the scheduled task info! ` + + `[id: ${taskJson.id}] [index: ${taskJson.index}]` + ); + this.logger.error(err); + throw err; + } + } + + public async setReportClaimed( + report: Report, + processingInfo: ReportProcessingFields + ): Promise> { + const doc = sourceDoc({ + ...processingInfo, status: statuses.JOB_STATUS_PROCESSING, - }; + }); try { checkReportIsEditable(report); const client = await this.getClient(); - const { body } = await client.update({ + const { body } = await client.update({ id: report._id, index: report._index!, if_seq_no: report._seq_no, if_primary_term: report._primary_term, + refresh: true, body: { doc }, }); - return (body as unknown) as Report; + return body; } catch (err) { - this.logger.error('Error in setting report processing status!'); + this.logger.error( + `Error in updating status to processing! Report: ` + jobDebugMessage(report) + ); this.logger.error(err); throw err; } } - public async setReportFailed(report: Report, stats: Partial): Promise { - const doc = { - ...stats, + public async setReportFailed( + report: Report, + failedInfo: ReportFailedFields + ): Promise> { + const doc = sourceDoc({ + ...failedInfo, status: statuses.JOB_STATUS_FAILED, - }; + }); try { checkReportIsEditable(report); const client = await this.getClient(); - const { body } = await client.update({ + const { body } = await client.update({ id: report._id, index: report._index!, if_seq_no: report._seq_no, if_primary_term: report._primary_term, + refresh: true, body: { doc }, }); - - return (body as unknown) as Report; + return body; } catch (err) { - this.logger.error('Error in setting report failed status!'); + this.logger.error(`Error in updating status to failed! Report: ` + jobDebugMessage(report)); this.logger.error(err); throw err; } } - public async setReportCompleted(report: Report, stats: Partial): Promise { + public async setReportCompleted( + report: Report, + completedInfo: ReportCompletedFields + ): Promise> { + const { output } = completedInfo; + const status = + output && output.warnings && output.warnings.length > 0 + ? statuses.JOB_STATUS_WARNINGS + : statuses.JOB_STATUS_COMPLETED; + const doc = sourceDoc({ + ...completedInfo, + status, + }); + try { - const { output } = stats; - const status = - output && output.warnings && output.warnings.length > 0 - ? statuses.JOB_STATUS_WARNINGS - : statuses.JOB_STATUS_COMPLETED; - const doc = { - ...stats, - status, - }; checkReportIsEditable(report); const client = await this.getClient(); - const { body } = await client.update({ + const { body } = await client.update({ id: report._id, index: report._index!, if_seq_no: report._seq_no, if_primary_term: report._primary_term, + refresh: true, body: { doc }, }); + return body; + } catch (err) { + this.logger.error(`Error in updating status to complete! 
Report: ` + jobDebugMessage(report)); + this.logger.error(err); + throw err; + } + } + + public async prepareReportForRetry(report: Report): Promise> { + const doc = sourceDoc({ + status: statuses.JOB_STATUS_PENDING, + process_expiration: null, + }); - return (body as unknown) as Report; + try { + checkReportIsEditable(report); + + const client = await this.getClient(); + const { body } = await client.update({ + id: report._id, + index: report._index!, + if_seq_no: report._seq_no, + if_primary_term: report._primary_term, + refresh: true, + body: { doc }, + }); + return body; } catch (err) { - this.logger.error('Error in setting report complete status!'); + this.logger.error( + `Error in clearing expiration and status for retry! Report: ` + jobDebugMessage(report) + ); this.logger.error(err); throw err; } } + + /* + * A report needs to be rescheduled when: + * 1. An older version of Kibana created jobs with ESQueue, and they have + * not yet started running. + * 2. The report process_expiration field is overdue, which happens if the + * report runs too long or Kibana restarts during execution + */ + public async findStaleReportJob(): Promise { + const client = await this.getClient(); + + const expiredFilter = { + bool: { + must: [ + { range: { process_expiration: { lt: `now` } } }, + { terms: { status: [statuses.JOB_STATUS_PROCESSING] } }, + ], + }, + }; + const oldVersionFilter = { + bool: { + must: [{ terms: { status: [statuses.JOB_STATUS_PENDING] } }], + must_not: [{ exists: { field: 'migration_version' } }], + }, + }; + + const { body } = await client.search({ + size: 1, + index: this.indexPrefix + '-*', + seq_no_primary_term: true, + _source_excludes: ['output'], + body: { + sort: { created_at: { order: 'asc' as const } }, // find the oldest first + query: { bool: { filter: { bool: { should: [expiredFilter, oldVersionFilter] } } } }, + }, + }); + + return body.hits?.hits[0] as ReportRecordTimeout; + } } diff --git a/x-pack/plugins/reporting/server/lib/tasks/error_logger.test.ts b/x-pack/plugins/reporting/server/lib/tasks/error_logger.test.ts new file mode 100644 index 00000000000000..607c9c32538be4 --- /dev/null +++ b/x-pack/plugins/reporting/server/lib/tasks/error_logger.test.ts @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { createMockLevelLogger } from '../../test_helpers'; +import { errorLogger } from './error_logger'; + +const logger = createMockLevelLogger(); + +describe('Execute Report Error Logger', () => { + const errorLogSpy = jest.spyOn(logger, 'error'); + + beforeEach(() => { + errorLogSpy.mockReset(); + }); + + it('cuts off the error message after 1000 characters, and includes the last 1000 characters', () => { + const longLogSet = new Array(2000); + for (let i = 0; i < longLogSet.length; i++) { + longLogSet[i] = `e`; // make a lot of e's + } + const longLog = longLogSet.join(''); + const longError = new Error(longLog); + + errorLogger(logger, 'Something went KABOOM!', longError); + + const { message, stack } = errorLogSpy.mock.calls[0][0] as Error; + expect(message).toMatch(/Something went KABOOM!: Error: e{969}\.\.\.e{1000}$/); + expect(stack).toEqual(longError.stack); + + const disclaimer = errorLogSpy.mock.calls[1][0] as string; + expect(disclaimer).toMatchInlineSnapshot( + `"A partial version of the entire error message was logged. 
The entire error message length is: 2031 characters."` + ); + }); + + it('does not cut off the error message when shorter than the max', () => { + const shortLogSet = new Array(100); + for (let i = 0; i < shortLogSet.length; i++) { + shortLogSet[i] = `e`; // make a lot of e's + } + const shortLog = shortLogSet.join(''); + const shortError = new Error(shortLog); + + errorLogger(logger, 'Something went KABOOM!', shortError); + + const { message, stack } = errorLogSpy.mock.calls[0][0] as Error; + expect(message).toMatch(/Something went KABOOM!: Error: e{100}$/); + expect(stack).toEqual(shortError.stack); + + const disclaimer = errorLogSpy.mock.calls[1]; + expect(disclaimer).toBeUndefined(); + }); +}); diff --git a/x-pack/plugins/reporting/server/lib/tasks/error_logger.ts b/x-pack/plugins/reporting/server/lib/tasks/error_logger.ts new file mode 100644 index 00000000000000..b4d4028230666a --- /dev/null +++ b/x-pack/plugins/reporting/server/lib/tasks/error_logger.ts @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { LevelLogger } from '..'; + +const MAX_PARTIAL_ERROR_LENGTH = 1000; // 1000 of beginning, 1000 of end +const ERROR_PARTIAL_SEPARATOR = '...'; +const MAX_ERROR_LENGTH = MAX_PARTIAL_ERROR_LENGTH * 2 + ERROR_PARTIAL_SEPARATOR.length; + +/* + * An error message string could be very long, as it sometimes includes huge + * amount of base64 + */ +export const errorLogger = (logger: LevelLogger, message: string, err?: Error) => { + if (err) { + const errString = `${message}: ${err}`; + const errLength = errString.length; + if (errLength > MAX_ERROR_LENGTH) { + const subStr = String.prototype.substring.bind(errString); + const partialErrString = + subStr(0, MAX_PARTIAL_ERROR_LENGTH) + + ERROR_PARTIAL_SEPARATOR + + subStr(errLength - MAX_PARTIAL_ERROR_LENGTH); + + const partialError = new Error(partialErrString); + partialError.stack = err.stack; + logger.error(partialError); + logger.error( + `A partial version of the entire error message was logged. The entire error message length is: ${errLength} characters.` + ); + } else { + const combinedError = new Error(errString); + combinedError.stack = err.stack; + logger.error(combinedError); + } + return; + } + + logger.error(message); +}; diff --git a/x-pack/plugins/reporting/server/lib/tasks/execute_report.test.ts b/x-pack/plugins/reporting/server/lib/tasks/execute_report.test.ts new file mode 100644 index 00000000000000..99045050120c14 --- /dev/null +++ b/x-pack/plugins/reporting/server/lib/tasks/execute_report.test.ts @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
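The numbers in the snapshot above follow directly from how errorLogger builds its string: `${message}: ${err}` prepends a 31-character prefix ('Something went KABOOM!: Error: ') to the 2000 'e' characters, for 2031 characters in total, so the first 1000-character slice holds the prefix plus 969 e's. A small worked example of that arithmetic:

// Reproduce the truncation arithmetic behind the assertions above.
const prefix = 'Something went KABOOM!: Error: '; // 31 characters: `${message}: ${err}` stringifies the Error
const full = prefix + 'e'.repeat(2000);           // 31 + 2000 = 2031 characters in total
const head = full.substring(0, 1000);             // the 31-character prefix plus 969 e's
const tail = full.substring(full.length - 1000);  // the last 1000 e's
const logged = `${head}...${tail}`;               // matches /e{969}\.\.\.e{1000}$/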
+ */ + +import { ReportingCore } from '../..'; +import { RunContext } from '../../../../task_manager/server'; +import { taskManagerMock } from '../../../../task_manager/server/mocks'; +import { ReportingConfigType } from '../../config'; +import { + createMockConfigSchema, + createMockLevelLogger, + createMockReportingCore, +} from '../../test_helpers'; +import { ExecuteReportTask } from './'; + +const logger = createMockLevelLogger(); + +describe('Execute Report Task', () => { + let mockReporting: ReportingCore; + let configType: ReportingConfigType; + beforeAll(async () => { + configType = createMockConfigSchema(); + mockReporting = await createMockReportingCore(configType); + }); + + it('Instance setup', () => { + const task = new ExecuteReportTask(mockReporting, configType, logger); + expect(task.getStatus()).toBe('uninitialized'); + expect(task.getTaskDefinition()).toMatchInlineSnapshot(` + Object { + "createTaskRunner": [Function], + "maxAttempts": 1, + "maxConcurrency": 1, + "timeout": "120s", + "title": "Reporting: execute job", + "type": "report:execute", + } + `); + }); + + it('Instance start', () => { + const mockTaskManager = taskManagerMock.createStart(); + const task = new ExecuteReportTask(mockReporting, configType, logger); + expect(task.init(mockTaskManager)); + expect(task.getStatus()).toBe('initialized'); + }); + + it('create task runner', async () => { + logger.info = jest.fn(); + logger.error = jest.fn(); + + const task = new ExecuteReportTask(mockReporting, configType, logger); + const taskDef = task.getTaskDefinition(); + const taskRunner = taskDef.createTaskRunner(({ + taskInstance: { + id: 'random-task-id', + params: { index: 'cool-reporting-index', id: 'cool-reporting-id' }, + }, + } as unknown) as RunContext); + expect(taskRunner).toHaveProperty('run'); + expect(taskRunner).toHaveProperty('cancel'); + }); + + it('Max Concurrency is 0 if pollEnabled is false', () => { + const queueConfig = ({ + queue: { pollEnabled: false, timeout: 55000 }, + } as unknown) as ReportingConfigType['queue']; + + const task = new ExecuteReportTask(mockReporting, { ...configType, ...queueConfig }, logger); + expect(task.getStatus()).toBe('uninitialized'); + expect(task.getTaskDefinition()).toMatchInlineSnapshot(` + Object { + "createTaskRunner": [Function], + "maxAttempts": 1, + "maxConcurrency": 0, + "timeout": "55s", + "title": "Reporting: execute job", + "type": "report:execute", + } + `); + }); +}); diff --git a/x-pack/plugins/reporting/server/lib/tasks/execute_report.ts b/x-pack/plugins/reporting/server/lib/tasks/execute_report.ts new file mode 100644 index 00000000000000..f9e2cd82b0805c --- /dev/null +++ b/x-pack/plugins/reporting/server/lib/tasks/execute_report.ts @@ -0,0 +1,431 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
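The two inline snapshots in this test differ only in values derived from config: timeout is queue.timeout rounded up to whole seconds (Task Manager does not accept milliseconds), and maxConcurrency drops to 0 when queue.pollEnabled is false so the instance never picks up execute tasks. A rough equivalent of that derivation using plain moment (the plugin uses its own numberToDuration helper from common/schema_utils, so treat this as an approximation):

import moment from 'moment';

// queue.timeout is configured in milliseconds (or as a duration); Task Manager
// wants a whole-second string, so 55000ms becomes '55s' and 120000ms becomes '120s'.
const queueTimeoutMs = 55000;
const timeout = Math.ceil(moment.duration(queueTimeoutMs).asSeconds()) + 's'; // '55s'

// Polling disabled means this Kibana instance should never claim execute tasks.
const pollEnabled: boolean = false;
const maxConcurrency = pollEnabled ? 1 : 0; // 0 in the second snapshot above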
+ */ + +import { UpdateResponse } from '@elastic/elasticsearch/api/types'; +import moment from 'moment'; +import * as Rx from 'rxjs'; +import { timeout } from 'rxjs/operators'; +import { LevelLogger } from '../'; +import { ReportingCore } from '../../'; +import { + RunContext, + TaskManagerStartContract, + TaskRunCreatorFunction, +} from '../../../../task_manager/server'; +import { CancellationToken } from '../../../common'; +import { durationToNumber, numberToDuration } from '../../../common/schema_utils'; +import { ReportingConfigType } from '../../config'; +import { BasePayload, RunTaskFn } from '../../types'; +import { Report, ReportDocument, ReportingStore } from '../store'; +import { ReportFailedFields, ReportProcessingFields } from '../store/store'; +import { + ReportingTask, + ReportingTaskStatus, + REPORTING_EXECUTE_TYPE, + ReportTaskParams, + TaskRunResult, +} from './'; +import { errorLogger } from './error_logger'; + +interface ReportingExecuteTaskInstance { + state: object; + taskType: string; + params: ReportTaskParams; + runAt?: Date; +} + +function isOutput(output: TaskRunResult | Error): output is TaskRunResult { + return typeof output === 'object' && (output as TaskRunResult).content != null; +} + +function reportFromTask(task: ReportTaskParams) { + return new Report({ ...task, _id: task.id, _index: task.index }); +} + +export class ExecuteReportTask implements ReportingTask { + public TYPE = REPORTING_EXECUTE_TYPE; + + private logger: LevelLogger; + private taskManagerStart?: TaskManagerStartContract; + private taskExecutors?: Map>; + private kibanaId?: string; + private kibanaName?: string; + private store?: ReportingStore; + + constructor( + private reporting: ReportingCore, + private config: ReportingConfigType, + logger: LevelLogger + ) { + this.logger = logger.clone(['runTask']); + } + + /* + * To be called from plugin start + */ + public async init(taskManager: TaskManagerStartContract) { + this.taskManagerStart = taskManager; + + const { reporting } = this; + + const exportTypesRegistry = reporting.getExportTypesRegistry(); + const executors = new Map>(); + for (const exportType of exportTypesRegistry.getAll()) { + const exportTypeLogger = this.logger.clone([exportType.id]); + const jobExecutor = exportType.runTaskFnFactory(reporting, exportTypeLogger); + // The task will run the function with the job type as a param. 
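To make the intent of the executor map built in init() concrete: _performJob (further down in this file) looks the runner up by jobtype and races it against queue.timeout with the rxjs timeout operator. A small self-contained sketch of that dispatch, using a hypothetical 'csv' runner and a stand-in for CancellationToken rather than the plugin's real types:

import * as Rx from 'rxjs';
import { timeout } from 'rxjs/operators';

// Hypothetical stand-ins: a runner signature and a registry keyed by job type.
type RunFn = (jobId: string, payload: unknown, cancel: { isCancelled(): boolean }) => Promise<unknown>;
const executorsByJobType = new Map<string, RunFn>();
executorsByJobType.set('csv', async (jobId) => ({ content: `csv for ${jobId}`, content_type: 'text/csv' }));

// Roughly what _performJob does with the map: find the runner for the job type,
// then fail the run if it does not emit a result before the queue timeout.
async function runExport(jobtype: string, jobId: string, payload: unknown, queueTimeoutMs: number) {
  const runner = executorsByJobType.get(jobtype);
  if (!runner) {
    throw new Error(`No defined task runner function for ${jobtype}!`);
  }
  const cancellation = { isCancelled: () => false }; // stand-in for CancellationToken
  return Rx.from(runner(jobId, payload, cancellation))
    .pipe(timeout(queueTimeoutMs)) // errors if no value is emitted before the timeout
    .toPromise();
}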
+ // This allows us to retrieve the specific export type runFn when called to run an export + executors.set(exportType.jobType, jobExecutor); + } + + this.taskExecutors = executors; + + const config = reporting.getConfig(); + this.kibanaId = config.kbnConfig.get('server', 'uuid'); + this.kibanaName = config.kbnConfig.get('server', 'name'); + } + + /* + * Async get the ReportingStore: it is only available after PluginStart + */ + private async getStore(): Promise { + if (this.store) { + return this.store; + } + const { store } = await this.reporting.getPluginStartDeps(); + this.store = store; + return store; + } + + private getTaskManagerStart() { + if (!this.taskManagerStart) { + throw new Error('Reporting task runner has not been initialized!'); + } + return this.taskManagerStart; + } + + public async _claimJob(task: ReportTaskParams): Promise { + if (this.kibanaId == null) { + throw new Error(`Kibana instance ID is undefined!`); + } + if (this.kibanaName == null) { + throw new Error(`Kibana instance name is undefined!`); + } + + const store = await this.getStore(); + let report: Report; + if (task.id && task.index) { + // if this is an ad-hoc report, there is a corresponding "pending" record in ReportingStore in need of updating + report = await store.findReportFromTask(task); // receives seq_no and primary_term + } else { + // if this is a scheduled report (not implemented), the report object needs to be instantiated + throw new Error('Could not find matching report document!'); + } + + // Check if this is a completed job. This may happen if the `reports:monitor` + // task detected it to be a zombie job and rescheduled it, but it + // eventually completed on its own. + if (report.status === 'completed') { + throw new Error(`Can not claim the report job: it is already completed!`); + } + + const m = moment(); + + // check if job has exceeded maxAttempts (stored in job params) and somehow hasn't been marked as failed yet + // NOTE: the max attempts value comes from the stored document, so changing the capture.maxAttempts config setting does not affect existing pending reports + const maxAttempts = task.max_attempts; + if (report.attempts >= maxAttempts) { + const err = new Error(`Max attempts reached (${maxAttempts}). 
Queue timeout reached.`); + await this._failJob(report, err); + throw err; + } + + const queueTimeout = durationToNumber(this.config.queue.timeout); + const startTime = m.toISOString(); + const expirationTime = m.add(queueTimeout).toISOString(); + + const doc: ReportProcessingFields = { + kibana_id: this.kibanaId, + kibana_name: this.kibanaName, + browser_type: this.config.capture.browser.type, + attempts: report.attempts + 1, + started_at: startTime, + timeout: queueTimeout, + process_expiration: expirationTime, + }; + + const claimedReport = new Report({ + ...report, + ...doc, + }); + + this.logger.debug( + `Claiming ${claimedReport.jobtype} ${report._id} ` + + `[_index: ${report._index}] ` + + `[_seq_no: ${report._seq_no}] ` + + `[_primary_term: ${report._primary_term}] ` + + `[attempts: ${report.attempts}] ` + + `[process_expiration: ${expirationTime}]` + ); + + const resp = await store.setReportClaimed(claimedReport, doc); + claimedReport._seq_no = resp._seq_no; + claimedReport._primary_term = resp._primary_term; + return claimedReport; + } + + private async _failJob(report: Report, error?: Error): Promise> { + const message = `Failing ${report.jobtype} job ${report._id}`; + + // log the error + let docOutput; + if (error) { + errorLogger(this.logger, message, error); + docOutput = this._formatOutput(error); + } else { + errorLogger(this.logger, message); + } + + // update the report in the store + const store = await this.getStore(); + const completedTime = moment().toISOString(); + const doc: ReportFailedFields = { + completed_at: completedTime, + output: docOutput, + }; + + return await store.setReportFailed(report, doc); + } + + private _formatOutput(output: TaskRunResult | Error): TaskRunResult { + const docOutput = {} as TaskRunResult; + const unknownMime = null; + + if (isOutput(output)) { + docOutput.content = output.content; + docOutput.content_type = output.content_type || unknownMime; + docOutput.max_size_reached = output.max_size_reached; + docOutput.csv_contains_formulas = output.csv_contains_formulas; + docOutput.size = output.size; + docOutput.warnings = + output.warnings && output.warnings.length > 0 ? 
output.warnings : undefined; + } else { + const defaultOutput = null; + docOutput.content = output.toString() || defaultOutput; + docOutput.content_type = unknownMime; + docOutput.warnings = [output.toString()]; + } + + return docOutput; + } + + public async _performJob( + task: ReportTaskParams, + cancellationToken: CancellationToken + ): Promise { + if (!this.taskExecutors) { + throw new Error(`Task run function factories have not been called yet!`); + } + + // get the run_task function + const runner = this.taskExecutors.get(task.jobtype); + if (!runner) { + throw new Error(`No defined task runner function for ${task.jobtype}!`); + } + + // run the report + // if workerFn doesn't finish before timeout, call the cancellationToken and throw an error + const queueTimeout = durationToNumber(this.config.queue.timeout); + return Rx.from(runner(task.id, task.payload, cancellationToken)) + .pipe(timeout(queueTimeout)) // throw an error if a value is not emitted before timeout + .toPromise(); + } + + public async _completeJob(report: Report, output: TaskRunResult): Promise { + let docId = `/${report._index}/_doc/${report._id}`; + + this.logger.debug(`Saving ${report.jobtype} to ${docId}.`); + + const completedTime = moment().toISOString(); + const docOutput = this._formatOutput(output); + + const store = await this.getStore(); + const doc = { + completed_at: completedTime, + output: docOutput, + }; + docId = `/${report._index}/_doc/${report._id}`; + + const resp = await store.setReportCompleted(report, doc); + this.logger.info(`Saved ${report.jobtype} job ${docId}`); + report._seq_no = resp._seq_no; + report._primary_term = resp._primary_term; + return report; + } + + /* + * Provides a TaskRunner for Task Manager + */ + private getTaskRunner(): TaskRunCreatorFunction { + // Keep a separate local stack for each task run + return (context: RunContext) => { + let jobId: string | undefined; + const cancellationToken = new CancellationToken(); + + return { + /* + * Runs a reporting job + * Claim job: Finds the report in ReportingStore, updates it to "processing" + * Perform job: Gets the export type's runner, runs it with the job params + * Complete job: Updates the report in ReportStore with the output from the runner + * If any error happens, additional retry attempts may be picked up by a separate instance + */ + run: async () => { + let report: Report | undefined; + + // find the job in the store and set status to processing + const task = context.taskInstance.params as ReportTaskParams; + jobId = task?.id; + + try { + if (!jobId) { + throw new Error('Invalid report data provided in scheduled task!'); + } + this.reporting.trackReport(jobId); + + // Update job status to claimed + report = await this._claimJob(task); + } catch (failedToClaim) { + // error claiming report - log the error + // could be version conflict, or no longer connected to ES + errorLogger(this.logger, `Error in claiming ${jobId}`, failedToClaim); + } + + if (!report) { + errorLogger(this.logger, `Job ${jobId} could not be claimed. 
Exiting...`); + return; + } + + const { jobtype: jobType, attempts, max_attempts: maxAttempts } = report; + this.logger.debug( + `Starting ${jobType} report ${jobId}: attempt ${attempts} of ${maxAttempts}.` + ); + this.logger.debug(`Reports running: ${this.reporting.countConcurrentReports()}.`); + + try { + const output = await this._performJob(task, cancellationToken); + if (output) { + report = await this._completeJob(report, output); + } + // untrack the report for concurrency awareness + this.logger.debug(`Stopping ${jobId}.`); + } catch (failedToExecuteErr) { + cancellationToken.cancel(); + + if (attempts < maxAttempts) { + // attempts remain, reschedule + try { + if (report == null) { + throw new Error(`Report ${jobId} is null!`); + } + // reschedule to retry + const remainingAttempts = maxAttempts - report.attempts; + errorLogger( + this.logger, + `Scheduling retry for job ${jobId}. Retries remaining: ${remainingAttempts}.`, + failedToExecuteErr + ); + + await this.rescheduleTask(reportFromTask(task).toReportTaskJSON(), this.logger); + } catch (rescheduleErr) { + // can not be rescheduled - log the error + errorLogger( + this.logger, + `Could not reschedule the errored job ${jobId}!`, + rescheduleErr + ); + } + } else { + // 0 attempts remain - fail the job + try { + const maxAttemptsMsg = `Max attempts (${attempts}) reached for job ${jobId}. Failed with: ${failedToExecuteErr}`; + if (report == null) { + throw new Error(`Report ${jobId} is null!`); + } + const resp = await this._failJob(report, new Error(maxAttemptsMsg)); + report._seq_no = resp._seq_no; + report._primary_term = resp._primary_term; + } catch (failedToFailError) { + errorLogger(this.logger, `Could not fail ${jobId}!`, failedToFailError); + } + } + } finally { + this.reporting.untrackReport(jobId); + this.logger.debug(`Reports running: ${this.reporting.countConcurrentReports()}.`); + } + }, + + /* + * Called by Task Manager to stop the report execution process in case + * of timeout or server shutdown + */ + cancel: async () => { + if (jobId) { + this.logger.warn(`Cancelling job ${jobId}...`); + } + cancellationToken.cancel(); + }, + }; + }; + } + + public getTaskDefinition() { + // round up from ms to the nearest second + const queueTimeout = Math.ceil(numberToDuration(this.config.queue.timeout).asSeconds()) + 's'; + const maxConcurrency = this.config.queue.pollEnabled ? 1 : 0; + + return { + type: REPORTING_EXECUTE_TYPE, + title: 'Reporting: execute job', + createTaskRunner: this.getTaskRunner(), + maxAttempts: 1, // NOTE: not using Task Manager retries + timeout: queueTimeout, + maxConcurrency, + }; + } + + public async scheduleTask(report: ReportTaskParams) { + const taskInstance: ReportingExecuteTaskInstance = { + taskType: REPORTING_EXECUTE_TYPE, + state: {}, + params: report, + }; + + return await this.getTaskManagerStart().schedule(taskInstance); + } + + private async rescheduleTask(task: ReportTaskParams, logger: LevelLogger) { + logger.info(`Rescheduling task:${task.id} to retry after error.`); + + const oldTaskInstance: ReportingExecuteTaskInstance = { + taskType: REPORTING_EXECUTE_TYPE, + state: {}, + params: task, + }; + const newTask = await this.getTaskManagerStart().schedule(oldTaskInstance); + logger.debug(`Rescheduled task:${task.id}. 
New task: task:${newTask.id}`); + return newTask; + } + + public getStatus() { + if (this.taskManagerStart) { + return ReportingTaskStatus.INITIALIZED; + } + + return ReportingTaskStatus.UNINITIALIZED; + } +} diff --git a/x-pack/plugins/reporting/server/lib/tasks/index.ts b/x-pack/plugins/reporting/server/lib/tasks/index.ts index 81d834440152a3..c02b06d97adc7d 100644 --- a/x-pack/plugins/reporting/server/lib/tasks/index.ts +++ b/x-pack/plugins/reporting/server/lib/tasks/index.ts @@ -5,9 +5,17 @@ * 2.0. */ +import { TaskRunCreatorFunction } from '../../../../task_manager/server'; import { ReportSource, TaskRunResult } from '../../../common/types'; import { BasePayload } from '../../types'; +export const REPORTING_EXECUTE_TYPE = 'report:execute'; +export const REPORTING_MONITOR_TYPE = 'reports:monitor'; + +export { ExecuteReportTask } from './execute_report'; +export { MonitorReportsTask } from './monitor_reports'; +export { TaskRunResult }; + /* * The document created by Reporting to store as task parameters for Task * Manager to reference the report in .reporting @@ -19,8 +27,23 @@ export interface ReportTaskParams { created_at: ReportSource['created_at']; created_by: ReportSource['created_by']; jobtype: ReportSource['jobtype']; + max_attempts: ReportSource['max_attempts']; attempts: ReportSource['attempts']; meta: ReportSource['meta']; } -export { TaskRunResult }; +export enum ReportingTaskStatus { + UNINITIALIZED = 'uninitialized', + INITIALIZED = 'initialized', +} + +export interface ReportingTask { + getTaskDefinition: () => { + type: string; + title: string; + createTaskRunner: TaskRunCreatorFunction; + maxAttempts: number; + timeout: string; + }; + getStatus: () => ReportingTaskStatus; +} diff --git a/x-pack/plugins/reporting/server/lib/tasks/monitor_report.test.ts b/x-pack/plugins/reporting/server/lib/tasks/monitor_report.test.ts new file mode 100644 index 00000000000000..fb9b49ab9e265c --- /dev/null +++ b/x-pack/plugins/reporting/server/lib/tasks/monitor_report.test.ts @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
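Before reading the monitoring task below, it helps to keep the shape of the findStaleReportJob search (added to store.ts above) in mind: one document at a time, oldest first, matching either an expired processing job or a pre-task-manager pending job. Restated here as a standalone search request, with '.reporting-*' as an illustrative index pattern (the real prefix comes from the reporting index config setting):

// The two ways a report qualifies for rescheduling, expressed as a search request.
const findStaleReportRequest = {
  index: '.reporting-*', // illustrative pattern; the store uses its configured index prefix
  size: 1, // recover at most one job per monitoring pass to avoid flooding Task Manager
  seq_no_primary_term: true, // so the recovered document can be updated with a concurrency guard
  _source_excludes: ['output'], // completed output can be large and is not needed here
  body: {
    sort: { created_at: { order: 'asc' as const } }, // oldest report first
    query: {
      bool: {
        filter: {
          bool: {
            should: [
              // 1. claimed by some Kibana instance, but its process_expiration has passed
              {
                bool: {
                  must: [
                    { range: { process_expiration: { lt: 'now' } } },
                    { terms: { status: ['processing'] } },
                  ],
                },
              },
              // 2. created by a pre-task-manager Kibana: still pending, never stamped with migration_version
              {
                bool: {
                  must: [{ terms: { status: ['pending'] } }],
                  must_not: [{ exists: { field: 'migration_version' } }],
                },
              },
            ],
          },
        },
      },
    },
  },
};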
+ */ + +import { ReportingCore } from '../..'; +import { RunContext } from '../../../../task_manager/server'; +import { taskManagerMock } from '../../../../task_manager/server/mocks'; +import { ReportingConfigType } from '../../config'; +import { + createMockConfigSchema, + createMockLevelLogger, + createMockReportingCore, +} from '../../test_helpers'; +import { MonitorReportsTask } from './'; + +const logger = createMockLevelLogger(); + +describe('Execute Report Task', () => { + let mockReporting: ReportingCore; + let configType: ReportingConfigType; + beforeAll(async () => { + configType = createMockConfigSchema(); + mockReporting = await createMockReportingCore(configType); + }); + + it('Instance setup', () => { + const task = new MonitorReportsTask(mockReporting, configType, logger); + expect(task.getStatus()).toBe('uninitialized'); + expect(task.getTaskDefinition()).toMatchInlineSnapshot(` + Object { + "createTaskRunner": [Function], + "maxAttempts": 1, + "timeout": "120s", + "title": "Reporting: monitor jobs", + "type": "reports:monitor", + } + `); + }); + + it('Instance start', () => { + const mockTaskManager = taskManagerMock.createStart(); + const task = new MonitorReportsTask(mockReporting, configType, logger); + expect(task.init(mockTaskManager)); + expect(task.getStatus()).toBe('initialized'); + }); + + it('create task runner', async () => { + logger.info = jest.fn(); + logger.error = jest.fn(); + + const task = new MonitorReportsTask(mockReporting, configType, logger); + const taskDef = task.getTaskDefinition(); + const taskRunner = taskDef.createTaskRunner(({ + taskInstance: { + id: 'random-task-id', + params: { index: 'cool-reporting-index', id: 'cool-reporting-id' }, + }, + } as unknown) as RunContext); + expect(taskRunner).toHaveProperty('run'); + expect(taskRunner).toHaveProperty('cancel'); + }); +}); diff --git a/x-pack/plugins/reporting/server/lib/tasks/monitor_reports.ts b/x-pack/plugins/reporting/server/lib/tasks/monitor_reports.ts new file mode 100644 index 00000000000000..9e1bc49739c93b --- /dev/null +++ b/x-pack/plugins/reporting/server/lib/tasks/monitor_reports.ts @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import moment from 'moment'; +import { LevelLogger, ReportingStore } from '../'; +import { ReportingCore } from '../../'; +import { TaskManagerStartContract, TaskRunCreatorFunction } from '../../../../task_manager/server'; +import { numberToDuration } from '../../../common/schema_utils'; +import { ReportingConfigType } from '../../config'; +import { statuses } from '../statuses'; +import { Report } from '../store'; +import { ReportingTask, ReportingTaskStatus, REPORTING_MONITOR_TYPE, ReportTaskParams } from './'; + +/* + * Task for finding the ReportingRecords left in the ReportingStore (.reporting index) and stuck in + * a pending or processing status. + * + * Stuck in pending: + * - This can happen if the report was scheduled in an earlier version of Kibana that used ESQueue. + * - Task Manager doesn't know about these types of reports because there was never a task + * scheduled for them. + * Stuck in processing: + * - This can could happen if the server crashed while a report was executing. 
+ * - Task Manager doesn't know about these reports, because the task is completed in Task + * Manager when Reporting starts executing the report. We are not using Task Manager's retry + * mechanisms, which defer the retry for a few minutes. + * + * These events require us to reschedule the report with Task Manager, so that the jobs can be + * distributed and executed. + * + * The runner function reschedules a single report job per task run, to avoid flooding Task Manager + * in case many report jobs need to be recovered. + */ +export class MonitorReportsTask implements ReportingTask { + public TYPE = REPORTING_MONITOR_TYPE; + + private logger: LevelLogger; + private taskManagerStart?: TaskManagerStartContract; + private store?: ReportingStore; + private timeout: moment.Duration; + + constructor( + private reporting: ReportingCore, + private config: ReportingConfigType, + parentLogger: LevelLogger + ) { + this.logger = parentLogger.clone([REPORTING_MONITOR_TYPE]); + this.timeout = numberToDuration(config.queue.timeout); + } + + private async getStore(): Promise { + if (this.store) { + return this.store; + } + const { store } = await this.reporting.getPluginStartDeps(); + this.store = store; + return store; + } + + public async init(taskManager: TaskManagerStartContract) { + this.taskManagerStart = taskManager; + + // Round the interval up to the nearest second since Task Manager doesn't + // support milliseconds + const scheduleInterval = + Math.ceil(numberToDuration(this.config.queue.pollInterval).asSeconds()) + 's'; + this.logger.debug(`Task to monitor for pending reports to run every ${scheduleInterval}.`); + await taskManager.ensureScheduled({ + id: this.TYPE, + taskType: this.TYPE, + schedule: { interval: scheduleInterval }, + state: {}, + params: {}, + }); + } + + private getTaskRunner(): TaskRunCreatorFunction { + return () => { + return { + run: async () => { + const reportingStore = await this.getStore(); + + try { + const recoveredJob = await reportingStore.findStaleReportJob(); + if (!recoveredJob) { + // no reports need to be rescheduled + return; + } + + const { + _id: jobId, + _source: { process_expiration: processExpiration, status }, + } = recoveredJob; + + if (![statuses.JOB_STATUS_PENDING, statuses.JOB_STATUS_PROCESSING].includes(status)) { + throw new Error(`Invalid job status in the monitoring search result: ${status}`); // only pending or processing jobs possibility need rescheduling + } + + if (status === statuses.JOB_STATUS_PENDING) { + this.logger.info( + `${jobId} was scheduled in a previous version and left in [${status}] status. Rescheduling...` + ); + } + + if (status === statuses.JOB_STATUS_PROCESSING) { + const expirationTime = moment(processExpiration); + const overdueValue = moment().valueOf() - expirationTime.valueOf(); + this.logger.info( + `${jobId} status is [${status}] and the expiration time was [${overdueValue}ms] ago. 
Rescheduling...` + ); + } + + // clear process expiration and set status to pending + const report = new Report({ ...recoveredJob, ...recoveredJob._source }); + await reportingStore.prepareReportForRetry(report); // if there is a version conflict response, this just throws and logs an error + + // clear process expiration and reschedule + await this.rescheduleTask(report.toReportTaskJSON(), this.logger); // a recovered report job must be scheduled by only a sinle Kibana instance + } catch (err) { + this.logger.error(err); + } + + return; + }, + + cancel: async () => ({ state: {} }), + }; + }; + } + + public getTaskDefinition() { + return { + type: REPORTING_MONITOR_TYPE, + title: 'Reporting: monitor jobs', + createTaskRunner: this.getTaskRunner(), + maxAttempts: 1, + // round the timeout value up to the nearest second, since Task Manager + // doesn't support milliseconds or > 1s + timeout: Math.ceil(this.timeout.asSeconds()) + 's', + }; + } + + // reschedule the task with TM + private async rescheduleTask(task: ReportTaskParams, logger: LevelLogger) { + if (!this.taskManagerStart) { + throw new Error('Reporting task runner has not been initialized!'); + } + logger.info(`Rescheduling task:${task.id} to retry.`); + + const newTask = await this.reporting.scheduleTask(task); + return newTask; + } + + public getStatus() { + if (this.taskManagerStart) { + return ReportingTaskStatus.INITIALIZED; + } + + return ReportingTaskStatus.UNINITIALIZED; + } +} diff --git a/x-pack/plugins/reporting/server/plugin.test.ts b/x-pack/plugins/reporting/server/plugin.test.ts index c6868782f8cdd7..ce3b8aabcaa8da 100644 --- a/x-pack/plugins/reporting/server/plugin.test.ts +++ b/x-pack/plugins/reporting/server/plugin.test.ts @@ -16,9 +16,10 @@ jest.mock('./browsers/install', () => ({ })); import { coreMock } from 'src/core/server/mocks'; +import { featuresPluginMock } from '../../features/server/mocks'; +import { TaskManagerSetupContract } from '../../task_manager/server'; import { ReportingPlugin } from './plugin'; import { createMockConfigSchema } from './test_helpers'; -import { featuresPluginMock } from '../../features/server/mocks'; const sleep = (time: number) => new Promise((r) => setTimeout(r, time)); @@ -42,6 +43,9 @@ describe('Reporting Plugin', () => { makeUsageCollector: jest.fn(), registerCollector: jest.fn(), }, + taskManager: ({ + registerTaskDefinitions: jest.fn(), + } as unknown) as TaskManagerSetupContract, security: { authc: { getCurrentUser: () => ({ diff --git a/x-pack/plugins/reporting/server/plugin.ts b/x-pack/plugins/reporting/server/plugin.ts index 01ab654de8052b..4e7328cf180039 100644 --- a/x-pack/plugins/reporting/server/plugin.ts +++ b/x-pack/plugins/reporting/server/plugin.ts @@ -11,7 +11,7 @@ import { ReportingCore } from './'; import { initializeBrowserDriverFactory } from './browsers'; import { buildConfig, registerUiSettings, ReportingConfigType } from './config'; import { registerDeprecations } from './deprecations'; -import { createQueueFactory, LevelLogger, ReportingStore } from './lib'; +import { LevelLogger, ReportingStore } from './lib'; import { registerRoutes } from './routes'; import { setFieldFormats } from './services'; import type { @@ -47,7 +47,7 @@ export class ReportingPlugin }); const { http } = core; - const { screenshotMode, features, licensing, security, spaces } = plugins; + const { screenshotMode, features, licensing, security, spaces, taskManager } = plugins; const router = http.createRouter(); const basePath = http.basePath; @@ -60,6 +60,7 @@ export class 
ReportingPlugin router, security, spaces, + taskManager, logger: this.logger, }); @@ -95,21 +96,15 @@ export class ReportingPlugin const browserDriverFactory = await initializeBrowserDriverFactory(reportingCore, this.logger); const store = new ReportingStore(reportingCore, this.logger); - const esqueue = await createQueueFactory( - reportingCore, - store, - this.logger, - core.elasticsearch.client.asInternalUser - ); // starts polling for pending jobs - reportingCore.pluginStart({ + await reportingCore.pluginStart({ browserDriverFactory, savedObjects: core.savedObjects, uiSettings: core.uiSettings, store, esClient: core.elasticsearch.client, data: plugins.data, - esqueue, + taskManager: plugins.taskManager, logger: this.logger, }); diff --git a/x-pack/plugins/reporting/server/routes/generation.ts b/x-pack/plugins/reporting/server/routes/generation.ts index 949836148f9ca6..e90e059c11c67b 100644 --- a/x-pack/plugins/reporting/server/routes/generation.ts +++ b/x-pack/plugins/reporting/server/routes/generation.ts @@ -12,9 +12,9 @@ import { ReportingCore } from '../'; import { API_BASE_URL } from '../../common/constants'; import { LevelLogger as Logger } from '../lib'; import { enqueueJobFactory } from '../lib/enqueue_job'; -import { registerGenerateCsvFromSavedObjectImmediate } from './csv_searchsource_immediate'; import { registerGenerateFromJobParams } from './generate_from_jobparams'; import { registerLegacy } from './legacy'; +import { registerGenerateCsvFromSavedObjectImmediate } from './csv_searchsource_immediate'; import { HandlerFunction } from './types'; const esErrors = elasticsearchErrors as Record; @@ -49,7 +49,7 @@ export function registerJobGenerationRoutes(reporting: ReportingCore, logger: Lo const enqueueJob = enqueueJobFactory(reporting, logger); const report = await enqueueJob(exportTypeId, jobParams, user, context, req); - // return the queue's job information + // return task manager's task information and the download URL const downloadBaseUrl = getDownloadBaseUrl(reporting); return res.ok({ diff --git a/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts b/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts index 5caf9b798ad1e8..695f29dd8d6324 100644 --- a/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts +++ b/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts @@ -8,7 +8,6 @@ jest.mock('../routes'); jest.mock('../usage'); jest.mock('../browsers'); -jest.mock('../lib/create_queue'); import _ from 'lodash'; import * as Rx from 'rxjs'; @@ -42,6 +41,7 @@ export const createMockPluginSetup = (setupMock?: any): ReportingInternalSetup = router: setupMock.router, security: setupMock.security, licensing: { license$: Rx.of({ isAvailable: true, isActive: true, type: 'basic' }) } as any, + taskManager: { registerTaskDefinitions: jest.fn() } as any, logger: createMockLevelLogger(), ...setupMock, }; @@ -61,12 +61,15 @@ export const createMockPluginStart = ( return { browserDriverFactory: startMock.browserDriverFactory, - esqueue: startMock.esqueue, esClient: elasticsearchServiceMock.createClusterClient(), savedObjects: startMock.savedObjects || { getScopedClient: jest.fn() }, uiSettings: startMock.uiSettings || { asScopedToClient: () => ({ get: jest.fn() }) }, data: startMock.data || dataPluginMock.createStartContract(), store, + taskManager: { + schedule: jest.fn().mockImplementation(() => ({ id: 'taskId' })), + ensureScheduled: jest.fn(), + } as any, logger: createMockLevelLogger(), 
...startMock, }; diff --git a/x-pack/plugins/reporting/server/types.ts b/x-pack/plugins/reporting/server/types.ts index 874a4bb0acf149..7df1dce597d56b 100644 --- a/x-pack/plugins/reporting/server/types.ts +++ b/x-pack/plugins/reporting/server/types.ts @@ -14,6 +14,7 @@ import { PluginSetupContract as FeaturesPluginSetup } from '../../features/serve import { LicensingPluginSetup } from '../../licensing/server'; import { AuthenticatedUser, SecurityPluginSetup } from '../../security/server'; import { SpacesPluginSetup } from '../../spaces/server'; +import { TaskManagerSetupContract, TaskManagerStartContract } from '../../task_manager/server'; import { CancellationToken } from '../common'; import { BaseParams, TaskRunResult } from '../common/types'; import { ReportingConfigType } from './config'; @@ -30,12 +31,14 @@ export interface ReportingSetupDeps { features: FeaturesPluginSetup; security?: SecurityPluginSetup; spaces?: SpacesPluginSetup; + taskManager: TaskManagerSetupContract; usageCollection?: UsageCollectionSetup; screenshotMode: ScreenshotModePluginSetup; } export interface ReportingStartDeps { data: DataPluginStart; + taskManager: TaskManagerStartContract; } export interface ReportingSetup { diff --git a/x-pack/test/reporting_api_integration/reporting_without_security/job_apis.ts b/x-pack/test/reporting_api_integration/reporting_without_security/job_apis.ts index 885615effa7795..4c64176dacc8b3 100644 --- a/x-pack/test/reporting_api_integration/reporting_without_security/job_apis.ts +++ b/x-pack/test/reporting_api_integration/reporting_without_security/job_apis.ts @@ -44,11 +44,7 @@ export default function ({ getService }: FtrProviderContext) { attempts: 0, created_by: false, jobtype: 'csv', - max_attempts: 1, status: 'pending', - timeout: 120000, - browser_type: 'chromium', // TODO: remove this field from the API response - // TODO: remove the payload field from the api respones }; forOwn(expectedResJob, (value: any, key: string) => { expect(resJob[key]).to.eql(value, key); @@ -78,12 +74,11 @@ export default function ({ getService }: FtrProviderContext) { .set('kbn-xsrf', 'xxx'); const listingJobs = JSON.parse(listText); + const expectedListJob: Record = { attempts: 0, created_by: false, jobtype: 'csv', - timeout: 120000, - browser_type: 'chromium', }; forOwn(expectedListJob, (value: any, key: string) => { expect(listingJobs[0]._source[key]).to.eql(value, key); @@ -112,8 +107,6 @@ export default function ({ getService }: FtrProviderContext) { attempts: 0, created_by: false, jobtype: 'csv', - timeout: 120000, - browser_type: 'chromium', }; forOwn(expectedListJob, (value: any, key: string) => { expect(listingJobs[0]._source[key]).to.eql(value, key);
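Taken together, the setup and start changes above replace the esqueue polling worker with two Task Manager tasks. A closing sketch of that lifecycle, using illustrative stand-in interfaces rather than the real Kibana contracts (the actual registration happens in core.ts, which this patch changes but this excerpt does not show):

// Illustrative contracts mirroring only the pieces this patch uses; not the real Kibana types.
interface TaskDefinition {
  title: string;
  createTaskRunner: unknown;
  maxAttempts: number;
  timeout: string;
  maxConcurrency?: number;
}
interface TaskManagerSetupLike {
  registerTaskDefinitions(defs: Record<string, TaskDefinition>): void;
}
interface TaskManagerStartLike {
  schedule(task: { taskType: string; params: object; state: object }): Promise<{ id: string }>;
  ensureScheduled(task: {
    id: string;
    taskType: string;
    schedule: { interval: string };
    params: object;
    state: object;
  }): Promise<unknown>;
}
interface ReportingTaskLike {
  getTaskDefinition(): TaskDefinition & { type: string };
}

// Setup phase: both task types become known to Task Manager.
function registerReportingTasks(taskManager: TaskManagerSetupLike, tasks: ReportingTaskLike[]) {
  for (const task of tasks) {
    const { type, ...definition } = task.getTaskDefinition();
    taskManager.registerTaskDefinitions({ [type]: definition });
  }
}

// Start phase: the monitor task is kept on a recurring schedule, while each
// ad-hoc report request becomes a one-shot 'report:execute' task.
async function startReportingTasks(taskManager: TaskManagerStartLike, reportTaskParams: object) {
  await taskManager.ensureScheduled({
    id: 'reports:monitor',
    taskType: 'reports:monitor',
    schedule: { interval: '3s' }, // illustrative; the real value is queue.pollInterval rounded up to whole seconds
    params: {},
    state: {},
  });
  return taskManager.schedule({ taskType: 'report:execute', params: reportTaskParams, state: {} });
}

The functional test changes above reflect the same shift: fields such as timeout and browser_type are no longer part of the freshly created pending document, which matches setReportClaimed writing them only when a Kibana instance claims the job.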