Skip to content

Commit

Permalink
Remove layers of indirection
Browse files Browse the repository at this point in the history
  • Loading branch information
tsullivan committed May 27, 2020
1 parent b52c1a0 commit 086a42a
Show file tree
Hide file tree
Showing 32 changed files with 505 additions and 646 deletions.
162 changes: 64 additions & 98 deletions x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,64 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { Crypto } from '@elastic/node-crypto';
import { i18n } from '@kbn/i18n';
import Hapi from 'hapi';
import { IUiSettingsClient, KibanaRequest } from '../../../../../../../src/core/server';
import { CSV_BOM_CHARS, CSV_JOB_TYPE } from '../../../common/constants';
import { CSV_JOB_TYPE, CONTENT_TYPE_CSV } from '../../../common/constants';
import { ReportingCore } from '../../../server';
import { cryptoFactory, LevelLogger } from '../../../server/lib';
import { getFieldFormats } from '../../../server/services';
import { ESQueueWorkerExecuteFn, ExecuteJobFactory } from '../../../server/types';
import { JobDocPayloadDiscoverCsv } from '../types';
import { fieldFormatMapFactory } from './lib/field_format_map';
import { createGenerateCsv } from './lib/generate_csv';
import { createGenerateCsv } from './generate_csv';
import { fieldFormatMapFactory } from './generate_csv/field_format_map';

const getRequest = async (
  headers: string | undefined,
  basePath: string,
  crypto: Crypto,
  logger: LevelLogger
) => {
  /**
   * Decrypt the job's stored request headers. A missing value or a
   * decryption failure are both logged and re-thrown as a single
   * user-facing error pointing at xpack.reporting.encryptionKey.
   */
  const getDecryptedHeaders = async () => {
    try {
      if (typeof headers !== 'string') {
        const missingMessage = i18n.translate(
          'xpack.reporting.exportTypes.csv.executeJob.missingJobHeadersErrorMessage',
          { defaultMessage: 'Job headers are missing' }
        );
        throw new Error(missingMessage);
      }
      return await crypto.decrypt(headers);
    } catch (error) {
      logger.error(error);
      const decryptFailureMessage = i18n.translate(
        'xpack.reporting.exportTypes.csv.executeJob.failedToDecryptReportJobDataErrorMessage',
        {
          defaultMessage: 'Failed to decrypt report job data. Please ensure that {encryptionKey} is set and re-generate this report. {err}',
          values: { encryptionKey: 'xpack.reporting.encryptionKey', err: error.toString() },
        }
      ); // prettier-ignore
      throw new Error(decryptFailureMessage);
    }
  };

  // Build a stand-in request object so request-scoped services can be
  // resolved while executing a queued job outside a real HTTP request.
  const fakeRawRequest = {
    headers: await getDecryptedHeaders(),
    // This is used by the spaces SavedObjectClientWrapper to determine the existing space.
    // We use the basePath from the saved job, which we'll have post spaces being implemented;
    // or we use the server base path, which uses the default space
    getBasePath: () => basePath,
    path: '/',
    route: { settings: {} },
    url: { href: '/' },
    raw: { req: { url: '/' } },
  };
  return KibanaRequest.from(fakeRawRequest as Hapi.Request);
};

export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
JobDocPayloadDiscoverCsv
Expand All @@ -32,54 +79,8 @@ export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
const elasticsearch = await reporting.getElasticsearchService();
const jobLogger = logger.clone([jobId]);

const {
searchRequest,
fields,
indexPatternSavedObject,
metaFields,
conflictedTypesFields,
headers,
basePath,
} = job;

const decryptHeaders = async () => {
try {
if (typeof headers !== 'string') {
throw new Error(
i18n.translate(
'xpack.reporting.exportTypes.csv.executeJob.missingJobHeadersErrorMessage',
{
defaultMessage: 'Job headers are missing',
}
)
);
}
return await crypto.decrypt(headers);
} catch (err) {
logger.error(err);
throw new Error(
i18n.translate(
'xpack.reporting.exportTypes.csv.executeJob.failedToDecryptReportJobDataErrorMessage',
{
defaultMessage: 'Failed to decrypt report job data. Please ensure that {encryptionKey} is set and re-generate this report. {err}',
values: { encryptionKey: 'xpack.reporting.encryptionKey', err: err.toString() },
}
)
); // prettier-ignore
}
};

const fakeRequest = KibanaRequest.from({
headers: await decryptHeaders(),
// This is used by the spaces SavedObjectClientWrapper to determine the existing space.
// We use the basePath from the saved job, which we'll have post spaces being implemented;
// or we use the server base path, which uses the default space
getBasePath: () => basePath || serverBasePath,
path: '/',
route: { settings: {} },
url: { href: '/' },
raw: { req: { url: '/' } },
} as Hapi.Request);
const { headers, basePath } = job;
const fakeRequest = await getRequest(headers, basePath || serverBasePath, crypto, logger);

const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(fakeRequest);
const callEndpoint = (endpoint: string, clientParams = {}, options = {}) =>
Expand All @@ -88,62 +89,27 @@ export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
const savedObjectsClient = await reporting.getSavedObjectsClient(fakeRequest);
const uiSettingsClient = await reporting.getUiSettingsServiceFactory(savedObjectsClient);

// FIXME move to generateCsv function
const getFormatsMap = async (client: IUiSettingsClient) => {
const fieldFormats = await getFieldFormats().fieldFormatServiceFactory(client);
return fieldFormatMapFactory(indexPatternSavedObject, fieldFormats);
};
const getUiSettings = async (client: IUiSettingsClient) => {
const [separator, quoteValues, timezone] = await Promise.all([
client.get('csv:separator'),
client.get('csv:quoteValues'),
client.get('dateFormat:tz'),
]);

if (timezone === 'Browser') {
logger.warn(
i18n.translate('xpack.reporting.exportTypes.csv.executeJob.dateFormateSetting', {
defaultMessage: 'Kibana Advanced Setting "{dateFormatTimezone}" is set to "Browser". Dates will be formatted as UTC to avoid ambiguity.',
values: { dateFormatTimezone: 'dateFormat:tz' }
})
); // prettier-ignore
}

return {
separator,
quoteValues,
timezone,
};
return fieldFormatMapFactory(job.indexPatternSavedObject, fieldFormats);
};

const [formatsMap, uiSettings] = await Promise.all([
getFormatsMap(uiSettingsClient),
getUiSettings(uiSettingsClient),
]);

const formatsMap = await getFormatsMap(uiSettingsClient);
const generateCsv = createGenerateCsv(jobLogger);
const bom = config.get('csv', 'useByteOrderMarkEncoding') ? CSV_BOM_CHARS : '';

const { content, maxSizeReached, size, csvContainsFormulas, warnings } = await generateCsv({
searchRequest,
fields,
metaFields,
conflictedTypesFields,
callEndpoint,
cancellationToken,
const { content, maxSizeReached, size, csvContainsFormulas, warnings } = await generateCsv(
job,
config,
formatsMap,
settings: {
...uiSettings,
checkForFormulas: config.get('csv', 'checkForFormulas'),
maxSizeBytes: config.get('csv', 'maxSizeBytes'),
scroll: config.get('csv', 'scroll'),
escapeFormulaValues: config.get('csv', 'escapeFormulaValues'),
},
});
uiSettingsClient,
callEndpoint,
cancellationToken
);

// @TODO: Consolidate these one-off warnings into the warnings array (max-size reached and csv contains formulas)
return {
content_type: 'text/csv',
content: bom + content,
content_type: CONTENT_TYPE_CSV,
content,
max_size_reached: maxSizeReached,
size,
csv_contains_formulas: csvContainsFormulas,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,24 +5,20 @@
*/

import expect from '@kbn/expect';

import {
fieldFormats,
FieldFormatsGetConfigFn,
} from '../../../../../../../../src/plugins/data/server';
import { IndexPatternSavedObject } from '../../types';
import { fieldFormatMapFactory } from './field_format_map';

type ConfigValue = { number: { id: string; params: {} } } | string;

describe('field format map', function () {
const indexPatternSavedObject = {
id: 'logstash-*',
type: 'index-pattern',
version: 'abc',
const indexPatternSavedObject: IndexPatternSavedObject = {
timeFieldName: '@timestamp',
title: 'logstash-*',
attributes: {
title: 'logstash-*',
timeFieldName: '@timestamp',
notExpandable: true,
fields: '[{"name":"field1","type":"number"}, {"name":"field2","type":"number"}]',
fieldFormatMap: '{"field1":{"id":"bytes","params":{"pattern":"0,0.[0]b"}}}',
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,7 @@ import {
FieldFormatConfig,
IFieldFormatsRegistry,
} from '../../../../../../../../src/plugins/data/server';

interface IndexPatternSavedObject {
attributes: {
fieldFormatMap: string;
};
id: string;
type: string;
version: string;
}
import { IndexPatternSavedObject } from '../../types';

/**
* Create a map of FieldFormat instances for index pattern fields
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ export function createHitIterator(logger: LevelLogger) {
scrollSettings: ScrollConfig,
callEndpoint: Function,
searchRequest: SearchParams,
cancellationToken: CancellationToken
cancellationToken?: CancellationToken
) {
logger.debug('executing search request');
function search(index: string | boolean | string[] | undefined, body: object) {
Expand Down Expand Up @@ -75,14 +75,14 @@ export function createHitIterator(logger: LevelLogger) {
try {
let { scrollId, hits } = await search(searchRequest.index, searchRequest.body);
try {
while (hits && hits.length && !cancellationToken.isCancelled()) {
while (hits && hits.length && !(cancellationToken && cancellationToken.isCancelled())) {
for (const hit of hits) {
yield hit;
}

({ scrollId, hits } = await scroll(scrollId));

if (cancellationToken.isCancelled()) {
if (cancellationToken && cancellationToken.isCancelled()) {
logger.warning(
'Any remaining scrolling searches have been cancelled by the cancellation token.'
);
Expand Down
Loading

0 comments on commit 086a42a

Please sign in to comment.