diff --git a/.i18nrc.json b/.i18nrc.json
index dc01a10b6a686..57dffa4147e52 100644
--- a/.i18nrc.json
+++ b/.i18nrc.json
@@ -3,6 +3,7 @@
   "console": "src/plugins/console",
   "core": "src/core",
   "discover": "src/plugins/discover",
+  "bfetch": "src/plugins/bfetch",
   "dashboard": "src/plugins/dashboard",
   "data": "src/plugins/data",
   "embeddableApi": "src/plugins/embeddable",
diff --git a/dev_docs/tutorials/expressions.mdx b/dev_docs/tutorials/expressions.mdx
new file mode 100644
index 0000000000000..f0fc1dc595cfa
--- /dev/null
+++ b/dev_docs/tutorials/expressions.mdx
@@ -0,0 +1,129 @@
+---
+id: kibDevTutorialExpressions
+slug: /kibana-dev-docs/tutorials/expressions
+title: Kibana Expressions Service
+summary: Kibana Expressions Service
+date: 2021-06-01
+tags: ['kibana', 'onboarding', 'dev', 'architecture']
+---
+
+## Expressions service
+
+The expressions service exposes a registry of reusable functions primarily used for fetching and transforming data, and a registry of renderer functions that can render data into a DOM element.
+Adding functions is easy, and so is reusing them. An expression is a chain of functions with provided arguments which, given a single input, translates to a single output.
+Each expression is representable by a human-friendly string which a user can type.
+
+### Creating expressions
+
+Here is a very simple expression string:
+
+    essql 'select column1, column2 from myindex' | mapColumn name=column3 fn='{ column1 + 3 }' | table
+
+It consists of three functions:
+
+ - `essql`, which runs the given SQL query against Elasticsearch and returns the results;
+ - `mapColumn`, which computes a new column from existing ones;
+ - `table`, which prepares the data for rendering in a tabular format.
+
+The same expression can also be constructed in code:
+
+```ts
+import { buildExpression, buildExpressionFunction } from 'src/plugins/expressions';
+
+const expression = buildExpression([
+  buildExpressionFunction('essql', { q: 'select column1, column2 from myindex' }),
+  buildExpressionFunction('mapColumn', { name: 'column3', expression: 'column1 + 3' }),
+  buildExpressionFunction('table', {}),
+]);
+```
+
+Note: consumers need to know which plugin registers a specific function with the expressions function registry, and should import the correct type definitions from that plugin.
+
+The `expressions` service is available on both server and client, with similar APIs.
+
+### Running expressions
+
+The expressions service exposes an `execute` method which allows you to execute an expression:
+
+```ts
+const executionContract = expressions.execute(expression, input);
+const result = await executionContract.getData();
+```
+
+Check the full spec of the execute function [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.execution.md)
+
+On the browser side, there are two additional ways to run expressions and render the results.
+
+#### React expression renderer component
+
+This is the easiest way to get expressions rendered inside your application.
+
+```ts
+<ReactExpressionRenderer expression={expression} />
+```
+
+Check the full spec of ReactExpressionRenderer component props [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.reactexpressionrendererprops.md)
+
+#### Expression loader
+
+If you are not using React, you can use the expression loader that the expressions service provides to achieve the same:
+
+```ts
+const handler = loader(domElement, expression, params);
+```
+
+Check the full spec of the expression loader params [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md)
+
+### Creating new expression functions
+
+Creating a new expression function is easy: just call the `registerFunction` method on the expressions service setup contract with your function definition:
+
+```ts
+const functionDefinition = {
+  name: 'clog',
+  args: {},
+  help: 'Outputs the context to the console',
+  fn: (input: unknown) => {
+    // eslint-disable-next-line no-console
+    console.log(input);
+    return input;
+  },
+};
+
+expressions.registerFunction(functionDefinition);
+```
+
+Check the full interface of ExpressionFunctionDefinition [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionfunctiondefinition.md)
+
+### Creating new expression renderers
+
+Adding new renderers is just as easy as adding functions:
+
+```ts
+const rendererDefinition = {
+  name: 'debug',
+  help: 'Outputs the context to the DOM element',
+  render: (domElement, input, handlers) => {
+    domElement.innerText = JSON.stringify(input);
+    handlers.done();
+  },
+};
+
+expressions.registerRenderer(rendererDefinition);
+```
+
+Check the full interface of ExpressionRenderDefinition [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderdefinition.md)
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md
new file mode 100644
index 0000000000000..3a76bc60ee630
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md
@@ -0,0 +1,15 @@
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md)
+
+## DeprecationsDetails.deprecationType property
+
+(optional) Used to distinguish between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab.
+
+Feel free to add new types if necessary. Predefined types are necessary to avoid having similar definitions with different keywords across Kibana deprecations.
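+
+For illustration, a deprecation provider registered by a plugin might set this field (a hypothetical sketch; the feature name, message, and steps are invented, and `RegisterDeprecationsConfig` is assumed to be imported from core's server API):
+
+```ts
+import type { RegisterDeprecationsConfig } from 'src/core/server';
+
+export const sampleDeprecations: RegisterDeprecationsConfig = {
+  getDeprecations: async () => [
+    {
+      level: 'warning',
+      // 'feature' marks a deprecated feature, as opposed to a 'config' deprecation
+      deprecationType: 'feature',
+      message: 'The "sample" feature is deprecated and will be removed in a future release.',
+      correctiveActions: {
+        manualSteps: ['Disable the "sample" feature in Advanced Settings.'],
+      },
+    },
+  ],
+};
+```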
+ +Signature: + +```typescript +deprecationType?: 'config' | 'feature'; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md index bb77e4247711f..6e46ce0b8611f 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md @@ -15,6 +15,7 @@ export interface DeprecationsDetails | Property | Type | Description | | --- | --- | --- | | [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | {
api?: {
path: string;
method: 'POST' | 'PUT';
body?: {
[key: string]: any;
};
};
manualSteps?: string[];
} | | +| [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) | 'config' | 'feature' | (optional) Used to distinguish between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab. Feel free to add new types if necessary. Predefined types are necessary to avoid having similar definitions with different keywords across Kibana deprecations. | | [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string | | | [level](./kibana-plugin-core-server.deprecationsdetails.level.md) | 'warning' | 'critical' | 'fetch_error' | levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain. | | [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md index 99d2fc00a6b7b..812f014b15a6c 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.addScriptedField() method +> Warning: This API is now obsolete. +> +> + Add scripted field to field list Signature: diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md index 77ce6f6f23a67..1792a979bf749 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getNonScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md index 055f07367c96e..b6b3dcb19bac1 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md index aaaebdaccca5d..91f25c09ab197 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.removeScriptedField() method +
+> +> + Remove scripted field from field list Signature: diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md index a86fea3106225..981f28a51ae09 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.addScriptedField() method +> Warning: This API is now obsolete. +> +> + Add scripted field to field list Signature: diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md index 89d79d9b750fa..cff2c5de98de6 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getNonScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md index edfff8ec5efac..62b8f1b62ac78 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md index 3162a7f42dd12..f6beed7389e43 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.removeScriptedField() method +> Warning: This API is now obsolete. +> +> + Remove scripted field from field list Signature: diff --git a/docs/user/production-considerations/production.asciidoc b/docs/user/production-considerations/production.asciidoc index 726747d5d69d0..1ffca4b6ae6ab 100644 --- a/docs/user/production-considerations/production.asciidoc +++ b/docs/user/production-considerations/production.asciidoc @@ -8,7 +8,6 @@ * <> * <> * <> -* <> * <> * <> * <> @@ -22,9 +21,8 @@ Kibana instances that are all connected to the same Elasticsearch instance. While Kibana isn't terribly resource intensive, we still recommend running Kibana separate from your Elasticsearch data or master nodes. To distribute Kibana -traffic across the nodes in your Elasticsearch cluster, you can run Kibana -and an Elasticsearch client node on the same machine. For more information, see -<>. +traffic across the nodes in your Elasticsearch cluster, +you can configure Kibana to use a list of Elasticsearch hosts. 
[float] [[configuring-kibana-shield]] @@ -69,58 +67,6 @@ csp.strict: true See <>. -[float] -[[load-balancing-es]] -=== Load Balancing across multiple {es} nodes -If you have multiple nodes in your Elasticsearch cluster, the easiest way to distribute Kibana requests -across the nodes is to run an Elasticsearch _Coordinating only_ node on the same machine as Kibana. -Elasticsearch Coordinating only nodes are essentially smart load balancers that are part of the cluster. They -process incoming HTTP requests, redirect operations to the other nodes in the cluster as needed, and -gather and return the results. For more information, see -{ref}/modules-node.html[Node] in the Elasticsearch reference. - -To use a local client node to load balance Kibana requests: - -. Install Elasticsearch on the same machine as Kibana. -. Configure the node as a Coordinating only node. In `elasticsearch.yml`, set `node.data`, `node.master` and `node.ingest` to `false`: -+ -[source,js] --------- -# 3. You want this node to be neither master nor data node nor ingest node, but -# to act as a "search load balancer" (fetching data from nodes, -# aggregating results, etc.) -# -node.master: false -node.data: false -node.ingest: false --------- -. Configure the client node to join your Elasticsearch cluster. In `elasticsearch.yml`, set the `cluster.name` to the -name of your cluster. -+ -[source,js] --------- -cluster.name: "my_cluster" --------- -. Check your transport and HTTP host configs in `elasticsearch.yml` under `network.host` and `transport.host`. The `transport.host` needs to be on the network reachable to the cluster members, the `network.host` is the network for the HTTP connection for Kibana (localhost:9200 by default). -+ -[source,js] --------- -network.host: localhost -http.port: 9200 - -# by default transport.host refers to network.host -transport.host: -transport.tcp.port: 9300 - 9400 --------- -. Make sure Kibana is configured to point to your local client node. In `kibana.yml`, the `elasticsearch.hosts` setting should be set to -`["localhost:9200"]`. -+ -[source,js] --------- -# The Elasticsearch instance to use for all your queries. 
-elasticsearch.hosts: ["http://localhost:9200"] --------- - [float] [[load-balancing-kibana]] === Load balancing across multiple Kibana instances diff --git a/package.json b/package.json index f41c85c4c7b80..9a076ee28bc04 100644 --- a/package.json +++ b/package.json @@ -86,6 +86,7 @@ "**/prismjs": "1.23.0", "**/react-syntax-highlighter": "^15.3.1", "**/react-syntax-highlighter/**/highlight.js": "^10.4.1", + "**/refractor": "^3.3.1", "**/request": "^2.88.2", "**/trim": "1.0.1", "**/typescript": "4.1.3", @@ -102,7 +103,7 @@ "@elastic/datemath": "link:bazel-bin/packages/elastic-datemath/npm_module", "@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.4", "@elastic/ems-client": "7.13.0", - "@elastic/eui": "32.1.0", + "@elastic/eui": "33.0.0", "@elastic/filesaver": "1.1.2", "@elastic/good": "^9.0.1-kibana3", "@elastic/maki": "6.3.0", @@ -229,6 +230,7 @@ "expiry-js": "0.1.7", "extract-zip": "^2.0.1", "fast-deep-equal": "^3.1.1", + "fflate": "^0.6.9", "file-saver": "^1.3.8", "file-type": "^10.9.0", "focus-trap-react": "^3.1.1", diff --git a/packages/kbn-crypto/BUILD.bazel b/packages/kbn-crypto/BUILD.bazel index b1723e4120e79..20793e27de629 100644 --- a/packages/kbn-crypto/BUILD.bazel +++ b/packages/kbn-crypto/BUILD.bazel @@ -38,6 +38,7 @@ TYPES_DEPS = [ "@npm//@types/node", "@npm//@types/node-forge", "@npm//@types/testing-library__jest-dom", + "@npm//resize-observer-polyfill" ] DEPS = SRC_DEPS + TYPES_DEPS diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml index c28fd83591960..6ccf6269751b1 100644 --- a/packages/kbn-optimizer/limits.yml +++ b/packages/kbn-optimizer/limits.yml @@ -3,7 +3,7 @@ pageLoadAssetSize: alerting: 106936 apm: 64385 apmOss: 18996 - bfetch: 41874 + bfetch: 51874 canvas: 1066647 charts: 195358 cloud: 21076 diff --git a/packages/kbn-telemetry-tools/BUILD.bazel b/packages/kbn-telemetry-tools/BUILD.bazel index 9a6b4a10bd190..d394b0c93d45f 100644 --- a/packages/kbn-telemetry-tools/BUILD.bazel +++ b/packages/kbn-telemetry-tools/BUILD.bazel @@ -47,6 +47,7 @@ TYPES_DEPS = [ "@npm//@types/node", "@npm//@types/normalize-path", "@npm//@types/testing-library__jest-dom", + "@npm//resize-observer-polyfill" ] DEPS = SRC_DEPS + TYPES_DEPS diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js index 4029ce28faf5b..d3755ed7c5f29 100644 --- a/packages/kbn-ui-shared-deps/entry.js +++ b/packages/kbn-ui-shared-deps/entry.js @@ -44,6 +44,8 @@ export const Theme = require('./theme.ts'); export const Lodash = require('lodash'); export const LodashFp = require('lodash/fp'); +export const Fflate = require('fflate/esm/browser'); + // runtime deps which don't need to be copied across all bundles export const TsLib = require('tslib'); export const KbnAnalytics = require('@kbn/analytics'); diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js index 62ddb09d25add..877bf3df6c039 100644 --- a/packages/kbn-ui-shared-deps/index.js +++ b/packages/kbn-ui-shared-deps/index.js @@ -52,6 +52,7 @@ exports.externals = { '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.Theme.euiDarkVars', lodash: '__kbnSharedDeps__.Lodash', 'lodash/fp': '__kbnSharedDeps__.LodashFp', + fflate: '__kbnSharedDeps__.Fflate', /** * runtime deps which don't need to be copied across all bundles diff --git a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap index 575a247ffeccb..0f5efe667ec2f 100644 --- 
a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap +++ b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap @@ -587,10 +587,12 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` > @@ -1921,6 +1923,7 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` > @@ -1999,6 +2004,7 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` @@ -3084,6 +3094,7 @@ exports[`CollapsibleNav renders the default nav 3`] = ` > @@ -3162,6 +3175,7 @@ exports[`CollapsibleNav renders the default nav 3`] = ` @@ -5469,6 +5473,7 @@ exports[`Header renders 1`] = ` > @@ -5547,6 +5554,7 @@ exports[`Header renders 1`] = `
Flyout content
"`; +exports[`FlyoutService openFlyout() renders a flyout to the DOM 2`] = `"
Flyout content
"`; exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 1`] = ` Array [ @@ -59,4 +59,4 @@ Array [ ] `; -exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"
Flyout content 2
"`; +exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"
Flyout content 2
"`; diff --git a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap index 19ebb5a9113c3..9c39776fcea5c 100644 --- a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap +++ b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap @@ -29,7 +29,7 @@ Array [ ] `; -exports[`ModalService openConfirm() renders a mountpoint confirm message 2`] = `"
Modal content
"`; +exports[`ModalService openConfirm() renders a mountpoint confirm message 2`] = `"
Modal content
"`; exports[`ModalService openConfirm() renders a string confirm message 1`] = ` Array [ @@ -49,7 +49,7 @@ Array [ ] `; -exports[`ModalService openConfirm() renders a string confirm message 2`] = `"

Some message

"`; +exports[`ModalService openConfirm() renders a string confirm message 2`] = `"

Some message

"`; exports[`ModalService openConfirm() with a currently active confirm replaces the current confirm with the new one 1`] = ` Array [ @@ -131,7 +131,7 @@ Array [ ] `; -exports[`ModalService openModal() renders a modal to the DOM 2`] = `"
Modal content
"`; +exports[`ModalService openModal() renders a modal to the DOM 2`] = `"
Modal content
"`; exports[`ModalService openModal() with a currently active confirm replaces the current confirm with the new one 1`] = ` Array [ diff --git a/src/core/server/deprecations/deprecations_factory.mock.ts b/src/core/server/deprecations/deprecations_factory.mock.ts new file mode 100644 index 0000000000000..91ae4e6fa9af9 --- /dev/null +++ b/src/core/server/deprecations/deprecations_factory.mock.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import type { PublicMethodsOf } from '@kbn/utility-types'; +import type { DeprecationsFactory } from './deprecations_factory'; +type DeprecationsFactoryContract = PublicMethodsOf; + +const createDeprecationsFactoryMock = () => { + const mocked: jest.Mocked = { + getRegistry: jest.fn(), + getDeprecations: jest.fn(), + getAllDeprecations: jest.fn(), + }; + + mocked.getDeprecations.mockResolvedValue([]); + mocked.getAllDeprecations.mockResolvedValue([]); + return mocked as jest.Mocked; +}; + +export const mockDeprecationsFactory = { + create: createDeprecationsFactoryMock, +}; diff --git a/src/core/server/deprecations/deprecations_factory.test.ts b/src/core/server/deprecations/deprecations_factory.test.ts index 469451b0020c0..187f3880f9998 100644 --- a/src/core/server/deprecations/deprecations_factory.test.ts +++ b/src/core/server/deprecations/deprecations_factory.test.ts @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -import { GetDeprecationsContext } from './types'; +import type { GetDeprecationsContext } from './types'; import { DeprecationsFactory } from './deprecations_factory'; import { loggerMock } from '../logging/logger.mock'; diff --git a/src/core/server/deprecations/deprecations_registry.mock.ts b/src/core/server/deprecations/deprecations_registry.mock.ts new file mode 100644 index 0000000000000..bb178c3935cdc --- /dev/null +++ b/src/core/server/deprecations/deprecations_registry.mock.ts @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import type { PublicMethodsOf } from '@kbn/utility-types'; +import type { DeprecationsRegistry } from './deprecations_registry'; +import type { GetDeprecationsContext } from './types'; +import { elasticsearchClientMock } from '../elasticsearch/client/mocks'; +import { savedObjectsClientMock } from '../saved_objects/service/saved_objects_client.mock'; +type DeprecationsRegistryContract = PublicMethodsOf; + +const createDeprecationsRegistryMock = () => { + const mocked: jest.Mocked = { + registerDeprecations: jest.fn(), + getDeprecations: jest.fn(), + }; + + return mocked as jest.Mocked; +}; + +const createGetDeprecationsContextMock = () => { + const mocked: jest.Mocked = { + esClient: elasticsearchClientMock.createScopedClusterClient(), + savedObjectsClient: savedObjectsClientMock.create(), + }; + + return mocked; +}; + +export const mockDeprecationsRegistry = { + create: createDeprecationsRegistryMock, + createGetDeprecationsContext: createGetDeprecationsContextMock, +}; diff --git a/src/core/server/deprecations/deprecations_registry.test.ts b/src/core/server/deprecations/deprecations_registry.test.ts index 507677a531861..82b09beaa5123 100644 --- a/src/core/server/deprecations/deprecations_registry.test.ts +++ b/src/core/server/deprecations/deprecations_registry.test.ts @@ -7,7 +7,7 @@ */ /* eslint-disable dot-notation */ -import { RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; +import type { RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; import { DeprecationsRegistry } from './deprecations_registry'; describe('DeprecationsRegistry', () => { diff --git a/src/core/server/deprecations/deprecations_registry.ts b/src/core/server/deprecations/deprecations_registry.ts index f92d807514b82..cc05473923ac8 100644 --- a/src/core/server/deprecations/deprecations_registry.ts +++ b/src/core/server/deprecations/deprecations_registry.ts @@ -6,7 +6,11 @@ * Side Public License, v 1. */ -import { DeprecationsDetails, RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; +import type { + DeprecationsDetails, + RegisterDeprecationsConfig, + GetDeprecationsContext, +} from './types'; export class DeprecationsRegistry { private readonly deprecationContexts: RegisterDeprecationsConfig[] = []; diff --git a/src/core/server/deprecations/deprecations_service.test.ts b/src/core/server/deprecations/deprecations_service.test.ts new file mode 100644 index 0000000000000..d1ed7a83402cb --- /dev/null +++ b/src/core/server/deprecations/deprecations_service.test.ts @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +/* eslint-disable dot-notation */ +import { DeprecationsService } from './deprecations_service'; +import { httpServiceMock } from '../http/http_service.mock'; +import { mockRouter } from '../http/router/router.mock'; +import { mockCoreContext } from '../core_context.mock'; +import { mockDeprecationsFactory } from './deprecations_factory.mock'; +import { mockDeprecationsRegistry } from './deprecations_registry.mock'; + +describe('DeprecationsService', () => { + const coreContext = mockCoreContext.create(); + beforeEach(() => jest.clearAllMocks()); + + describe('#setup', () => { + const http = httpServiceMock.createInternalSetupContract(); + const router = mockRouter.create(); + http.createRouter.mockReturnValue(router); + const deprecationsCoreSetupDeps = { http }; + + it('registers routes', () => { + const deprecationsService = new DeprecationsService(coreContext); + deprecationsService.setup(deprecationsCoreSetupDeps); + // Registers correct base api path + expect(http.createRouter).toBeCalledWith('/api/deprecations'); + // registers get route '/' + expect(router.get).toHaveBeenCalledTimes(1); + expect(router.get).toHaveBeenCalledWith({ path: '/', validate: false }, expect.any(Function)); + }); + + it('calls registerConfigDeprecationsInfo', () => { + const deprecationsService = new DeprecationsService(coreContext); + const mockRegisterConfigDeprecationsInfo = jest.fn(); + deprecationsService['registerConfigDeprecationsInfo'] = mockRegisterConfigDeprecationsInfo; + deprecationsService.setup(deprecationsCoreSetupDeps); + expect(mockRegisterConfigDeprecationsInfo).toBeCalledTimes(1); + }); + }); + + describe('#registerConfigDeprecationsInfo', () => { + const deprecationsFactory = mockDeprecationsFactory.create(); + const deprecationsRegistry = mockDeprecationsRegistry.create(); + const getDeprecationsContext = mockDeprecationsRegistry.createGetDeprecationsContext(); + + it('registers config deprecations', () => { + const deprecationsService = new DeprecationsService(coreContext); + coreContext.configService.getHandledDeprecatedConfigs.mockReturnValue([ + [ + 'testDomain', + [ + { + message: 'testMessage', + documentationUrl: 'testDocUrl', + correctiveActions: { + manualSteps: [ + 'Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.', + 'Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role.', + ], + }, + }, + ], + ], + ]); + + deprecationsFactory.getRegistry.mockReturnValue(deprecationsRegistry); + deprecationsService['registerConfigDeprecationsInfo'](deprecationsFactory); + + expect(coreContext.configService.getHandledDeprecatedConfigs).toBeCalledTimes(1); + expect(deprecationsFactory.getRegistry).toBeCalledTimes(1); + expect(deprecationsFactory.getRegistry).toBeCalledWith('testDomain'); + expect(deprecationsRegistry.registerDeprecations).toBeCalledTimes(1); + const configDeprecations = deprecationsRegistry.registerDeprecations.mock.calls[0][0].getDeprecations( + getDeprecationsContext + ); + expect(configDeprecations).toMatchInlineSnapshot(` + Array [ + Object { + "correctiveActions": Object { + "manualSteps": Array [ + "Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.", + "Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role.", + ], + }, + "deprecationType": "config", + "documentationUrl": "testDocUrl", + "level": "critical", + "message": 
"testMessage", + }, + ] + `); + }); + }); +}); diff --git a/src/core/server/deprecations/deprecations_service.ts b/src/core/server/deprecations/deprecations_service.ts index 8eca1ba5790c5..205dd964468c1 100644 --- a/src/core/server/deprecations/deprecations_service.ts +++ b/src/core/server/deprecations/deprecations_service.ts @@ -11,8 +11,6 @@ import { RegisterDeprecationsConfig } from './types'; import { registerRoutes } from './routes'; import { CoreContext } from '../core_context'; -import { CoreUsageDataSetup } from '../core_usage_data'; -import { InternalElasticsearchServiceSetup } from '../elasticsearch'; import { CoreService } from '../../types'; import { InternalHttpServiceSetup } from '../http'; import { Logger } from '../logging'; @@ -112,8 +110,6 @@ export interface InternalDeprecationsServiceSetup { /** @internal */ export interface DeprecationsSetupDeps { http: InternalHttpServiceSetup; - elasticsearch: InternalElasticsearchServiceSetup; - coreUsageData: CoreUsageDataSetup; } /** @internal */ @@ -156,6 +152,7 @@ export class DeprecationsService implements CoreService { return { level: 'critical', + deprecationType: 'config', message, correctiveActions: correctiveActions ?? {}, documentationUrl, diff --git a/src/core/server/deprecations/types.ts b/src/core/server/deprecations/types.ts index 31734b51b46bd..50c947591fdf4 100644 --- a/src/core/server/deprecations/types.ts +++ b/src/core/server/deprecations/types.ts @@ -25,6 +25,16 @@ export interface DeprecationsDetails { * - fetch_error: Deprecations service failed to grab the deprecation details for the domain. */ level: 'warning' | 'critical' | 'fetch_error'; + /** + * (optional) Used to identify between different deprecation types. + * Example use case: in Upgrade Assistant, we may want to allow the user to sort by + * deprecation type or show each type in a separate tab. + * + * Feel free to add new types if necessary. + * Predefined types are necessary to reduce having similar definitions with different keywords + * across kibana deprecations. + */ + deprecationType?: 'config' | 'feature'; /* (optional) link to the documentation for more details on the deprecation. */ documentationUrl?: string; /* corrective action needed to fix this deprecation. 
*/ diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.test.ts b/src/core/server/saved_objects/migrationsv2/actions/index.test.ts index df74a4e1282e4..05da335d70884 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/index.test.ts +++ b/src/core/server/saved_objects/migrationsv2/actions/index.test.ts @@ -37,7 +37,7 @@ describe('actions', () => { describe('fetchIndices', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.fetchIndices(client, ['my_index']); + const task = Actions.fetchIndices({ client, indices: ['my_index'] }); try { await task(); } catch (e) { @@ -49,7 +49,7 @@ describe('actions', () => { describe('setWriteBlock', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.setWriteBlock(client, 'my_index'); + const task = Actions.setWriteBlock({ client, index: 'my_index' }); try { await task(); } catch (e) { @@ -58,7 +58,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -66,7 +69,11 @@ describe('actions', () => { describe('cloneIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.cloneIndex(client, 'my_source_index', 'my_target_index'); + const task = Actions.cloneIndex({ + client, + source: 'my_source_index', + target: 'my_target_index', + }); try { await task(); } catch (e) { @@ -75,7 +82,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -95,7 +105,7 @@ describe('actions', () => { describe('openPit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.openPit(client, 'my_index'); + const task = Actions.openPit({ client, index: 'my_index' }); try { await task(); } catch (e) { @@ -107,7 +117,12 @@ describe('actions', () => { describe('readWithPit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.readWithPit(client, 'pitId', { match_all: {} }, 10_000); + const task = Actions.readWithPit({ + client, + pitId: 'pitId', + query: { match_all: {} }, + batchSize: 10_000, + }); try { await task(); } catch (e) { @@ -119,7 +134,7 @@ describe('actions', () => { describe('closePit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.closePit(client, 'pitId'); + const task = Actions.closePit({ client, pitId: 'pitId' }); try { await task(); } catch (e) { @@ -131,14 +146,14 @@ describe('actions', () => { describe('reindex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.reindex( + const task = Actions.reindex({ client, - 'my_source_index', - 'my_target_index', - 
Option.none, - false, - {} - ); + sourceIndex: 'my_source_index', + targetIndex: 'my_target_index', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: {}, + }); try { await task(); } catch (e) { @@ -150,7 +165,7 @@ describe('actions', () => { describe('waitForReindexTask', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.waitForReindexTask(client, 'my task id', '60s'); + const task = Actions.waitForReindexTask({ client, taskId: 'my task id', timeout: '60s' }); try { await task(); } catch (e) { @@ -160,7 +175,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -168,7 +186,11 @@ describe('actions', () => { describe('waitForPickupUpdatedMappingsTask', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.waitForPickupUpdatedMappingsTask(client, 'my task id', '60s'); + const task = Actions.waitForPickupUpdatedMappingsTask({ + client, + taskId: 'my task id', + timeout: '60s', + }); try { await task(); } catch (e) { @@ -178,7 +200,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -186,7 +211,7 @@ describe('actions', () => { describe('updateAliases', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.updateAliases(client, []); + const task = Actions.updateAliases({ client, aliasActions: [] }); try { await task(); } catch (e) { @@ -196,7 +221,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -204,7 +232,11 @@ describe('actions', () => { describe('createIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.createIndex(client, 'new_index', { properties: {} }); + const task = Actions.createIndex({ + client, + indexName: 'new_index', + mappings: { properties: {} }, + }); try { await task(); } catch (e) { @@ -214,7 +246,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -222,7 
+257,11 @@ describe('actions', () => { describe('updateAndPickupMappings', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.updateAndPickupMappings(client, 'new_index', { properties: {} }); + const task = Actions.updateAndPickupMappings({ + client, + index: 'new_index', + mappings: { properties: {} }, + }); try { await task(); } catch (e) { @@ -276,7 +315,12 @@ describe('actions', () => { describe('bulkOverwriteTransformedDocuments', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.bulkOverwriteTransformedDocuments(client, 'new_index', [], 'wait_for'); + const task = Actions.bulkOverwriteTransformedDocuments({ + client, + index: 'new_index', + transformedDocs: [], + refresh: 'wait_for', + }); try { await task(); } catch (e) { @@ -289,7 +333,7 @@ describe('actions', () => { describe('refreshIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.refreshIndex(client, 'target_index'); + const task = Actions.refreshIndex({ client, targetIndex: 'target_index' }); try { await task(); } catch (e) { diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.ts b/src/core/server/saved_objects/migrationsv2/actions/index.ts index c2e0476960c3b..905d64947298e 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/index.ts +++ b/src/core/server/saved_objects/migrationsv2/actions/index.ts @@ -68,20 +68,26 @@ export type FetchIndexResponse = Record< { aliases: Record; mappings: IndexMapping; settings: unknown } >; +/** @internal */ +export interface FetchIndicesParams { + client: ElasticsearchClient; + indices: string[]; +} + /** * Fetches information about the given indices including aliases, mappings and * settings. */ -export const fetchIndices = ( - client: ElasticsearchClient, - indicesToFetch: string[] -): TaskEither.TaskEither => +export const fetchIndices = ({ + client, + indices, +}: FetchIndicesParams): TaskEither.TaskEither => // @ts-expect-error @elastic/elasticsearch IndexState.alias and IndexState.mappings should be required () => { return client.indices .get( { - index: indicesToFetch, + index: indices, ignore_unavailable: true, // Don't return an error for missing indices. Note this *will* include closed indices, the docs are misleading https://github.com/elastic/elasticsearch/issues/63607 }, { ignore: [404], maxRetries: 0 } @@ -96,6 +102,12 @@ export interface IndexNotFound { type: 'index_not_found_exception'; index: string; } + +/** @internal */ +export interface SetWriteBlockParams { + client: ElasticsearchClient; + index: string; +} /** * Sets a write block in place for the given index. 
If the response includes * `acknowledged: true` all in-progress writes have drained and no further @@ -105,10 +117,10 @@ export interface IndexNotFound { * include `shards_acknowledged: true` but once the block is in place, * subsequent calls return `shards_acknowledged: false` */ -export const setWriteBlock = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither< +export const setWriteBlock = ({ + client, + index, +}: SetWriteBlockParams): TaskEither.TaskEither< IndexNotFound | RetryableEsClientError, 'set_write_block_succeeded' > => () => { @@ -145,13 +157,21 @@ export const setWriteBlock = ( ); }; +/** @internal */ +export interface RemoveWriteBlockParams { + client: ElasticsearchClient; + index: string; +} /** * Removes a write block from an index */ -export const removeWriteBlock = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither => () => { +export const removeWriteBlock = ({ + client, + index, +}: RemoveWriteBlockParams): TaskEither.TaskEither< + RetryableEsClientError, + 'remove_write_block_succeeded' +> => () => { return client.indices .putSettings<{ acknowledged: boolean; @@ -182,6 +202,12 @@ export const removeWriteBlock = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface WaitForIndexStatusYellowParams { + client: ElasticsearchClient; + index: string; + timeout?: string; +} /** * A yellow index status means the index's primary shard is allocated and the * index is ready for searching/indexing documents, but ES wasn't able to @@ -193,11 +219,11 @@ export const removeWriteBlock = ( * yellow at any point in the future. So ultimately data-redundancy is up to * users to maintain. */ -export const waitForIndexStatusYellow = ( - client: ElasticsearchClient, - index: string, - timeout = DEFAULT_TIMEOUT -): TaskEither.TaskEither => () => { +export const waitForIndexStatusYellow = ({ + client, + index, + timeout = DEFAULT_TIMEOUT, +}: WaitForIndexStatusYellowParams): TaskEither.TaskEither => () => { return client.cluster .health({ index, wait_for_status: 'yellow', timeout }) .then(() => { @@ -208,6 +234,14 @@ export const waitForIndexStatusYellow = ( export type CloneIndexResponse = AcknowledgeResponse; +/** @internal */ +export interface CloneIndexParams { + client: ElasticsearchClient; + source: string; + target: string; + /** only used for testing */ + timeout?: string; +} /** * Makes a clone of the source index into the target. * @@ -218,13 +252,15 @@ export type CloneIndexResponse = AcknowledgeResponse; * - the first call will wait up to 120s for the cluster state and all shards * to be updated. */ -export const cloneIndex = ( - client: ElasticsearchClient, - source: string, - target: string, - /** only used for testing */ - timeout = DEFAULT_TIMEOUT -): TaskEither.TaskEither => { +export const cloneIndex = ({ + client, + source, + target, + timeout = DEFAULT_TIMEOUT, +}: CloneIndexParams): TaskEither.TaskEither< + RetryableEsClientError | IndexNotFound, + CloneIndexResponse +> => { const cloneTask: TaskEither.TaskEither< RetryableEsClientError | IndexNotFound, AcknowledgeResponse @@ -302,7 +338,7 @@ export const cloneIndex = ( } else { // Otherwise, wait until the target index has a 'green' status. 
return pipe( - waitForIndexStatusYellow(client, target, timeout), + waitForIndexStatusYellow({ client, index: target, timeout }), TaskEither.map((value) => { /** When the index status is 'green' we know that all shards were started */ return { acknowledged: true, shardsAcknowledged: true }; @@ -352,16 +388,22 @@ const catchWaitForTaskCompletionTimeout = ( } }; +/** @internal */ +export interface WaitForTaskParams { + client: ElasticsearchClient; + taskId: string; + timeout: string; +} /** * Blocks for up to 60s or until a task completes. * * TODO: delete completed tasks */ -const waitForTask = ( - client: ElasticsearchClient, - taskId: string, - timeout: string -): TaskEither.TaskEither< +const waitForTask = ({ + client, + taskId, + timeout, +}: WaitForTaskParams): TaskEither.TaskEither< RetryableEsClientError | WaitForTaskCompletionTimeout, WaitForTaskResponse > => () => { @@ -433,16 +475,21 @@ export interface OpenPitResponse { pitId: string; } +/** @internal */ +export interface OpenPitParams { + client: ElasticsearchClient; + index: string; +} // how long ES should keep PIT alive const pitKeepAlive = '10m'; /* * Creates a lightweight view of data when the request has been initiated. * See https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html * */ -export const openPit = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither => () => { +export const openPit = ({ + client, + index, +}: OpenPitParams): TaskEither.TaskEither => () => { return client .openPointInTime({ index, @@ -459,17 +506,28 @@ export interface ReadWithPit { readonly totalHits: number | undefined; } +/** @internal */ + +export interface ReadWithPitParams { + client: ElasticsearchClient; + pitId: string; + query: estypes.QueryContainer; + batchSize: number; + searchAfter?: number[]; + seqNoPrimaryTerm?: boolean; +} + /* * Requests documents from the index using PIT mechanism. * */ -export const readWithPit = ( - client: ElasticsearchClient, - pitId: string, - query: estypes.QueryContainer, - batchSize: number, - searchAfter?: number[], - seqNoPrimaryTerm?: boolean -): TaskEither.TaskEither => () => { +export const readWithPit = ({ + client, + pitId, + query, + batchSize, + searchAfter, + seqNoPrimaryTerm, +}: ReadWithPitParams): TaskEither.TaskEither => () => { return client .search({ seq_no_primary_term: seqNoPrimaryTerm, @@ -516,14 +574,19 @@ export const readWithPit = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface ClosePitParams { + client: ElasticsearchClient; + pitId: string; +} /* * Closes PIT. 
* See https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html * */ -export const closePit = ( - client: ElasticsearchClient, - pitId: string -): TaskEither.TaskEither => () => { +export const closePit = ({ + client, + pitId, +}: ClosePitParams): TaskEither.TaskEither => () => { return client .closePointInTime({ body: { id: pitId }, @@ -537,27 +600,42 @@ export const closePit = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface TransformDocsParams { + transformRawDocs: TransformRawDocs; + outdatedDocuments: SavedObjectsRawDoc[]; +} /* * Transform outdated docs * */ -export const transformDocs = ( - transformRawDocs: TransformRawDocs, - outdatedDocuments: SavedObjectsRawDoc[] -): TaskEither.TaskEither => - transformRawDocs(outdatedDocuments); +export const transformDocs = ({ + transformRawDocs, + outdatedDocuments, +}: TransformDocsParams): TaskEither.TaskEither< + DocumentsTransformFailed, + DocumentsTransformSuccess +> => transformRawDocs(outdatedDocuments); /** @internal */ export interface ReindexResponse { taskId: string; } +/** @internal */ +export interface RefreshIndexParams { + client: ElasticsearchClient; + targetIndex: string; +} /** * Wait for Elasticsearch to reindex all the changes. */ -export const refreshIndex = ( - client: ElasticsearchClient, - targetIndex: string -): TaskEither.TaskEither => () => { +export const refreshIndex = ({ + client, + targetIndex, +}: RefreshIndexParams): TaskEither.TaskEither< + RetryableEsClientError, + { refreshed: boolean } +> => () => { return client.indices .refresh({ index: targetIndex, @@ -567,6 +645,19 @@ export const refreshIndex = ( }) .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface ReindexParams { + client: ElasticsearchClient; + sourceIndex: string; + targetIndex: string; + reindexScript: Option.Option; + requireAlias: boolean; + /* When reindexing we use a source query to exclude saved objects types which + * are no longer used. These saved objects will still be kept in the outdated + * index for backup purposes, but won't be available in the upgraded index. + */ + unusedTypesQuery: estypes.QueryContainer; +} /** * Reindex documents from the `sourceIndex` into the `targetIndex`. Returns a * task ID which can be tracked for progress. @@ -575,18 +666,14 @@ export const refreshIndex = ( * this in parallel. By using `op_type: 'create', conflicts: 'proceed'` there * will be only one write per reindexed document. */ -export const reindex = ( - client: ElasticsearchClient, - sourceIndex: string, - targetIndex: string, - reindexScript: Option.Option, - requireAlias: boolean, - /* When reindexing we use a source query to exclude saved objects types which - * are no longer used. These saved objects will still be kept in the outdated - * index for backup purposes, but won't be available in the upgraded index. - */ - unusedTypesQuery: estypes.QueryContainer -): TaskEither.TaskEither => () => { +export const reindex = ({ + client, + sourceIndex, + targetIndex, + reindexScript, + requireAlias, + unusedTypesQuery, +}: ReindexParams): TaskEither.TaskEither => () => { return client .reindex({ // Require targetIndex to be an alias. 
Prevents a new index from being @@ -688,11 +775,18 @@ export const waitForReindexTask = flow( ) ); -export const verifyReindex = ( - client: ElasticsearchClient, - sourceIndex: string, - targetIndex: string -): TaskEither.TaskEither< +/** @internal */ +export interface VerifyReindexParams { + client: ElasticsearchClient; + sourceIndex: string; + targetIndex: string; +} + +export const verifyReindex = ({ + client, + sourceIndex, + targetIndex, +}: VerifyReindexParams): TaskEither.TaskEither< RetryableEsClientError | { type: 'verify_reindex_failed' }, 'verify_reindex_succeeded' > => () => { @@ -762,13 +856,18 @@ export type AliasAction = | { remove: { index: string; alias: string; must_exist: boolean } } | { add: { index: string; alias: string } }; +/** @internal */ +export interface UpdateAliasesParams { + client: ElasticsearchClient; + aliasActions: AliasAction[]; +} /** * Calls the Update index alias API `_alias` with the provided alias actions. */ -export const updateAliases = ( - client: ElasticsearchClient, - aliasActions: AliasAction[] -): TaskEither.TaskEither< +export const updateAliases = ({ + client, + aliasActions, +}: UpdateAliasesParams): TaskEither.TaskEither< IndexNotFound | AliasNotFound | RemoveIndexNotAConcreteIndex | RetryableEsClientError, 'update_aliases_succeeded' > => () => { @@ -836,6 +935,14 @@ function aliasArrayToRecord(aliases: string[]): Record { } return result; } + +/** @internal */ +export interface CreateIndexParams { + client: ElasticsearchClient; + indexName: string; + mappings: IndexMapping; + aliases?: string[]; +} /** * Creates an index with the given mappings * @@ -846,12 +953,12 @@ function aliasArrayToRecord(aliases: string[]): Record { * - the first call will wait up to 120s for the cluster state and all shards * to be updated. */ -export const createIndex = ( - client: ElasticsearchClient, - indexName: string, - mappings: IndexMapping, - aliases: string[] = [] -): TaskEither.TaskEither => { +export const createIndex = ({ + client, + indexName, + mappings, + aliases = [], +}: CreateIndexParams): TaskEither.TaskEither => { const createIndexTask: TaskEither.TaskEither< RetryableEsClientError, AcknowledgeResponse @@ -930,7 +1037,7 @@ export const createIndex = ( } else { // Otherwise, wait until the target index has a 'yellow' status. return pipe( - waitForIndexStatusYellow(client, indexName, DEFAULT_TIMEOUT), + waitForIndexStatusYellow({ client, index: indexName, timeout: DEFAULT_TIMEOUT }), TaskEither.map(() => { /** When the index status is 'yellow' we know that all shards were started */ return 'create_index_succeeded'; @@ -946,15 +1053,24 @@ export interface UpdateAndPickupMappingsResponse { taskId: string; } +/** @internal */ +export interface UpdateAndPickupMappingsParams { + client: ElasticsearchClient; + index: string; + mappings: IndexMapping; +} /** * Updates an index's mappings and runs an pickupUpdatedMappings task so that the mapping * changes are "picked up". Returns a taskId to track progress. 
*/ -export const updateAndPickupMappings = ( - client: ElasticsearchClient, - index: string, - mappings: IndexMapping -): TaskEither.TaskEither => { +export const updateAndPickupMappings = ({ + client, + index, + mappings, +}: UpdateAndPickupMappingsParams): TaskEither.TaskEither< + RetryableEsClientError, + UpdateAndPickupMappingsResponse +> => { const putMappingTask: TaskEither.TaskEither< RetryableEsClientError, 'update_mappings_succeeded' @@ -1053,16 +1169,26 @@ export const searchForOutdatedDocuments = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface BulkOverwriteTransformedDocumentsParams { + client: ElasticsearchClient; + index: string; + transformedDocs: SavedObjectsRawDoc[]; + refresh?: estypes.Refresh; +} /** * Write the up-to-date transformed documents to the index, overwriting any * documents that are still on their outdated version. */ -export const bulkOverwriteTransformedDocuments = ( - client: ElasticsearchClient, - index: string, - transformedDocs: SavedObjectsRawDoc[], - refresh: estypes.Refresh -): TaskEither.TaskEither => () => { +export const bulkOverwriteTransformedDocuments = ({ + client, + index, + transformedDocs, + refresh = false, +}: BulkOverwriteTransformedDocumentsParams): TaskEither.TaskEither< + RetryableEsClientError, + 'bulk_index_succeeded' +> => () => { return client .bulk({ // Because we only add aliases in the MARK_VERSION_INDEX_READY step we diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts index d0158a4c68f24..67a2685caf3d6 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts @@ -67,9 +67,13 @@ describe('migration actions', () => { client = start.elasticsearch.client.asInternalUser; // Create test fixture data: - await createIndex(client, 'existing_index_with_docs', { - dynamic: true, - properties: {}, + await createIndex({ + client, + indexName: 'existing_index_with_docs', + mappings: { + dynamic: true, + properties: {}, + }, })(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -78,25 +82,30 @@ describe('migration actions', () => { { _source: { title: 'saved object 4', type: 'another_unused_type' } }, { _source: { title: 'f-agent-event 5', type: 'f_agent_event' } }, ] as unknown) as SavedObjectsRawDoc[]; - await bulkOverwriteTransformedDocuments( + await bulkOverwriteTransformedDocuments({ + client, + index: 'existing_index_with_docs', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); + + await createIndex({ client, indexName: 'existing_index_2', mappings: { properties: {} } })(); + await createIndex({ client, - 'existing_index_with_docs', - sourceDocs, - 'wait_for' - )(); - - await createIndex(client, 'existing_index_2', { properties: {} })(); - await createIndex(client, 'existing_index_with_write_block', { properties: {} })(); - await bulkOverwriteTransformedDocuments( + indexName: 'existing_index_with_write_block', + mappings: { properties: {} }, + })(); + await bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_write_block', - sourceDocs, - 'wait_for' - )(); - await setWriteBlock(client, 'existing_index_with_write_block')(); - await updateAliases(client, [ - { add: { index: 'existing_index_2', alias: 'existing_index_2_alias' } }, - ])(); + index: 'existing_index_with_write_block', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); + 
await setWriteBlock({ client, index: 'existing_index_with_write_block' })(); + await updateAliases({ + client, + aliasActions: [{ add: { index: 'existing_index_2', alias: 'existing_index_2_alias' } }], + })(); }); afterAll(async () => { @@ -107,7 +116,7 @@ describe('migration actions', () => { describe('fetchIndices', () => { it('resolves right empty record if no indices were found', async () => { expect.assertions(1); - const task = fetchIndices(client, ['no_such_index']); + const task = fetchIndices({ client, indices: ['no_such_index'] }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -117,10 +126,10 @@ describe('migration actions', () => { }); it('resolves right record with found indices', async () => { expect.assertions(1); - const res = (await fetchIndices(client, [ - 'no_such_index', - 'existing_index_with_docs', - ])()) as Either.Right; + const res = (await fetchIndices({ + client, + indices: ['no_such_index', 'existing_index_with_docs'], + })()) as Either.Right; expect(res.right).toEqual( expect.objectContaining({ @@ -136,11 +145,15 @@ describe('migration actions', () => { describe('setWriteBlock', () => { beforeAll(async () => { - await createIndex(client, 'new_index_without_write_block', { properties: {} })(); + await createIndex({ + client, + indexName: 'new_index_without_write_block', + mappings: { properties: {} }, + })(); }); it('resolves right when setting the write block succeeds', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'new_index_without_write_block'); + const task = setWriteBlock({ client, index: 'new_index_without_write_block' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -150,7 +163,7 @@ describe('migration actions', () => { }); it('resolves right when setting a write block on an index that already has one', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'existing_index_with_write_block'); + const task = setWriteBlock({ client, index: 'existing_index_with_write_block' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -160,7 +173,7 @@ describe('migration actions', () => { }); it('once resolved, prevents further writes to the index', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'new_index_without_write_block'); + const task = setWriteBlock({ client, index: 'new_index_without_write_block' }); await task(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -169,17 +182,17 @@ describe('migration actions', () => { { _source: { title: 'doc 4' } }, ] as unknown) as SavedObjectsRawDoc[]; await expect( - bulkOverwriteTransformedDocuments( + bulkOverwriteTransformedDocuments({ client, - 'new_index_without_write_block', - sourceDocs, - 'wait_for' - )() + index: 'new_index_without_write_block', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })() ).rejects.toMatchObject(expect.anything()); }); it('resolves left index_not_found_exception when the index does not exist', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'no_such_index'); + const task = setWriteBlock({ client, index: 'no_such_index' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -194,13 +207,21 @@ describe('migration actions', () => { describe('removeWriteBlock', () => { beforeAll(async () => { - await createIndex(client, 'existing_index_without_write_block_2', { properties: {} })(); - await createIndex(client, 
'existing_index_with_write_block_2', { properties: {} })(); - await setWriteBlock(client, 'existing_index_with_write_block_2')(); + await createIndex({ + client, + indexName: 'existing_index_without_write_block_2', + mappings: { properties: {} }, + })(); + await createIndex({ + client, + indexName: 'existing_index_with_write_block_2', + mappings: { properties: {} }, + })(); + await setWriteBlock({ client, index: 'existing_index_with_write_block_2' })(); }); it('resolves right if successful when an index already has a write block', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'existing_index_with_write_block_2'); + const task = removeWriteBlock({ client, index: 'existing_index_with_write_block_2' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -210,7 +231,7 @@ describe('migration actions', () => { }); it('resolves right if successful when an index does not have a write block', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'existing_index_without_write_block_2'); + const task = removeWriteBlock({ client, index: 'existing_index_without_write_block_2' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -220,7 +241,7 @@ describe('migration actions', () => { }); it('rejects if there is a non-retryable error', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'no_such_index'); + const task = removeWriteBlock({ client, index: 'no_such_index' }); await expect(task()).rejects.toMatchInlineSnapshot( `[ResponseError: index_not_found_exception]` ); @@ -251,7 +272,10 @@ describe('migration actions', () => { ); // Start tracking the index status - const indexStatusPromise = waitForIndexStatusYellow(client, 'red_then_yellow_index')(); + const indexStatusPromise = waitForIndexStatusYellow({ + client, + index: 'red_then_yellow_index', + })(); const redStatusResponse = await client.cluster.health({ index: 'red_then_yellow_index' }); expect(redStatusResponse.body.status).toBe('red'); @@ -281,7 +305,11 @@ describe('migration actions', () => { } }); it('resolves right if cloning into a new target index', async () => { - const task = cloneIndex(client, 'existing_index_with_write_block', 'clone_target_1'); + const task = cloneIndex({ + client, + source: 'existing_index_with_write_block', + target: 'clone_target_1', + }); expect.assertions(1); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -314,11 +342,11 @@ describe('migration actions', () => { .catch((e) => {}); // Call clone even though the index already exists - const cloneIndexPromise = cloneIndex( + const cloneIndexPromise = cloneIndex({ client, - 'existing_index_with_write_block', - 'clone_red_then_yellow_index' - )(); + source: 'existing_index_with_write_block', + target: 'clone_red_then_yellow_index', + })(); let indexYellow = false; setTimeout(() => { @@ -348,7 +376,7 @@ describe('migration actions', () => { }); it('resolves left index_not_found_exception if the source index does not exist', async () => { expect.assertions(1); - const task = cloneIndex(client, 'no_such_index', 'clone_target_3'); + const task = cloneIndex({ client, source: 'no_such_index', target: 'clone_target_3' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -378,12 +406,12 @@ describe('migration actions', () => { .catch((e) => {}); // Call clone even though the index already exists - const cloneIndexPromise = cloneIndex( + const cloneIndexPromise = cloneIndex({ client, - 
'existing_index_with_write_block', - 'clone_red_index', - '0s' - )(); + source: 'existing_index_with_write_block', + target: 'clone_red_index', + timeout: '0s', + })(); await cloneIndexPromise.then((res) => { expect(res).toMatchInlineSnapshot(` @@ -404,15 +432,15 @@ describe('migration actions', () => { // together with waitForReindexTask describe('reindex & waitForReindexTask', () => { it('resolves right when reindex succeeds without reindex script', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -436,21 +464,21 @@ describe('migration actions', () => { `); }); it('resolves right and excludes all documents not matching the unusedTypesQuery', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_excluded_docs', - Option.none, - false, - { + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_excluded_docs', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { bool: { must_not: ['f_agent_event', 'another_unused_type'].map((type) => ({ term: { type }, })), }, - } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -473,15 +501,15 @@ describe('migration actions', () => { }); it('resolves right when reindex succeeds with reindex script', async () => { expect.assertions(2); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_2', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_2', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + '_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -506,15 +534,15 @@ describe('migration actions', () => { it('resolves right, ignores version conflicts and does not update existing docs when reindex multiple times', async () => { expect.assertions(3); // Reindex with a script - let res = (await reindex( + let res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_3', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right; - let task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_3', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + 
'_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + let task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -523,15 +551,15 @@ describe('migration actions', () => { `); // reindex without a script - res = (await reindex( + res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_3', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_3', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -559,7 +587,7 @@ describe('migration actions', () => { expect.assertions(2); // Simulate a reindex that only adds some of the documents from the // source index into the target index - await createIndex(client, 'reindex_target_4', { properties: {} })(); + await createIndex({ client, indexName: 'reindex_target_4', mappings: { properties: {} } })(); const sourceDocs = ((await searchForOutdatedDocuments(client, { batchSize: 1000, targetIndex: 'existing_index_with_docs', @@ -570,18 +598,23 @@ describe('migration actions', () => { _id, _source, })); - await bulkOverwriteTransformedDocuments(client, 'reindex_target_4', sourceDocs, 'wait_for')(); + await bulkOverwriteTransformedDocuments({ + client, + index: 'reindex_target_4', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); // Now do a real reindex - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_4', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_4', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + '_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -614,24 +647,28 @@ describe('migration actions', () => { // and should ignore this error. 
// Create an index with incompatible mappings - await createIndex(client, 'reindex_target_5', { - dynamic: 'strict', - properties: { - /** no title field */ + await createIndex({ + client, + indexName: 'reindex_target_5', + mappings: { + dynamic: 'strict', + properties: { + /** no title field */ + }, }, })(); const { right: { taskId: reindexTaskId }, - } = (await reindex( + } = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_5', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, reindexTaskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_5', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: reindexTaskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -651,22 +688,26 @@ describe('migration actions', () => { // and should ignore this error. // Create an index with incompatible mappings - await createIndex(client, 'reindex_target_6', { - dynamic: false, - properties: { title: { type: 'integer' } }, // integer is incompatible with string title + await createIndex({ + client, + indexName: 'reindex_target_6', + mappings: { + dynamic: false, + properties: { title: { type: 'integer' } }, // integer is incompatible with string title + }, })(); const { right: { taskId: reindexTaskId }, - } = (await reindex( + } = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_6', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, reindexTaskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_6', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: reindexTaskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -679,10 +720,17 @@ describe('migration actions', () => { }); it('resolves left index_not_found_exception if source index does not exist', async () => { expect.assertions(1); - const res = (await reindex(client, 'no_such_index', 'reindex_target', Option.none, false, { - match_all: {}, + const res = (await reindex({ + client, + sourceIndex: 'no_such_index', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { + match_all: {}, + }, })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -695,16 +743,16 @@ describe('migration actions', () => { }); it('resolves left target_index_had_write_block if all failures are due to a write block', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'existing_index_with_write_block', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_with_write_block', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, 
timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -717,16 +765,16 @@ describe('migration actions', () => { }); it('resolves left if requireAlias=true and the target is not an alias', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'existing_index_with_write_block', - Option.none, - true, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_with_write_block', + reindexScript: Option.none, + requireAlias: true, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -739,16 +787,16 @@ describe('migration actions', () => { `); }); it('resolves left wait_for_task_completion_timeout when the task does not finish within the timeout', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '0s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '0s' }); await expect(task()).resolves.toMatchObject({ _tag: 'Left', @@ -766,17 +814,21 @@ describe('migration actions', () => { describe('verifyReindex', () => { it('resolves right if source and target indices have the same amount of documents', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_7', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - await waitForReindexTask(client, res.right.taskId, '10s')(); - - const task = verifyReindex(client, 'existing_index_with_docs', 'reindex_target_7'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_7', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + await waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' })(); + + const task = verifyReindex({ + client, + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_7', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -786,7 +838,11 @@ describe('migration actions', () => { }); it('resolves left if source and target indices have different amount of documents', async () => { expect.assertions(1); - const task = verifyReindex(client, 'existing_index_with_docs', 'existing_index_2'); + const task = verifyReindex({ + client, + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_2', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -798,19 +854,27 @@ describe('migration actions', () => { }); it('rejects if source or target index does not exist', async () => { expect.assertions(2); - let task = verifyReindex(client, 'no_such_index', 'existing_index_2'); + let task = verifyReindex({ + client, + sourceIndex: 'no_such_index', + targetIndex: 'existing_index_2', + }); await 
expect(task()).rejects.toMatchInlineSnapshot( `[ResponseError: index_not_found_exception]` ); - task = verifyReindex(client, 'existing_index_2', 'no_such_index'); + task = verifyReindex({ + client, + sourceIndex: 'existing_index_2', + targetIndex: 'no_such_index', + }); await expect(task()).rejects.toThrow('index_not_found_exception'); }); }); describe('openPit', () => { it('opens PointInTime for an index', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; expect(pitResponse.right.pitId).toEqual(expect.any(String)); @@ -824,52 +888,52 @@ describe('migration actions', () => { await expect(searchResponse.body.hits.hits.length).toBeGreaterThan(0); }); it('rejects if index does not exist', async () => { - const openPitTask = openPit(client, 'no_such_index'); + const openPitTask = openPit({ client, index: 'no_such_index' }); await expect(openPitTask()).rejects.toThrow('index_not_found_exception'); }); }); describe('readWithPit', () => { it('requests documents from an index using given PIT', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { match_all: {} }, - 1000, - undefined - ); + pitId: pitResponse.right.pitId, + query: { match_all: {} }, + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; await expect(docsResponse.right.outdatedDocuments.length).toBe(5); }); it('requests the batchSize of documents from an index', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { match_all: {} }, - 3, - undefined - ); + pitId: pitResponse.right.pitId, + query: { match_all: {} }, + batchSize: 3, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; await expect(docsResponse.right.outdatedDocuments.length).toBe(3); }); it('it excludes documents not matching the provided "query"', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { bool: { must_not: [ { @@ -885,9 +949,9 @@ describe('migration actions', () => { ], }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -902,18 +966,18 @@ describe('migration actions', () => { }); it('only returns documents that match the provided "query"', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = 
readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -928,19 +992,19 @@ describe('migration actions', () => { }); it('returns docs with _seq_no and _primary_term when specified', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined, - true - ); + batchSize: 1000, + searchAfter: undefined, + seqNoPrimaryTerm: true, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -955,18 +1019,18 @@ describe('migration actions', () => { }); it('does not return docs with _seq_no and _primary_term if not specified', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -981,24 +1045,24 @@ describe('migration actions', () => { }); it('rejects if PIT does not exist', async () => { - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - 'no_such_pit', - { match_all: {} }, - 1000, - undefined - ); + pitId: 'no_such_pit', + query: { match_all: {} }, + batchSize: 1000, + searchAfter: undefined, + }); await expect(readWithPitTask()).rejects.toThrow('illegal_argument_exception'); }); }); describe('closePit', () => { it('closes PointInTime', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; const pitId = pitResponse.right.pitId; - await closePit(client, pitId)(); + await closePit({ client, pitId })(); const searchTask = client.search({ body: { @@ -1010,7 +1074,7 @@ describe('migration actions', () => { }); it('rejects if PIT does not exist', async () => { - const closePitTask = closePit(client, 'no_such_pit'); + const closePitTask = closePit({ client, pitId: 'no_such_pit' }); await expect(closePitTask()).rejects.toThrow('illegal_argument_exception'); }); }); @@ -1034,7 +1098,10 @@ describe('migration actions', () => { return Either.right({ processedDocs }); }; } - const transformTask = transformDocs(innerTransformRawDocs, originalDocs); + const transformTask = transformDocs({ + transformRawDocs: innerTransformRawDocs, + outdatedDocuments: originalDocs, + }); const resultsWithProcessDocs = ((await transformTask()) as Either.Right) .right.processedDocs; @@ -1051,7 +1118,11 @@ describe('migration actions', () => { 'existing_index_with_write_block' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ 
+ client, + taskId: res.right.taskId, + timeout: '10s', + }); // We can't do a snapshot match because the response includes an index // id which ES assigns dynamically @@ -1065,7 +1136,11 @@ describe('migration actions', () => { 'no_such_index' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '10s', + }); await expect(task()).rejects.toMatchInlineSnapshot(` [Error: pickupUpdatedMappings task failed with the following error: @@ -1078,7 +1153,11 @@ describe('migration actions', () => { 'existing_index_with_docs' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '0s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '0s', + }); await expect(task()).resolves.toMatchObject({ _tag: 'Left', @@ -1097,7 +1176,11 @@ describe('migration actions', () => { 'existing_index_with_docs' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '10s', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -1111,9 +1194,13 @@ describe('migration actions', () => { describe('updateAndPickupMappings', () => { it('resolves right when mappings were updated and picked up', async () => { // Create an index without any mappings and insert documents into it - await createIndex(client, 'existing_index_without_mappings', { - dynamic: false, - properties: {}, + await createIndex({ + client, + indexName: 'existing_index_without_mappings', + mappings: { + dynamic: false, + properties: {}, + }, })(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -1121,12 +1208,12 @@ describe('migration actions', () => { { _source: { title: 'doc 3' } }, { _source: { title: 'doc 4' } }, ] as unknown) as SavedObjectsRawDoc[]; - await bulkOverwriteTransformedDocuments( + await bulkOverwriteTransformedDocuments({ client, - 'existing_index_without_mappings', - sourceDocs, - 'wait_for' - )(); + index: 'existing_index_without_mappings', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); // Assert that we can't search over the unmapped fields of the document const originalSearchResults = ((await searchForOutdatedDocuments(client, { @@ -1139,14 +1226,18 @@ describe('migration actions', () => { expect(originalSearchResults.length).toBe(0); // Update and pickup mappings so that the title field is searchable - const res = await updateAndPickupMappings(client, 'existing_index_without_mappings', { - properties: { - title: { type: 'text' }, + const res = await updateAndPickupMappings({ + client, + index: 'existing_index_without_mappings', + mappings: { + properties: { + title: { type: 'text' }, + }, }, })(); expect(Either.isRight(res)).toBe(true); const taskId = (res as Either.Right).right.taskId; - await waitForPickupUpdatedMappingsTask(client, taskId, '60s')(); + await waitForPickupUpdatedMappingsTask({ client, taskId, timeout: '60s' })(); // Repeat the search expecting to be able to find the existing documents const pickedUpSearchResults = ((await searchForOutdatedDocuments(client, { @@ -1163,15 +1254,18 @@ describe('migration actions', () => { describe('updateAliases', () => { describe('remove', () => { it('resolves left index_not_found_exception when the index does not exist', async () => { - const task = 
updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'no_such_index', - must_exist: false, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'no_such_index', + must_exist: false, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1184,15 +1278,18 @@ describe('migration actions', () => { }); describe('with must_exist=false', () => { it('resolves left alias_not_found_exception when alias does not exist', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'existing_index_with_docs', - must_exist: false, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'existing_index_with_docs', + must_exist: false, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1205,15 +1302,18 @@ describe('migration actions', () => { }); describe('with must_exist=true', () => { it('resolves left alias_not_found_exception when alias does not exist on specified index', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'existing_index_2_alias', - index: 'existing_index_with_docs', - must_exist: true, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'existing_index_2_alias', + index: 'existing_index_with_docs', + must_exist: true, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1224,15 +1324,18 @@ describe('migration actions', () => { `); }); it('resolves left alias_not_found_exception when alias does not exist', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'existing_index_with_docs', - must_exist: true, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'existing_index_with_docs', + must_exist: true, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1246,13 +1349,16 @@ describe('migration actions', () => { }); describe('remove_index', () => { it('left index_not_found_exception if index does not exist', async () => { - const task = updateAliases(client, [ - { - remove_index: { - index: 'no_such_index', + const task = updateAliases({ + client, + aliasActions: [ + { + remove_index: { + index: 'no_such_index', + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1264,13 +1370,16 @@ describe('migration actions', () => { `); }); it('left remove_index_not_a_concrete_index when remove_index targets an alias', async () => { - const task = updateAliases(client, [ - { - remove_index: { - index: 'existing_index_2_alias', + const task = updateAliases({ + client, + aliasActions: [ + { + remove_index: { + index: 'existing_index_2_alias', + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1312,7 +1421,11 @@ describe('migration actions', () => { }); // Call createIndex even though the index already exists - const createIndexPromise = createIndex(client, 'red_then_yellow_index', undefined as any)(); + const createIndexPromise = createIndex({ + client, + indexName: 'red_then_yellow_index', + mappings: undefined as any, + })(); let indexYellow = false; setTimeout(() => { @@ -1341,7 +1454,7 @@ 
describe('migration actions', () => { // Creating an index with the same name as an existing alias to induce // failure await expect( - createIndex(client, 'existing_index_2_alias', undefined as any)() + createIndex({ client, indexName: 'existing_index_2_alias', mappings: undefined as any })() ).rejects.toMatchInlineSnapshot(`[ResponseError: invalid_index_name_exception]`); }); }); @@ -1353,12 +1466,12 @@ describe('migration actions', () => { { _source: { title: 'doc 6' } }, { _source: { title: 'doc 7' } }, ] as unknown) as SavedObjectsRawDoc[]; - const task = bulkOverwriteTransformedDocuments( + const task = bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_docs', - newDocs, - 'wait_for' - ); + index: 'existing_index_with_docs', + transformedDocs: newDocs, + refresh: 'wait_for', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -1374,12 +1487,15 @@ describe('migration actions', () => { outdatedDocumentsQuery: undefined, })()) as Either.Right).right.outdatedDocuments; - const task = bulkOverwriteTransformedDocuments( + const task = bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_docs', - [...existingDocs, ({ _source: { title: 'doc 8' } } as unknown) as SavedObjectsRawDoc], - 'wait_for' - ); + index: 'existing_index_with_docs', + transformedDocs: [ + ...existingDocs, + ({ _source: { title: 'doc 8' } } as unknown) as SavedObjectsRawDoc, + ], + refresh: 'wait_for', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -1394,12 +1510,12 @@ describe('migration actions', () => { { _source: { title: 'doc 7' } }, ] as unknown) as SavedObjectsRawDoc[]; await expect( - bulkOverwriteTransformedDocuments( + bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_write_block', - newDocs, - 'wait_for' - )() + index: 'existing_index_with_write_block', + transformedDocs: newDocs, + refresh: 'wait_for', + })() ).rejects.toMatchObject(expect.anything()); }); }); diff --git a/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts b/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts index 1881f9a712c29..e9cb33c0aa54a 100644 --- a/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts +++ b/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts @@ -19,7 +19,7 @@ export async function cleanup( if (!state) return; if ('sourceIndexPitId' in state) { try { - await Actions.closePit(client, state.sourceIndexPitId)(); + await Actions.closePit({ client, pitId: state.sourceIndexPitId })(); } catch (e) { executionLog.push({ type: 'cleanup', diff --git a/src/core/server/saved_objects/migrationsv2/next.ts b/src/core/server/saved_objects/migrationsv2/next.ts index 07ebf80271d48..3c3e3c46a8d68 100644 --- a/src/core/server/saved_objects/migrationsv2/next.ts +++ b/src/core/server/saved_objects/migrationsv2/next.ts @@ -58,38 +58,46 @@ export type ResponseType = UnwrapPromise< export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: TransformRawDocs) => { return { INIT: (state: InitState) => - Actions.fetchIndices(client, [state.currentAlias, state.versionAlias]), + Actions.fetchIndices({ client, indices: [state.currentAlias, state.versionAlias] }), WAIT_FOR_YELLOW_SOURCE: (state: WaitForYellowSourceState) => - Actions.waitForIndexStatusYellow(client, state.sourceIndex.value), + Actions.waitForIndexStatusYellow({ client, index: state.sourceIndex.value }), SET_SOURCE_WRITE_BLOCK: (state: 
SetSourceWriteBlockState) => - Actions.setWriteBlock(client, state.sourceIndex.value), + Actions.setWriteBlock({ client, index: state.sourceIndex.value }), CREATE_NEW_TARGET: (state: CreateNewTargetState) => - Actions.createIndex(client, state.targetIndex, state.targetIndexMappings), + Actions.createIndex({ + client, + indexName: state.targetIndex, + mappings: state.targetIndexMappings, + }), CREATE_REINDEX_TEMP: (state: CreateReindexTempState) => - Actions.createIndex(client, state.tempIndex, state.tempIndexMappings), + Actions.createIndex({ + client, + indexName: state.tempIndex, + mappings: state.tempIndexMappings, + }), REINDEX_SOURCE_TO_TEMP_OPEN_PIT: (state: ReindexSourceToTempOpenPit) => - Actions.openPit(client, state.sourceIndex.value), + Actions.openPit({ client, index: state.sourceIndex.value }), REINDEX_SOURCE_TO_TEMP_READ: (state: ReindexSourceToTempRead) => - Actions.readWithPit( + Actions.readWithPit({ client, - state.sourceIndexPitId, + pitId: state.sourceIndexPitId, /* When reading we use a source query to exclude saved objects types which * are no longer used. These saved objects will still be kept in the outdated * index for backup purposes, but won't be available in the upgraded index. */ - state.unusedTypesQuery, - state.batchSize, - state.lastHitSortValue - ), + query: state.unusedTypesQuery, + batchSize: state.batchSize, + searchAfter: state.lastHitSortValue, + }), REINDEX_SOURCE_TO_TEMP_CLOSE_PIT: (state: ReindexSourceToTempClosePit) => - Actions.closePit(client, state.sourceIndexPitId), + Actions.closePit({ client, pitId: state.sourceIndexPitId }), REINDEX_SOURCE_TO_TEMP_INDEX: (state: ReindexSourceToTempIndex) => - Actions.transformDocs(transformRawDocs, state.outdatedDocuments), + Actions.transformDocs({ transformRawDocs, outdatedDocuments: state.outdatedDocuments }), REINDEX_SOURCE_TO_TEMP_INDEX_BULK: (state: ReindexSourceToTempIndexBulk) => - Actions.bulkOverwriteTransformedDocuments( + Actions.bulkOverwriteTransformedDocuments({ client, - state.tempIndex, - state.transformedDocs, + index: state.tempIndex, + transformedDocs: state.transformedDocs, /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -97,39 +105,48 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra * before we reach the OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT step. * Right now, it's performed during REFRESH_TARGET step. 
*/ - false - ), + refresh: false, + }), SET_TEMP_WRITE_BLOCK: (state: SetTempWriteBlock) => - Actions.setWriteBlock(client, state.tempIndex), + Actions.setWriteBlock({ client, index: state.tempIndex }), CLONE_TEMP_TO_TARGET: (state: CloneTempToSource) => - Actions.cloneIndex(client, state.tempIndex, state.targetIndex), - REFRESH_TARGET: (state: RefreshTarget) => Actions.refreshIndex(client, state.targetIndex), + Actions.cloneIndex({ client, source: state.tempIndex, target: state.targetIndex }), + REFRESH_TARGET: (state: RefreshTarget) => + Actions.refreshIndex({ client, targetIndex: state.targetIndex }), UPDATE_TARGET_MAPPINGS: (state: UpdateTargetMappingsState) => - Actions.updateAndPickupMappings(client, state.targetIndex, state.targetIndexMappings), + Actions.updateAndPickupMappings({ + client, + index: state.targetIndex, + mappings: state.targetIndexMappings, + }), UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK: (state: UpdateTargetMappingsWaitForTaskState) => - Actions.waitForPickupUpdatedMappingsTask(client, state.updateTargetMappingsTaskId, '60s'), + Actions.waitForPickupUpdatedMappingsTask({ + client, + taskId: state.updateTargetMappingsTaskId, + timeout: '60s', + }), OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT: (state: OutdatedDocumentsSearchOpenPit) => - Actions.openPit(client, state.targetIndex), + Actions.openPit({ client, index: state.targetIndex }), OUTDATED_DOCUMENTS_SEARCH_READ: (state: OutdatedDocumentsSearchRead) => - Actions.readWithPit( + Actions.readWithPit({ client, - state.pitId, + pitId: state.pitId, // search for outdated documents only - state.outdatedDocumentsQuery, - state.batchSize, - state.lastHitSortValue - ), + query: state.outdatedDocumentsQuery, + batchSize: state.batchSize, + searchAfter: state.lastHitSortValue, + }), OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT: (state: OutdatedDocumentsSearchClosePit) => - Actions.closePit(client, state.pitId), + Actions.closePit({ client, pitId: state.pitId }), OUTDATED_DOCUMENTS_REFRESH: (state: OutdatedDocumentsRefresh) => - Actions.refreshIndex(client, state.targetIndex), + Actions.refreshIndex({ client, targetIndex: state.targetIndex }), OUTDATED_DOCUMENTS_TRANSFORM: (state: OutdatedDocumentsTransform) => - Actions.transformDocs(transformRawDocs, state.outdatedDocuments), + Actions.transformDocs({ transformRawDocs, outdatedDocuments: state.outdatedDocuments }), TRANSFORMED_DOCUMENTS_BULK_INDEX: (state: TransformedDocumentsBulkIndex) => - Actions.bulkOverwriteTransformedDocuments( + Actions.bulkOverwriteTransformedDocuments({ client, - state.targetIndex, - state.transformedDocs, + index: state.targetIndex, + transformedDocs: state.transformedDocs, /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -137,29 +154,32 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra * before we reach the MARK_VERSION_INDEX_READY step. * Right now, it's performed during OUTDATED_DOCUMENTS_REFRESH step. 
*/ - false - ), + }), MARK_VERSION_INDEX_READY: (state: MarkVersionIndexReady) => - Actions.updateAliases(client, state.versionIndexReadyActions.value), + Actions.updateAliases({ client, aliasActions: state.versionIndexReadyActions.value }), MARK_VERSION_INDEX_READY_CONFLICT: (state: MarkVersionIndexReadyConflict) => - Actions.fetchIndices(client, [state.currentAlias, state.versionAlias]), + Actions.fetchIndices({ client, indices: [state.currentAlias, state.versionAlias] }), LEGACY_SET_WRITE_BLOCK: (state: LegacySetWriteBlockState) => - Actions.setWriteBlock(client, state.legacyIndex), + Actions.setWriteBlock({ client, index: state.legacyIndex }), LEGACY_CREATE_REINDEX_TARGET: (state: LegacyCreateReindexTargetState) => - Actions.createIndex(client, state.sourceIndex.value, state.legacyReindexTargetMappings), + Actions.createIndex({ + client, + indexName: state.sourceIndex.value, + mappings: state.legacyReindexTargetMappings, + }), LEGACY_REINDEX: (state: LegacyReindexState) => - Actions.reindex( + Actions.reindex({ client, - state.legacyIndex, - state.sourceIndex.value, - state.preMigrationScript, - false, - state.unusedTypesQuery - ), + sourceIndex: state.legacyIndex, + targetIndex: state.sourceIndex.value, + reindexScript: state.preMigrationScript, + requireAlias: false, + unusedTypesQuery: state.unusedTypesQuery, + }), LEGACY_REINDEX_WAIT_FOR_TASK: (state: LegacyReindexWaitForTaskState) => - Actions.waitForReindexTask(client, state.legacyReindexTaskId, '60s'), + Actions.waitForReindexTask({ client, taskId: state.legacyReindexTaskId, timeout: '60s' }), LEGACY_DELETE: (state: LegacyDeleteState) => - Actions.updateAliases(client, state.legacyPreMigrationDoneActions), + Actions.updateAliases({ client, aliasActions: state.legacyPreMigrationDoneActions }), }; }; diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index 7f108dbeb0086..0c35177f51f99 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -874,6 +874,7 @@ export interface DeprecationsDetails { }; manualSteps?: string[]; }; + deprecationType?: 'config' | 'feature'; // (undocumented) documentationUrl?: string; level: 'warning' | 'critical' | 'fetch_error'; diff --git a/src/core/server/server.ts b/src/core/server/server.ts index 4d99368f9bf70..a31b9a061ac5d 100644 --- a/src/core/server/server.ts +++ b/src/core/server/server.ts @@ -193,8 +193,6 @@ export class Server { const deprecationsSetup = this.deprecations.setup({ http: httpSetup, - elasticsearch: elasticsearchServiceSetup, - coreUsageData: coreUsageDataSetup, }); const coreSetup: InternalCoreSetup = { diff --git a/src/plugins/bfetch/common/batch.ts b/src/plugins/bfetch/common/batch.ts index a84d94b541ae5..59b012751c66d 100644 --- a/src/plugins/bfetch/common/batch.ts +++ b/src/plugins/bfetch/common/batch.ts @@ -19,3 +19,8 @@ export interface BatchResponseItem new Promise((resolve) => setImmediate(resolve)); const getPromiseState = (promise: Promise): Promise<'resolved' | 'rejected' | 'pending'> => Promise.race<'resolved' | 'rejected' | 'pending'>([ @@ -52,6 +54,7 @@ describe('createStreamingBatchedFunction()', () => { const fn = createStreamingBatchedFunction({ url: '/test', fetchStreaming, + compressionDisabled$: rxof(true), }); expect(typeof fn).toBe('function'); }); @@ -61,6 +64,7 @@ describe('createStreamingBatchedFunction()', () => { const fn = createStreamingBatchedFunction({ url: '/test', fetchStreaming, + compressionDisabled$: rxof(true), }); const res = fn({}); expect(typeof res.then).toBe('function'); @@ -74,6 +78,7 @@ 
describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); @@ -93,6 +98,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); @@ -107,6 +113,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ foo: 'bar' }); @@ -125,6 +132,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ foo: 'bar' }); @@ -146,14 +154,18 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ foo: 'bar' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ baz: 'quix' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ full: 'yep' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(1); }); @@ -164,6 +176,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -186,11 +199,13 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ a: '1' }); fn({ b: '2' }); fn({ c: '3' }); + await flushPromises(); expect(fetchStreaming.mock.calls[0][0]).toMatchObject({ url: '/test', @@ -209,13 +224,16 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ a: '1' }); fn({ b: '2' }); fn({ c: '3' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(1); fn({ d: '4' }); + await flushPromises(); await new Promise((r) => setTimeout(r, 6)); expect(fetchStreaming).toHaveBeenCalledTimes(2); }); @@ -229,6 +247,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -246,8 +265,11 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); + await flushPromises(); + const promise1 = fn({ a: '1' }); const promise2 = fn({ b: '2' }); const promise3 = fn({ c: '3' }); @@ -287,6 +309,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -314,6 +337,20 @@ describe('createStreamingBatchedFunction()', () => { expect(await promise3).toEqual({ foo: 'bar 2' }); }); + test('compression is false by default', async () => { + const { fetchStreaming } = setup(); + const fn = createStreamingBatchedFunction({ + url: '/test', + flushOnMaxItems: 1, + fetchStreaming, + }); + + fn({ a: '1' }); + + const dontCompress = await fetchStreaming.mock.calls[0][0].compressionDisabled$.toPromise(); + expect(dontCompress).toBe(false); + }); + test('resolves falsy results', async () => { const { fetchStreaming, stream } = setup(); const fn = createStreamingBatchedFunction({ @@ -321,6 +358,7 @@ 
describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -362,6 +400,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise = fn({ a: '1' }); @@ -390,6 +429,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -442,6 +482,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -471,6 +512,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -509,6 +551,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -539,6 +582,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -576,6 +620,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -608,6 +653,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -644,7 +690,9 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); + await flushPromises(); const promise1 = of(fn({ a: '1' })); const promise2 = of(fn({ a: '2' })); diff --git a/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts b/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts index 2d81331f10a88..d5f955f517d13 100644 --- a/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts +++ b/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts @@ -6,16 +6,16 @@ * Side Public License, v 1. */ +import { Observable, of } from 'rxjs'; import { AbortError, abortSignalToPromise, defer } from '../../../kibana_utils/public'; import { ItemBufferParams, TimedItemBufferParams, createBatchedFunction, - BatchResponseItem, ErrorLike, + normalizeError, } from '../../common'; -import { fetchStreaming, split } from '../streaming'; -import { normalizeError } from '../../common'; +import { fetchStreaming } from '../streaming'; import { BatchedFunc, BatchItem } from './types'; export interface BatchedFunctionProtocolError extends ErrorLike { @@ -47,6 +47,11 @@ export interface StreamingBatchedFunctionParams { * before sending the batch request. */ maxItemAge?: TimedItemBufferParams['maxItemAge']; + + /** + * Disables zlib compression of response chunks. 
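+ * When the observable emits true, response chunks arrive as plain text; when it emits + * false (the default), each newline-separated chunk is zlib-deflated and base64-encoded, + * as exercised by the fetch_streaming tests below.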
+ */ + compressionDisabled$?: Observable; } /** @@ -64,6 +69,7 @@ export const createStreamingBatchedFunction = ( fetchStreaming: fetchStreamingInjected = fetchStreaming, flushOnMaxItems = 25, maxItemAge = 10, + compressionDisabled$ = of(false), } = params; const [fn] = createBatchedFunction({ onCall: (payload: Payload, signal?: AbortSignal) => { @@ -119,6 +125,7 @@ export const createStreamingBatchedFunction = ( body: JSON.stringify({ batch }), method: 'POST', signal: abortController.signal, + compressionDisabled$, }); const handleStreamError = (error: any) => { @@ -127,10 +134,10 @@ export const createStreamingBatchedFunction = ( for (const { future } of items) future.reject(normalizedError); }; - stream.pipe(split('\n')).subscribe({ + stream.subscribe({ next: (json: string) => { try { - const response = JSON.parse(json) as BatchResponseItem; + const response = JSON.parse(json); if (response.error) { items[response.id].future.reject(response.error); } else if (response.result !== undefined) { diff --git a/src/plugins/bfetch/public/batching/index.ts b/src/plugins/bfetch/public/batching/index.ts new file mode 100644 index 0000000000000..115fd84cbe979 --- /dev/null +++ b/src/plugins/bfetch/public/batching/index.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +export { + createStreamingBatchedFunction, + StreamingBatchedFunctionParams, +} from './create_streaming_batched_function'; diff --git a/src/plugins/bfetch/public/plugin.ts b/src/plugins/bfetch/public/plugin.ts index ed97d468eec0b..f97a91a0e70d3 100644 --- a/src/plugins/bfetch/public/plugin.ts +++ b/src/plugins/bfetch/public/plugin.ts @@ -7,12 +7,11 @@ */ import { CoreStart, PluginInitializerContext, CoreSetup, Plugin } from 'src/core/public'; +import { from, Observable, of } from 'rxjs'; +import { switchMap } from 'rxjs/operators'; import { fetchStreaming as fetchStreamingStatic, FetchStreamingParams } from './streaming'; -import { removeLeadingSlash } from '../common'; -import { - createStreamingBatchedFunction, - StreamingBatchedFunctionParams, -} from './batching/create_streaming_batched_function'; +import { DISABLE_BFETCH_COMPRESSION, removeLeadingSlash } from '../common'; +import { createStreamingBatchedFunction, StreamingBatchedFunctionParams } from './batching'; import { BatchedFunc } from './batching/types'; // eslint-disable-next-line @@ -43,12 +42,23 @@ export class BfetchPublicPlugin constructor(private readonly initializerContext: PluginInitializerContext) {} - public setup(core: CoreSetup, plugins: BfetchPublicSetupDependencies): BfetchPublicSetup { + public setup( + core: CoreSetup, + plugins: BfetchPublicSetupDependencies + ): BfetchPublicSetup { const { version } = this.initializerContext.env.packageInfo; const basePath = core.http.basePath.get(); - const fetchStreaming = this.fetchStreaming(version, basePath); - const batchedFunction = this.batchedFunction(fetchStreaming); + const compressionDisabled$ = from(core.getStartServices()).pipe( + switchMap((deps) => { + return of(deps[0]); + }), + switchMap((coreStart) => { + return coreStart.uiSettings.get$(DISABLE_BFETCH_COMPRESSION); + }) + ); + const fetchStreaming = this.fetchStreaming(version, basePath, compressionDisabled$); + const 
+ const fetchStreaming = this.fetchStreaming(version, basePath, compressionDisabled$); + const batchedFunction = this.batchedFunction(fetchStreaming, compressionDisabled$); this.contract = { fetchStreaming, @@ -66,7 +76,8 @@ export class BfetchPublicPlugin private fetchStreaming = ( version: string, - basePath: string + basePath: string, + compressionDisabled$: Observable<boolean> ): BfetchPublicSetup['fetchStreaming'] => (params) => fetchStreamingStatic({ ...params, @@ -76,13 +87,16 @@ export class BfetchPublicPlugin 'kbn-version': version, ...(params.headers || {}), }, + compressionDisabled$, }); private batchedFunction = ( - fetchStreaming: BfetchPublicContract['fetchStreaming'] + fetchStreaming: BfetchPublicContract['fetchStreaming'], + compressionDisabled$: Observable<boolean> ): BfetchPublicContract['batchedFunction'] => (params) => createStreamingBatchedFunction({ ...params, + compressionDisabled$, fetchStreaming: params.fetchStreaming || fetchStreaming, }); } diff --git a/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts b/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts index e804b3ea94227..a5d066f6d9a24 100644 --- a/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts +++ b/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts @@ -8,6 +8,15 @@ import { fetchStreaming } from './fetch_streaming'; import { mockXMLHttpRequest } from '../test_helpers/xhr'; +import { of } from 'rxjs'; +import { promisify } from 'util'; +import { deflate } from 'zlib'; +const pDeflate = promisify(deflate); + +const compressResponse = async (resp: any) => { + const gzipped = await pDeflate(JSON.stringify(resp)); + return gzipped.toString('base64'); +}; const tick = () => new Promise((resolve) => setTimeout(resolve, 1)); @@ -21,6 +30,7 @@ test('returns XHR request', () => { setup(); const { xhr } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(typeof xhr.readyState).toBe('number'); }); @@ -29,6 +39,7 @@ test('returns stream', () => { setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(typeof stream.subscribe).toBe('function'); }); @@ -37,6 +48,7 @@ test('promise resolves when request completes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); let resolved = false; @@ -65,10 +77,90 @@ test('promise resolves when request completes', async () => { expect(resolved).toBe(true); }); -test('streams incoming text as it comes through', async () => { +test('promise resolves when compressed request completes', async () => { + const env = setup(); + const { stream } = fetchStreaming({ + url: 'http://example.com', + compressionDisabled$: of(false), + }); + + let resolved = false; + let result; + stream.toPromise().then((r) => { + resolved = true; + result = r; + }); + + await tick(); + expect(resolved).toBe(false); + + const msg = { foo: 'bar' }; + + // Whole message in a response + (env.xhr as any).responseText = `${await compressResponse(msg)}\n`; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + (env.xhr as any).readyState = 4; + (env.xhr as any).status = 200; + env.xhr.onreadystatechange!({} as any); + + await tick(); + expect(resolved).toBe(true); + expect(result).toStrictEqual(JSON.stringify(msg)); +}); + +test('promise resolves when compressed chunked request completes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(false), + }); + + let resolved = false; + let result; + 
stream.toPromise().then((r) => { + resolved = true; + result = r; + }); + + await tick(); + expect(resolved).toBe(false); + + const msg = { veg: 'tomato' }; + const msgToCut = await compressResponse(msg); + const part1 = msgToCut.substr(0, 3); + + // First chunk: only part of the compressed message + (env.xhr as any).responseText = part1; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + // The full compressed message, terminated by the delimiter + (env.xhr as any).responseText = `${msgToCut}\n`; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + (env.xhr as any).readyState = 4; + (env.xhr as any).status = 200; + env.xhr.onreadystatechange!({} as any); + + await tick(); + expect(resolved).toBe(true); + expect(result).toStrictEqual(JSON.stringify(msg)); +}); + +test('streams incoming text as it comes through, according to separators', async () => { + const env = setup(); + const { stream } = fetchStreaming({ + url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -80,16 +172,22 @@ (env.xhr as any).responseText = 'foo'; env.xhr.onprogress!({} as any); + await tick(); + expect(spy).toHaveBeenCalledTimes(0); + + (env.xhr as any).responseText = 'foo\nbar'; + env.xhr.onprogress!({} as any); + await tick(); expect(spy).toHaveBeenCalledTimes(1); expect(spy).toHaveBeenCalledWith('foo'); - (env.xhr as any).responseText = 'foo\nbar'; + (env.xhr as any).responseText = 'foo\nbar\n'; env.xhr.onprogress!({} as any); await tick(); expect(spy).toHaveBeenCalledTimes(2); - expect(spy).toHaveBeenCalledWith('\nbar'); + expect(spy).toHaveBeenCalledWith('bar'); (env.xhr as any).readyState = 4; (env.xhr as any).status = 200; @@ -103,6 +201,7 @@ test('completes stream observable when request finishes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -127,6 +226,7 @@ test('completes stream observable when aborted', async () => { const { stream } = fetchStreaming({ url: 'http://example.com', signal: abort.signal, + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -152,6 +252,7 @@ test('promise throws when request errors', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -178,6 +279,7 @@ test('stream observable errors when request errors', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -210,6 +312,7 @@ test('sets custom headers', async () => { 'Content-Type': 'text/plain', Authorization: 'Bearer 123', }, + compressionDisabled$: of(true), }); expect(env.xhr.setRequestHeader).toHaveBeenCalledWith('Content-Type', 'text/plain'); @@ -223,6 +326,7 @@ test('uses credentials', async () => { fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(env.xhr.withCredentials).toBe(true); @@ -238,6 +342,7 @@ test('opens XHR request and sends specified body', async () => { url: 'http://elastic.co', method: 'GET', body: 'foobar', + compressionDisabled$: of(true), }); expect(env.xhr.open).toHaveBeenCalledTimes(1); @@ -250,6 +355,7 @@ test('uses POST request method by default', async () => { const env = setup(); fetchStreaming({ url: 'http://elastic.co', + compressionDisabled$: of(true), }); 
expect(env.xhr.open).toHaveBeenCalledWith('POST', 'http://elastic.co'); }); diff --git a/src/plugins/bfetch/public/streaming/fetch_streaming.ts b/src/plugins/bfetch/public/streaming/fetch_streaming.ts index d68e4d01b44f5..1af35ef68fb85 100644 --- a/src/plugins/bfetch/public/streaming/fetch_streaming.ts +++ b/src/plugins/bfetch/public/streaming/fetch_streaming.ts @@ -6,7 +6,11 @@ * Side Public License, v 1. */ +import { Observable, of } from 'rxjs'; +import { map, share, switchMap } from 'rxjs/operators'; +import { inflateResponse } from '.'; import { fromStreamingXhr } from './from_streaming_xhr'; +import { split } from './split'; export interface FetchStreamingParams { url: string; @@ -14,6 +18,7 @@ method?: 'GET' | 'POST'; body?: string; signal?: AbortSignal; + compressionDisabled$?: Observable<boolean>; } /** @@ -26,23 +31,49 @@ export function fetchStreaming({ method = 'POST', body = '', signal, + compressionDisabled$ = of(false), }: FetchStreamingParams) { const xhr = new window.XMLHttpRequest(); - // Begin the request - xhr.open(method, url); - xhr.withCredentials = true; + const msgStream = compressionDisabled$.pipe( + switchMap((compressionDisabled) => { + // Begin the request + xhr.open(method, url); + xhr.withCredentials = true; - // Set the HTTP headers - Object.entries(headers).forEach(([k, v]) => xhr.setRequestHeader(k, v)); + if (!compressionDisabled) { + headers['X-Chunk-Encoding'] = 'deflate'; + } - const stream = fromStreamingXhr(xhr, signal); + // Set the HTTP headers + Object.entries(headers).forEach(([k, v]) => xhr.setRequestHeader(k, v)); - // Send the payload to the server - xhr.send(body); + const stream = fromStreamingXhr(xhr, signal); + + // Send the payload to the server + xhr.send(body); + + // Return a stream of chunked decompressed messages + return stream.pipe( + split('\n'), + map((msg) => { + return compressionDisabled ? msg : inflateResponse(msg); + }) + ); + }), + share() + ); + + // start execution + const msgStreamSub = msgStream.subscribe({ + error: (e) => {}, + complete: () => { + msgStreamSub.unsubscribe(); + }, + }); return { xhr, - stream, + stream: msgStream, }; } diff --git a/src/plugins/bfetch/public/streaming/index.ts b/src/plugins/bfetch/public/streaming/index.ts index afb442feffb29..545cae87aa3d6 100644 --- a/src/plugins/bfetch/public/streaming/index.ts +++ b/src/plugins/bfetch/public/streaming/index.ts @@ -9,3 +9,4 @@ export * from './split'; export * from './from_streaming_xhr'; export * from './fetch_streaming'; +export { inflateResponse } from './inflate_response';
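To make the client-side protocol concrete, a hypothetical call site (the endpoint and body are illustrative, the rest follows the file above): with the default `compressionDisabled$ = of(false)`, the request goes out with an `X-Chunk-Encoding: deflate` header, and each `\n`-delimited chunk is base64-decoded and inflated by `inflateResponse` before it reaches subscribers.

```ts
import { fetchStreaming } from './fetch_streaming';

// Hypothetical consumer; '/api/my_plugin/stream' is illustrative only.
const { stream, xhr } = fetchStreaming({
  url: '/api/my_plugin/stream',
  body: JSON.stringify({ query: 'foo' }),
});

stream.subscribe({
  next: (msg) => console.log('chunk:', JSON.parse(msg)), // already decompressed
  complete: () => console.log('HTTP status:', xhr.status),
  error: (e) => console.error(e),
});
```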
diff --git a/src/plugins/bfetch/public/streaming/inflate_response.ts b/src/plugins/bfetch/public/streaming/inflate_response.ts new file mode 100644 index 0000000000000..73cb52285987c --- /dev/null +++ b/src/plugins/bfetch/public/streaming/inflate_response.ts @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { unzlibSync, strFromU8 } from 'fflate'; + +export function inflateResponse(response: string) { + const buff = Buffer.from(response, 'base64'); + const unzip = unzlibSync(buff); + return strFromU8(unzip); +} diff --git a/src/plugins/bfetch/server/plugin.ts b/src/plugins/bfetch/server/plugin.ts index 18f0813260f03..7fd46e2f6cc44 100644 --- a/src/plugins/bfetch/server/plugin.ts +++ b/src/plugins/bfetch/server/plugin.ts @@ -16,6 +16,7 @@ import type { RouteMethod, RequestHandler, RequestHandlerContext, + StartServicesAccessor, } from 'src/core/server'; import { schema } from '@kbn/config-schema'; import { Subject } from 'rxjs'; @@ -28,7 +29,8 @@ import { normalizeError, } from '../common'; import { StreamingRequestHandler } from './types'; -import { createNDJSONStream } from './streaming'; +import { createStream } from './streaming'; +import { getUiSettings } from './ui_settings'; // eslint-disable-next-line export interface BfetchServerSetupDependencies {} @@ -112,9 +114,19 @@ export class BfetchServerPlugin public setup(core: CoreSetup, plugins: BfetchServerSetupDependencies): BfetchServerSetup { const logger = this.initializerContext.logger.get(); const router = core.http.createRouter(); - const addStreamingResponseRoute = this.addStreamingResponseRoute({ router, logger }); + + core.uiSettings.register(getUiSettings()); + + const addStreamingResponseRoute = this.addStreamingResponseRoute({ + getStartServices: core.getStartServices, + router, + logger, + }); const addBatchProcessingRoute = this.addBatchProcessingRoute(addStreamingResponseRoute); - const createStreamingRequestHandler = this.createStreamingRequestHandler({ logger }); + const createStreamingRequestHandler = this.createStreamingRequestHandler({ + getStartServices: core.getStartServices, + logger, + }); return { addBatchProcessingRoute, @@ -129,10 +141,16 @@ export class BfetchServerPlugin public stop() {} + private getCompressionDisabled(request: KibanaRequest) { + return request.headers['x-chunk-encoding'] !== 'deflate'; + } + private addStreamingResponseRoute = ({ + getStartServices, router, logger, }: { + getStartServices: StartServicesAccessor; router: ReturnType; logger: Logger; }): BfetchServerSetup['addStreamingResponseRoute'] => (path, handler) => { @@ -146,9 +164,10 @@ async (context, request, response) => { const handlerInstance = handler(request); const data = request.body; + const compressionDisabled = this.getCompressionDisabled(request); return response.ok({ headers: streamingHeaders, - body: createNDJSONStream(handlerInstance.getResponseStream(data), logger), + body: createStream(handlerInstance.getResponseStream(data), logger, compressionDisabled), }); } ); @@ -156,17 +175,20 @@ export class BfetchServerPlugin private createStreamingRequestHandler = ({ logger, + getStartServices, }: { logger: Logger; + getStartServices: StartServicesAccessor; }): BfetchServerSetup['createStreamingRequestHandler'] => (streamHandler) => async ( context, request, response ) => { const response$ = await streamHandler(context, request); + const compressionDisabled = this.getCompressionDisabled(request); return response.ok({ headers: streamingHeaders, - body: createNDJSONStream(response$, logger), + body: createStream(response$, logger, compressionDisabled), }); };
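A sketch of what a consumer of `addStreamingResponseRoute` looks like with this change. Everything here except the handler contract (`handler(request)` returning an object with `getResponseStream(data)`, as used above) is hypothetical: the route path, payload shape, and the `<Payload, Response>` type arguments are assumptions. The point is that compression is transparent to the handler; it is chosen per request from the `x-chunk-encoding` header.

```ts
import { Subject } from 'rxjs';
import { BfetchServerSetup } from 'src/plugins/bfetch/server';

// Hypothetical route: streams the numbers 0..n-1, one message every 100 ms.
export function registerCountRoute(bfetch: BfetchServerSetup) {
  bfetch.addStreamingResponseRoute<{ n: number }, number>('my_plugin/count', () => ({
    getResponseStream: ({ n }) => {
      const subject = new Subject<number>();
      let i = 0;
      const timer = setInterval(() => {
        subject.next(i++);
        if (i >= n) {
          clearInterval(timer);
          subject.complete();
        }
      }, 100);
      return subject;
    },
  }));
}
```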
diff --git a/src/plugins/bfetch/server/streaming/create_compressed_stream.ts b/src/plugins/bfetch/server/streaming/create_compressed_stream.ts new file mode 100644 index 0000000000000..6814ed1dd7955 --- /dev/null +++ b/src/plugins/bfetch/server/streaming/create_compressed_stream.ts @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { promisify } from 'util'; +import { Observable } from 'rxjs'; +import { catchError, concatMap, finalize } from 'rxjs/operators'; +import { Logger } from 'src/core/server'; +import { Stream, PassThrough } from 'stream'; +import { constants, deflate } from 'zlib'; + +const delimiter = '\n'; +const pDeflate = promisify(deflate); + +async function zipMessageToStream(output: PassThrough, message: string) { + return new Promise(async (resolve, reject) => { + try { + const gzipped = await pDeflate(message, { + flush: constants.Z_SYNC_FLUSH, + }); + output.write(gzipped.toString('base64')); + output.write(delimiter); + resolve(undefined); + } catch (err) { + reject(err); + } + }); +} + +export const createCompressedStream = <Response>( + results: Observable<Response>, + logger: Logger +): Stream => { + const output = new PassThrough(); + + const sub = results + .pipe( + concatMap((message: Response) => { + const strMessage = JSON.stringify(message); + return zipMessageToStream(output, strMessage); + }), + catchError((e) => { + logger.error('Could not serialize or stream a message.'); + logger.error(e); + throw e; + }), + finalize(() => { + output.end(); + sub.unsubscribe(); + }) + ) + .subscribe(); + + return output; +}; diff --git a/src/plugins/bfetch/server/streaming/create_stream.ts b/src/plugins/bfetch/server/streaming/create_stream.ts new file mode 100644 index 0000000000000..7d6981294341b --- /dev/null +++ b/src/plugins/bfetch/server/streaming/create_stream.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { Logger } from 'kibana/server'; +import { Stream } from 'stream'; +import { Observable } from 'rxjs'; +import { createCompressedStream } from './create_compressed_stream'; +import { createNDJSONStream } from './create_ndjson_stream'; + +export function createStream<Response>( + response$: Observable<Response>, + logger: Logger, + compressionDisabled: boolean ): Stream { + return compressionDisabled ? createNDJSONStream(response$, logger) : createCompressedStream(response$, logger); +} diff --git a/src/plugins/bfetch/server/streaming/index.ts b/src/plugins/bfetch/server/streaming/index.ts index 2c31cc329295d..dfd472b5034a1 100644 --- a/src/plugins/bfetch/server/streaming/index.ts +++ b/src/plugins/bfetch/server/streaming/index.ts @@ -7,3 +7,5 @@ */ export * from './create_ndjson_stream'; +export * from './create_compressed_stream'; +export * from './create_stream';
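The wire format these two sides agree on is: per message, `JSON.stringify` → `deflate` (with `Z_SYNC_FLUSH`) → base64 → `'\n'` terminator; the client's `inflateResponse` reverses it per chunk. A self-contained round-trip sketch (Node-side only, for illustration):

```ts
import { promisify } from 'util';
import { deflate, constants } from 'zlib';
import { unzlibSync, strFromU8 } from 'fflate';

const pDeflate = promisify(deflate);

// Encode one message the way createCompressedStream does, then decode it
// the way inflateResponse does, and confirm the round trip is lossless.
async function demo() {
  const message = JSON.stringify({ id: 0, result: { hits: 42 } });

  const chunk = (await pDeflate(message, { flush: constants.Z_SYNC_FLUSH })).toString('base64');
  const wire = chunk + '\n'; // one '\n'-terminated frame on the wire

  const decoded = strFromU8(unzlibSync(Buffer.from(wire.trim(), 'base64')));
  console.log(decoded === message); // true
}

demo();
```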
diff --git a/src/plugins/bfetch/server/ui_settings.ts b/src/plugins/bfetch/server/ui_settings.ts new file mode 100644 index 0000000000000..cf7b13a9af182 --- /dev/null +++ b/src/plugins/bfetch/server/ui_settings.ts @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { i18n } from '@kbn/i18n'; +import { UiSettingsParams } from 'src/core/server'; +import { schema } from '@kbn/config-schema'; +import { DISABLE_BFETCH_COMPRESSION } from '../common'; + +export function getUiSettings(): Record<string, UiSettingsParams<boolean>> { + return { + [DISABLE_BFETCH_COMPRESSION]: { + name: i18n.translate('bfetch.disableBfetchCompression', { + defaultMessage: 'Disable Batch Compression', + }), + value: false, + description: i18n.translate('bfetch.disableBfetchCompressionDesc', { + defaultMessage: + 'Disable batch compression. This allows you to debug individual requests, but increases response size.', + }), + schema: schema.boolean(), + category: [], + }, + }; +} diff --git a/src/plugins/data/common/field_formats/converters/duration.test.ts b/src/plugins/data/common/field_formats/converters/duration.test.ts index fc019720425df..72551f4b7b236 100644 --- a/src/plugins/data/common/field_formats/converters/duration.test.ts +++ b/src/plugins/data/common/field_formats/converters/duration.test.ts @@ -139,17 +139,182 @@ describe('Duration Format', () => { ], }); + testCase({ + inputFormat: 'nanoseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 2, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '0.00 Milliseconds', + }, + { + input: 658, + output: '0.00 Milliseconds', + }, + { + input: 3857, + output: '0.00 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'microseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 2, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '1.99 Milliseconds', + }, + { + input: 658, + output: '0.66 Milliseconds', + }, + { + input: 3857, + output: '3.86 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'microseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 1, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '2.0 Milliseconds', + }, + { + input: 0, + output: '0.0 Milliseconds', + }, + { + input: 658, + output: '0.7 Milliseconds', + }, + { + input: 3857, + output: '3.9 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'seconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + fixtures: [ + { + input: 600, + output: '10 Minutes', + }, + { + input: 30, + output: '30 Seconds', + }, + { + input: 3000, + output: '50 Minutes', + }, + ], + }); + + testCase({ + inputFormat: 'milliseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + useShortSuffix: true, + fixtures: [ + { + input: -123, + output: '-123 ms', + }, + { + input: 1, + output: '1 ms', + }, + { + input: 600, + output: '600 ms', + }, + { + input: 30, + output: '30 ms', + }, + { + input: 3000, + output: '3 s', + }, + { + input: 300000, + output: '5 min', + }, + { + input: 30000000, + output: '8 h', + }, + { + input: 90000000, + output: '1 d', + }, + { + input: 9000000000, + output: '3 mon', + }, + { + input: 99999999999, + output: '3 y', + }, + ], + }); + + testCase({ + inputFormat: 'milliseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + useShortSuffix: true, + includeSpaceWithSuffix: false, + fixtures: [ + { + input: -123, + output: '-123ms', + }, + { + input: 1, + output: '1ms', + }, + { + input: 600, + output: '600ms', + }, + ], + }); + function testCase({ inputFormat, 
outputFormat, outputPrecision, showSuffix, + useShortSuffix, + includeSpaceWithSuffix, fixtures, }: { inputFormat: string; outputFormat: string; outputPrecision: number | undefined; showSuffix: boolean | undefined; + useShortSuffix?: boolean; + includeSpaceWithSuffix?: boolean; fixtures: any[]; }) { fixtures.forEach((fixture: Record<string, any>) => { @@ -160,7 +325,14 @@ describe('Duration Format', () => { outputPrecision ? `, ${outputPrecision} decimals` : '' }`, () => { const duration = new DurationFormat( - { inputFormat, outputFormat, outputPrecision, showSuffix }, + { + inputFormat, + outputFormat, + outputPrecision, + showSuffix, + useShortSuffix, + includeSpaceWithSuffix, + }, jest.fn() ); expect(duration.convert(input)).toBe(output); diff --git a/src/plugins/data/common/field_formats/converters/duration.ts b/src/plugins/data/common/field_formats/converters/duration.ts index ef8c1df3704a8..c9a7091db8471 100644 --- a/src/plugins/data/common/field_formats/converters/duration.ts +++ b/src/plugins/data/common/field_formats/converters/duration.ts @@ -18,6 +18,7 @@ const ratioToSeconds: Record<string, number> = { microseconds: 0.000001, }; const HUMAN_FRIENDLY = 'humanize'; +const HUMAN_FRIENDLY_PRECISE = 'humanizePrecise'; const DEFAULT_OUTPUT_PRECISION = 2; const DEFAULT_INPUT_FORMAT = { text: i18n.translate('data.fieldFormats.duration.inputFormats.seconds', { defaultMessage: 'Seconds', }), kind: 'seconds', }; @@ -89,59 +90,89 @@ const inputFormats = [ }, ]; const DEFAULT_OUTPUT_FORMAT = { - text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize', { - defaultMessage: 'Human Readable', + text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize.approximate', { + defaultMessage: 'Human-readable (approximate)', }), method: 'humanize', }; const outputFormats = [ { ...DEFAULT_OUTPUT_FORMAT }, + { + text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize.precise', { + defaultMessage: 'Human-readable (precise)', + }), + method: 'humanizePrecise', + }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMilliseconds', { defaultMessage: 'Milliseconds', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMilliseconds.short', { + defaultMessage: 'ms', + }), method: 'asMilliseconds', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asSeconds', { defaultMessage: 'Seconds', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asSeconds.short', { + defaultMessage: 's', + }), method: 'asSeconds', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMinutes', { defaultMessage: 'Minutes', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMinutes.short', { + defaultMessage: 'min', + }), method: 'asMinutes', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asHours', { defaultMessage: 'Hours', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asHours.short', { + defaultMessage: 'h', + }), method: 'asHours', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asDays', { defaultMessage: 'Days', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asDays.short', { + defaultMessage: 'd', + }), method: 'asDays', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asWeeks', { defaultMessage: 'Weeks', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asWeeks.short', { + defaultMessage: 'w', + }), method: 'asWeeks', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMonths', { 
defaultMessage: 'Months', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMonths.short', { + defaultMessage: 'mon', + }), method: 'asMonths', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asYears', { defaultMessage: 'Years', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asYears.short', { + defaultMessage: 'y', + }), method: 'asYears', }, ]; @@ -154,6 +185,29 @@ function parseInputAsDuration(val: number, inputFormat: string) { return moment.duration(val * ratio, kind); } +function formatInputHumanPrecise( + val: number, + inputFormat: string, + outputPrecision: number, + useShortSuffix: boolean, + includeSpace: string +) { + const ratio = ratioToSeconds[inputFormat] || 1; + const kind = (inputFormat in ratioToSeconds + ? 'seconds' + : inputFormat) as unitOfTime.DurationConstructor; + const valueInDuration = moment.duration(val * ratio, kind); + + return formatDuration( + val, + valueInDuration, + inputFormat, + outputPrecision, + useShortSuffix, + includeSpace + ); +} + export class DurationFormat extends FieldFormat { static id = FIELD_FORMAT_IDS.DURATION; static title = i18n.translate('data.fieldFormats.duration.title', { defaultMessage: 'Duration', }); @@ -167,11 +221,17 @@ isHuman() { return this.param('outputFormat') === HUMAN_FRIENDLY; } + + isHumanPrecise() { + return this.param('outputFormat') === HUMAN_FRIENDLY_PRECISE; + } + getParamDefaults() { return { inputFormat: DEFAULT_INPUT_FORMAT.kind, outputFormat: DEFAULT_OUTPUT_FORMAT.method, outputPrecision: DEFAULT_OUTPUT_PRECISION, + includeSpaceWithSuffix: true, }; } @@ -180,19 +240,84 @@ export class DurationFormat extends FieldFormat { const outputFormat = this.param('outputFormat') as keyof Duration; const outputPrecision = this.param('outputPrecision'); const showSuffix = Boolean(this.param('showSuffix')); + const useShortSuffix = Boolean(this.param('useShortSuffix')); + const includeSpaceWithSuffix = this.param('includeSpaceWithSuffix'); + + const includeSpace = includeSpaceWithSuffix ? ' ' : ''; + const human = this.isHuman(); + const humanPrecise = this.isHumanPrecise(); + const prefix = val < 0 && human ? i18n.translate('data.fieldFormats.duration.negativeLabel', { defaultMessage: 'minus', }) + ' ' : ''; + const duration = parseInputAsDuration(val, inputFormat) as Record<keyof Duration, Function>; - const formatted = duration[outputFormat](); - const precise = human ? formatted : formatted.toFixed(outputPrecision); + const formatted = humanPrecise + ? formatInputHumanPrecise(val, inputFormat, outputPrecision, useShortSuffix, includeSpace) + : duration[outputFormat](); + + const precise = human || humanPrecise ? formatted : formatted.toFixed(outputPrecision); const type = outputFormats.find(({ method }) => method === outputFormat); - const suffix = showSuffix && type ? ` ${type.text}` : ''; - return prefix + precise + suffix; + const unitText = useShortSuffix ? type?.shortText : type?.text; + + const suffix = showSuffix && unitText && !human ? `${includeSpace}${unitText}` : ''; + + return humanPrecise ? 
precise : prefix + precise + suffix; }; } + +function formatDuration( + val: number, + duration: moment.Duration, + inputFormat: string, + outputPrecision: number, + useShortSuffix: boolean, + includeSpace: string +) { + // return nothing when the duration is falsy or not correctly parsed (P0D) + if (!duration || !duration.isValid()) return; + const units = [ + { unit: duration.years(), nextUnitRate: 12, method: 'asYears' }, + { unit: duration.months(), nextUnitRate: 4, method: 'asMonths' }, + { unit: duration.weeks(), nextUnitRate: 7, method: 'asWeeks' }, + { unit: duration.days(), nextUnitRate: 24, method: 'asDays' }, + { unit: duration.hours(), nextUnitRate: 60, method: 'asHours' }, + { unit: duration.minutes(), nextUnitRate: 60, method: 'asMinutes' }, + { unit: duration.seconds(), nextUnitRate: 1000, method: 'asSeconds' }, + { unit: duration.milliseconds(), nextUnitRate: 1000, method: 'asMilliseconds' }, + ]; + + const getUnitText = (method: string) => { + const type = outputFormats.find(({ method: methodT }) => method === methodT); + return useShortSuffix ? type?.shortText : type?.text; + }; + + for (let i = 0; i < units.length; i++) { + const unitValue = units[i].unit; + if (unitValue >= 1) { + const unitText = getUnitText(units[i].method); + + const value = Math.floor(unitValue); + if (units?.[i + 1]) { + const decimalPointValue = Math.floor(units[i + 1].unit); + return ( + (value + decimalPointValue / units[i].nextUnitRate).toFixed(outputPrecision) + + includeSpace + + unitText + ); + } else { + return unitValue.toFixed(outputPrecision) + includeSpace + unitText; + } + } + } + + const unitValue = units[units.length - 1].unit; + const unitText = getUnitText(units[units.length - 1].method); + + return unitValue.toFixed(outputPrecision) + includeSpace + unitText; +}
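A worked example of the `humanizePrecise` path, matching the fixtures above: the formatter walks years → months → weeks → days → hours → minutes → seconds → milliseconds, picks the first unit with a value ≥ 1, and borrows the next-smaller unit for the decimal part. The `jest.fn()` second argument is the `getConfig` stub used in the tests; in production code a real config getter would be passed.

```ts
import { DurationFormat } from './duration';

const format = new DurationFormat(
  {
    inputFormat: 'milliseconds',
    outputFormat: 'humanizePrecise',
    outputPrecision: 0,
    showSuffix: true,
    useShortSuffix: true,
  },
  jest.fn() // getConfig stub, as in the tests above
);

format.convert(3000); // '3 s'   (seconds = 3, borrow 0 ms for the fraction)
format.convert(300000); // '5 min' (minutes = 5, borrow 0 s for the fraction)
```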
diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts index 41ce7ba4bab4a..1552bed210e8c 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts @@ -240,6 +240,7 @@ export class IndexPattern implements IIndexPattern { * @param script script code * @param fieldType * @param lang + * @deprecated use runtime field instead */ async addScriptedField(name: string, script: string, fieldType: string = 'string') { const scriptedFields = this.getScriptedFields(); @@ -265,6 +266,7 @@ export class IndexPattern implements IIndexPattern { /** * Remove scripted field from field list * @param fieldName + * @deprecated use runtime field instead */ removeScriptedField(fieldName: string) { @@ -274,10 +276,18 @@ } } + /** + * + * @deprecated use runtime field instead + */ getNonScriptedFields() { return [...this.fields.getAll().filter((field) => !field.scripted)]; } + /** + * + * @deprecated use runtime field instead + */ getScriptedFields() { return [...this.fields.getAll().filter((field) => field.scripted)]; } diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md index fde7075d9e760..069b0a21c9c77 100644 --- a/src/plugins/data/public/public.api.md +++ b/src/plugins/data/public/public.api.md @@ -1307,6 +1307,7 @@ export class IndexPattern implements IIndexPattern { // Warning: (ae-forgotten-export) The symbol "IndexPatternDeps" needs to be exported by the entry point index.d.ts constructor({ spec, fieldFormats, shortDotsEnable, metaFields, }: IndexPatternDeps); addRuntimeField(name: string, runtimeField: RuntimeField): void; + // @deprecated addScriptedField(name: string, script: string, fieldType?: string): Promise<void>; readonly allowNoIndex: boolean; // (undocumented) @@ -1366,7 +1367,7 @@ export class IndexPattern implements IIndexPattern { getFieldByName(name: string): IndexPatternField | undefined; getFormatterForField(field: IndexPatternField | IndexPatternField['spec'] | IFieldType): FieldFormat; getFormatterForFieldNoDefault(fieldname: string): FieldFormat | undefined; - // (undocumented) + // @deprecated (undocumented) getNonScriptedFields(): IndexPatternField[]; getOriginalSavedObjectBody: () => { fieldAttrs?: string | undefined; @@ -1379,7 +1380,7 @@ export class IndexPattern implements IIndexPattern { typeMeta?: string | undefined; type?: string | undefined; }; - // (undocumented) + // @deprecated (undocumented) getScriptedFields(): IndexPatternField[]; getSourceFiltering(): { excludes: any[]; @@ -1397,6 +1398,7 @@ export class IndexPattern implements IIndexPattern { // (undocumented) metaFields: string[]; removeRuntimeField(name: string): void; + // @deprecated removeScriptedField(fieldName: string): void; resetOriginalSavedObjectBody: () => void; // (undocumented) diff --git a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap index 9896a6dbdc7b7..a0a7e54d27532 100644
[Jest snapshot churn, garbled beyond recovery in extraction: updated table-header markup in the "Inspector Data View component" snapshots, and in the x-pack security_solution TrustedAppsGrid / TrustedAppsList snapshots that followed (Name, OS, Date Created, Created By, Actions, Field, Operator, Value column headers).]