Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into 82710
Browse files Browse the repository at this point in the history
  • Loading branch information
alexwizp committed Nov 17, 2020
2 parents 67988ab + 80f63f6 commit 850da04
Show file tree
Hide file tree
Showing 226 changed files with 8,064 additions and 1,912 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,10 @@ server.route({
=== Example 3: Discover

Discover takes advantage of subfeature privileges to allow fine-grained access control. In this example,
a single "Create Short URLs" subfeature privilege is defined, which allows users to grant access to this feature without having to grant the `all` privilege to Discover. In other words, you can grant `read` access to Discover, and also grant the ability to create short URLs.
two subfeature privileges are defined: "Create Short URLs", and "Generate PDF Reports". These allow users to grant access to this feature without having to grant the `all` privilege to Discover. In other words, you can grant `read` access to Discover, and also grant the ability to create short URLs or generate PDF reports.

Notice the "Generate PDF Reports" subfeature privilege has an additional `minimumLicense` option. Kibana will only offer this subfeature privilege if the
license requirement is satisfied.

["source","javascript"]
-----------
Expand Down Expand Up @@ -259,6 +262,28 @@ public setup(core, { features }) {
},
],
},
{
groupType: 'independent',
privileges: [
{
id: 'pdf_generate',
name: i18n.translate(
'xpack.features.ossFeatures.discoverGeneratePDFReportsPrivilegeName',
{
defaultMessage: 'Generate PDF Reports',
}
),
minimumLicense: 'platinum',
includeIn: 'all',
savedObject: {
all: [],
read: [],
},
api: ['generatePDFReports'],
ui: ['generatePDFReports'],
},
],
},
],
},
],
Expand Down
2 changes: 1 addition & 1 deletion docs/getting-started/quick-start-guide.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -140,4 +140,4 @@ For more information, refer to <<lens, *Lens*>>.

If you are ready to add your own data, refer to <<connect-to-elasticsearch,Add data to {kib}>>.

If you want to ingest your data, refer to {ingest-guide}/ingest-management-getting-started.html[Quick start: Get logs and metrics into the Elastic Stack].
If you want to ingest your data, refer to {ingest-guide}/fleet-quick-start.html[Quick start: Get logs and metrics into the Elastic Stack].
2 changes: 1 addition & 1 deletion docs/setup/connect-to-elasticsearch.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ ship with dashboards and visualizations,
so you can quickly get insights into your data.

To get started, refer to
{ingest-guide}/ingest-management-getting-started.html[Quick start: Get logs and metrics into the Elastic Stack].
{ingest-guide}/fleet-quick-start.html[Quick start: Get logs and metrics into the Elastic Stack].

[role="screenshot"]
image::images/add-data-fleet.png[Add data using Fleet]
Expand Down
1 change: 1 addition & 0 deletions docs/user/security/authentication/index.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ You can also configure both PKI and basic authentication for the same {kib} inst

[source,yaml]
--------------------------------------------------------------------------------
server.ssl.clientAuthentication: optional
xpack.security.authc.providers:
pki.pki1:
order: 0
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -723,7 +723,7 @@
"less": "npm:@elastic/less@2.7.3-kibana",
"license-checker": "^16.0.0",
"listr": "^0.14.1",
"lmdb-store": "^0.6.10",
"lmdb-store": "^0.8.11",
"load-grunt-config": "^3.0.1",
"loader-utils": "^1.2.3",
"log-symbols": "^2.2.0",
Expand Down
51 changes: 25 additions & 26 deletions packages/kbn-optimizer/src/node/cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@

import Path from 'path';

// @ts-expect-error no types available
import * as LmdbStore from 'lmdb-store';
import { REPO_ROOT, UPSTREAM_BRANCH } from '@kbn/dev-utils';

Expand All @@ -37,25 +36,11 @@ const MINUTE = 1000 * 60;
const HOUR = MINUTE * 60;
const DAY = HOUR * 24;

interface Lmdb<T> {
get(key: string): T | undefined;
put(key: string, value: T, version?: number, ifVersion?: number): Promise<boolean>;
remove(key: string, ifVersion?: number): Promise<boolean>;
openDB<T>(options: { name: string; encoding: 'msgpack' | 'string' | 'json' | 'binary' }): Lmdb<T>;
getRange(options?: {
start?: T;
end?: T;
reverse?: boolean;
limit?: number;
versions?: boolean;
}): Iterable<{ key: string; value: T }>;
}

export class Cache {
private readonly codes: Lmdb<string>;
private readonly atimes: Lmdb<string>;
private readonly mtimes: Lmdb<string>;
private readonly sourceMaps: Lmdb<any>;
private readonly codes: LmdbStore.RootDatabase;
private readonly atimes: LmdbStore.Database;
private readonly mtimes: LmdbStore.Database;
private readonly sourceMaps: LmdbStore.Database;
private readonly prefix: string;

constructor(config: { prefix: string }) {
Expand All @@ -64,19 +49,23 @@ export class Cache {
this.codes = LmdbStore.open({
name: 'codes',
path: CACHE_DIR,
// @ts-expect-error See https://github.com/DoctorEvidence/lmdb-store/pull/18
maxReaders: 500,
});

// @ts-expect-error See https://github.com/DoctorEvidence/lmdb-store/pull/18
this.atimes = this.codes.openDB({
name: 'atimes',
encoding: 'string',
});

// @ts-expect-error See https://github.com/DoctorEvidence/lmdb-store/pull/18
this.mtimes = this.codes.openDB({
name: 'mtimes',
encoding: 'string',
});

// @ts-expect-error See https://github.com/DoctorEvidence/lmdb-store/pull/18
this.sourceMaps = this.codes.openDB({
name: 'sourceMaps',
encoding: 'msgpack',
Expand All @@ -92,7 +81,7 @@ export class Cache {
}

getMtime(path: string) {
return this.mtimes.get(this.getKey(path));
return this.safeGet<string>(this.mtimes, this.getKey(path));
}

getCode(path: string) {
Expand All @@ -103,11 +92,11 @@ export class Cache {
// touched in a long time (currently 30 days)
this.atimes.put(key, GLOBAL_ATIME).catch(reportError);

return this.codes.get(key);
return this.safeGet<string>(this.codes, key);
}

getSourceMap(path: string) {
return this.sourceMaps.get(this.getKey(path));
return this.safeGet<any>(this.sourceMaps, this.getKey(path));
}

update(path: string, file: { mtime: string; code: string; map: any }) {
Expand All @@ -125,17 +114,27 @@ export class Cache {
return `${this.prefix}${path}`;
}

/**
 * Read `key` from `db`, treating any error thrown by the read as a sign
 * that the stored value is corrupt.
 *
 * Returns the stored value cast to `V`, `undefined` when the key is absent,
 * or `undefined` after evicting the entry when `db.get()` throws.
 * NOTE(review): the cast to `V | undefined` is unchecked — callers must pass
 * the type parameter matching the db's configured encoding.
 */
private safeGet<V>(db: LmdbStore.Database, key: string) {
  try {
    return db.get(key) as V | undefined;
  } catch (error) {
    // get errors indicate that a key value is corrupt in some way, so remove it
    // (removal is synchronous; the implicit return of `undefined` makes the
    // corrupt entry look like a cache miss to the caller)
    db.removeSync(key);
  }
}

private async pruneOldKeys() {
try {
const ATIME_LIMIT = Date.now() - 30 * DAY;
const BATCH_SIZE = 1000;

const validKeys: string[] = [];
const invalidKeys: string[] = [];
const validKeys: LmdbStore.Key[] = [];
const invalidKeys: LmdbStore.Key[] = [];

// @ts-expect-error See https://github.com/DoctorEvidence/lmdb-store/pull/18
for (const { key, value } of this.atimes.getRange()) {
const atime = parseInt(value, 10);
if (atime < ATIME_LIMIT) {
const atime = parseInt(`${value}`, 10);
if (Number.isNaN(atime) || atime < ATIME_LIMIT) {
invalidKeys.push(key);
} else {
validKeys.push(key);
Expand Down
1 change: 1 addition & 0 deletions packages/kbn-plugin-helpers/src/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ export function runCli() {

await Tasks.initTargets(context);
await Tasks.optimize(context);
await Tasks.writePublicAssets(context);
await Tasks.writeServerFiles(context);
await Tasks.yarnInstall(context);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,8 @@ it('builds a generated plugin into a viable archive', async () => {
│ info initialized, 0 bundles cached
│ info starting worker [1 bundle]
│ succ 1 bundles compiled successfully after <time>
info copying source into the build and converting with babel
info copying assets from \`public/assets\` to build
info copying server source into the build and converting with babel
info running yarn to install dependencies
info compressing plugin into [fooTestPlugin-7.5.0.zip]"
`);
Expand Down
1 change: 1 addition & 0 deletions packages/kbn-plugin-helpers/src/tasks/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,6 @@
export * from './clean';
export * from './create_archive';
export * from './optimize';
export * from './write_public_assets';
export * from './write_server_files';
export * from './yarn_install';
45 changes: 45 additions & 0 deletions packages/kbn-plugin-helpers/src/tasks/write_public_assets.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import { pipeline } from 'stream';
import { promisify } from 'util';

import vfs from 'vinyl-fs';

import { BuildContext } from '../build_context';

const asyncPipeline = promisify(pipeline);

/**
 * Copy a plugin's static assets (`public/assets/**`) into the build directory.
 *
 * Skips plugins whose manifest does not declare a `ui` entry, since those
 * ship no browser-side files. Missing asset directories are tolerated via
 * `allowEmpty`, so server-only layouts without `public/assets` build cleanly.
 */
export async function writePublicAssets({ log, plugin, sourceDir, buildDir }: BuildContext) {
  // nothing to copy for server-only plugins
  if (!plugin.manifest.ui) {
    return;
  }

  log.info('copying assets from `public/assets` to build');

  // read assets relative to the plugin source root so their paths are
  // reproduced unchanged under the build directory
  const assetStream = vfs.src(['public/assets/**/*'], {
    cwd: sourceDir,
    base: sourceDir,
    buffer: true,
    allowEmpty: true,
  });

  await asyncPipeline(assetStream, vfs.dest(buildDir));
}
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ export async function writeServerFiles({
buildDir,
kibanaVersion,
}: BuildContext) {
log.info('copying source into the build and converting with babel');
log.info('copying server source into the build and converting with babel');

// copy source files and apply some babel transformations in the process
await asyncPipeline(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import { parsedExternallyDefinedCollector } from './parsed_externally_defined_collector';
import { parsedImportedSchemaCollector } from './parsed_imported_schema';
import { parsedImportedUsageInterface } from './parsed_imported_usage_interface';
import { parsedIndexedInterfaceWithNoMatchingSchema } from './parsed_indexed_interface_with_not_matching_schema';
import { parsedNestedCollector } from './parsed_nested_collector';
import { parsedSchemaDefinedWithSpreadsCollector } from './parsed_schema_defined_with_spreads_collector';
import { parsedWorkingCollector } from './parsed_working_collector';
import { ParsedUsageCollection } from '../ts_parser';

// Aggregated list of every parsed usage-collector fixture exported from this
// directory. Spread entries are fixture modules that export multiple
// collections; the bare entries each contribute a single
// ParsedUsageCollection. NOTE(review): consumers presumably compare against
// this list in order — confirm before reordering entries.
export const allExtractedCollectors: ParsedUsageCollection[] = [
  ...parsedExternallyDefinedCollector,
  ...parsedImportedSchemaCollector,
  ...parsedImportedUsageInterface,
  parsedIndexedInterfaceWithNoMatchingSchema,
  parsedNestedCollector,
  parsedSchemaDefinedWithSpreadsCollector,
  parsedWorkingCollector,
];
Original file line number Diff line number Diff line change
Expand Up @@ -36,16 +36,14 @@ export const parsedIndexedInterfaceWithNoMatchingSchema: ParsedUsageCollection =
fetch: {
typeName: 'Usage',
typeDescriptor: {
'': {
'@@INDEX@@': {
count_1: {
kind: SyntaxKind.NumberKeyword,
type: 'NumberKeyword',
},
count_2: {
kind: SyntaxKind.NumberKeyword,
type: 'NumberKeyword',
},
'@@INDEX@@': {
count_1: {
kind: SyntaxKind.NumberKeyword,
type: 'NumberKeyword',
},
count_2: {
kind: SyntaxKind.NumberKeyword,
type: 'NumberKeyword',
},
},
},
Expand Down
Loading

0 comments on commit 850da04

Please sign in to comment.