Skip to content

Commit

Permalink
Merge branch '7.x' into backport/7.x/pr-51823
Browse files Browse the repository at this point in the history
  • Loading branch information
elasticmachine authored Feb 10, 2020
2 parents 689fb07 + cb550c9 commit c345b86
Show file tree
Hide file tree
Showing 50 changed files with 2,173 additions and 904 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -108,8 +108,6 @@ describe('getSortedObjectsForExport()', () => {
"namespace": undefined,
"perPage": 500,
"search": undefined,
"sortField": "_id",
"sortOrder": "asc",
"type": Array [
"index-pattern",
"search",
Expand Down Expand Up @@ -256,8 +254,6 @@ describe('getSortedObjectsForExport()', () => {
"namespace": undefined,
"perPage": 500,
"search": "foo",
"sortField": "_id",
"sortOrder": "asc",
"type": Array [
"index-pattern",
"search",
Expand Down Expand Up @@ -345,8 +341,6 @@ describe('getSortedObjectsForExport()', () => {
"namespace": "foo",
"perPage": 500,
"search": undefined,
"sortField": "_id",
"sortOrder": "asc",
"type": Array [
"index-pattern",
"search",
Expand Down Expand Up @@ -399,6 +393,79 @@ describe('getSortedObjectsForExport()', () => {
).rejects.toThrowErrorMatchingInlineSnapshot(`"Can't export more than 1 objects"`);
});

// Regression test for removing the server-side `sortField: '_id'` from the
// find() call: the export service must now order fetched objects by id itself.
test('sorts objects within type', async () => {
  // Mock a find() response whose saved_objects arrive deliberately out of
  // id order (3, 1, 2) so the re-sorting is observable in the output.
  savedObjectsClient.find.mockResolvedValueOnce({
    total: 3,
    per_page: 10000,
    page: 1,
    saved_objects: [
      {
        id: '3',
        type: 'index-pattern',
        attributes: {
          name: 'baz',
        },
        references: [],
      },
      {
        id: '1',
        type: 'index-pattern',
        attributes: {
          name: 'foo',
        },
        references: [],
      },
      {
        id: '2',
        type: 'index-pattern',
        attributes: {
          name: 'bar',
        },
        references: [],
      },
    ],
  });
  const exportStream = await getSortedObjectsForExport({
    exportSizeLimit: 10000,
    savedObjectsClient,
    types: ['index-pattern'],
  });
  // Drain the export stream into an array; the stream yields the exported
  // objects followed by a trailing export-details record.
  const response = await readStreamToCompletion(exportStream);
  // Expect objects ordered by id ('1', '2', '3'), then the summary entry.
  expect(response).toMatchInlineSnapshot(`
    Array [
      Object {
        "attributes": Object {
          "name": "foo",
        },
        "id": "1",
        "references": Array [],
        "type": "index-pattern",
      },
      Object {
        "attributes": Object {
          "name": "bar",
        },
        "id": "2",
        "references": Array [],
        "type": "index-pattern",
      },
      Object {
        "attributes": Object {
          "name": "baz",
        },
        "id": "3",
        "references": Array [],
        "type": "index-pattern",
      },
      Object {
        "exportedCount": 3,
        "missingRefCount": 0,
        "missingReferences": Array [],
      },
    ]
  `);
});

test('exports selected objects and sorts them', async () => {
savedObjectsClient.bulkGet.mockResolvedValueOnce({
saved_objects: [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

import Boom from 'boom';
import { createListStream } from '../../../../legacy/utils/streams';
import { SavedObjectsClientContract } from '../types';
import { SavedObjectsClientContract, SavedObject } from '../types';
import { fetchNestedDependencies } from './inject_nested_depdendencies';
import { sortObjects } from './sort_objects';

Expand Down Expand Up @@ -105,15 +105,17 @@ async function fetchObjectsToExport({
const findResponse = await savedObjectsClient.find({
type: types,
search,
sortField: '_id',
sortOrder: 'asc',
perPage: exportSizeLimit,
namespace,
});
if (findResponse.total > exportSizeLimit) {
throw Boom.badRequest(`Can't export more than ${exportSizeLimit} objects`);
}
return findResponse.saved_objects;

// can't sort server-side by _id (it requires fielddata), so sort the results here by id instead
return findResponse.saved_objects.sort((a: SavedObject, b: SavedObject) =>
a.id > b.id ? 1 : -1
);
} else {
throw Boom.badRequest('Either `type` or `objects` are required.');
}
Expand All @@ -137,14 +139,17 @@ export async function getSortedObjectsForExport({
exportSizeLimit,
namespace,
});
let exportedObjects = [...rootObjects];
let exportedObjects = [];
let missingReferences: SavedObjectsExportResultDetails['missingReferences'] = [];

if (includeReferencesDeep) {
const fetchResult = await fetchNestedDependencies(rootObjects, savedObjectsClient, namespace);
exportedObjects = fetchResult.objects;
exportedObjects = sortObjects(fetchResult.objects);
missingReferences = fetchResult.missingRefs;
} else {
exportedObjects = sortObjects(rootObjects);
}
exportedObjects = sortObjects(exportedObjects);

const exportDetails: SavedObjectsExportResultDetails = {
exportedCount: exportedObjects.length,
missingRefCount: missingReferences.length,
Expand Down
8 changes: 8 additions & 0 deletions src/legacy/server/logging/log_reporter.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,14 @@ import LogFormatJson from './log_format_json';
import LogFormatString from './log_format_string';
import { LogInterceptor } from './log_interceptor';

// NOTE: the legacy logger creates a new stream for each new access.
// In https://github.com/elastic/kibana/pull/55937 we hit the default
// max-listeners limit of 10 on process.stdout, which triggers a lengthy
// warning/error every time the server starts.
// Until the legacy logger is removed, raise the hard limit here so we
// can keep using it without the noise.
process.stdout.setMaxListeners(15);

export function getLoggerStream({ events, config }) {
const squeeze = new Squeeze(events);
const format = config.json ? new LogFormatJson(config) : new LogFormatString(config);
Expand Down
2 changes: 1 addition & 1 deletion x-pack/.i18nrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
"xpack.logstash": "legacy/plugins/logstash",
"xpack.main": "legacy/plugins/xpack_main",
"xpack.monitoring": "legacy/plugins/monitoring",
"xpack.remoteClusters": "legacy/plugins/remote_clusters",
"xpack.remoteClusters": ["plugins/remote_clusters", "legacy/plugins/remote_clusters"],
"xpack.reporting": ["plugins/reporting", "legacy/plugins/reporting"],
"xpack.rollupJobs": "legacy/plugins/rollup",
"xpack.searchProfiler": "plugins/searchprofiler",
Expand Down
Loading

0 comments on commit c345b86

Please sign in to comment.