Commit
Merge branch 'master' into feature/pipeline_debugger
elasticmachine authored Aug 5, 2020
2 parents 18e1a3d + 2280927 commit 8035de2
Showing 1,090 changed files with 17,765 additions and 8,543 deletions.
38 changes: 38 additions & 0 deletions .ci/Dockerfile
@@ -0,0 +1,38 @@
# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable.
# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts

ARG NODE_VERSION=10.21.0

FROM node:${NODE_VERSION} AS base

RUN apt-get update && \
apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \
rm -rf /var/lib/apt/lists/*

RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
&& sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
&& apt-get update \
&& apt-get install -y rsync jq bsdtar google-chrome-stable \
--no-install-recommends \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
&& curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
&& unzip vault.zip \
&& rm vault.zip \
&& chmod +x vault \
&& mv vault /usr/local/bin/vault

RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana

COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
RUN chmod +x /usr/local/bin/bash_standard_lib.sh

COPY ./runbld /usr/local/bin/runbld
RUN chmod +x /usr/local/bin/runbld

USER kibana
28 changes: 16 additions & 12 deletions .ci/Jenkinsfile_baseline_capture
@@ -7,18 +7,22 @@ kibanaPipeline(timeoutMinutes: 120) {
githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') {
ciStats.trackBuild {
catchError {
parallel([
'oss-visualRegression': {
workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1)
}
},
'xpack-visualRegression': {
workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1)
}
},
])
withEnv([
'CI_PARALLEL_PROCESS_NUMBER=1'
]) {
parallel([
'oss-visualRegression': {
workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')()
}
},
'xpack-visualRegression': {
workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')()
}
},
])
}
}

kibanaPipeline.sendMail()
64 changes: 62 additions & 2 deletions .ci/pipeline-library/src/test/slackNotifications.groovy
@@ -9,6 +9,7 @@ class SlackNotificationsTest extends KibanaBasePipelineTest {
super.setUp()

helper.registerAllowedMethod('slackSend', [Map.class], null)
prop('buildState', loadScript("vars/buildState.groovy"))
slackNotifications = loadScript('vars/slackNotifications.groovy')
}

@@ -25,13 +26,49 @@ class SlackNotificationsTest extends KibanaBasePipelineTest {
}

@Test
void 'sendFailedBuild() should call slackSend() with message'() {
void 'sendFailedBuild() should call slackSend() with an in-progress message'() {
mockFailureBuild()

slackNotifications.sendFailedBuild()

def args = fnMock('slackSend').args[0]

def expected = [
channel: '#kibana-operations-alerts',
username: 'Kibana Operations',
iconEmoji: ':jenkins:',
color: 'danger',
message: ':hourglass_flowing_sand: elastic / kibana # master #1',
]

expected.each {
assertEquals(it.value.toString(), args[it.key].toString())
}

assertEquals(
":hourglass_flowing_sand: *<http://jenkins.localhost:8080/job/elastic+kibana+master/1/|elastic / kibana # master #1>*",
args.blocks[0].text.text.toString()
)

assertEquals(
"*Failed Steps*\n• <http://jenkins.localhost:8080|Execute test task>",
args.blocks[1].text.text.toString()
)

assertEquals(
"*Test Failures*\n• <https://localhost/|x-pack/test/functional/apps/fake/test·ts.Fake test &lt;Component&gt; should &amp; pass &amp;>",
args.blocks[2].text.text.toString()
)
}

@Test
void 'sendFailedBuild() should call slackSend() with message'() {
mockFailureBuild()

slackNotifications.sendFailedBuild(isFinal: true)

def args = fnMock('slackSend').args[0]

def expected = [
channel: '#kibana-operations-alerts',
username: 'Kibana Operations',
@@ -65,7 +102,7 @@ class SlackNotificationsTest extends KibanaBasePipelineTest {
mockFailureBuild()
def counter = 0
helper.registerAllowedMethod('slackSend', [Map.class], { ++counter > 1 })
slackNotifications.sendFailedBuild()
slackNotifications.sendFailedBuild(isFinal: true)

def args = fnMocks('slackSend')[1].args[0]

@@ -88,6 +125,29 @@ class SlackNotificationsTest extends KibanaBasePipelineTest {
)
}

@Test
void 'sendFailedBuild() should call slackSend() with a channel id and timestamp on second call'() {
mockFailureBuild()
helper.registerAllowedMethod('slackSend', [Map.class], { [ channelId: 'CHANNEL_ID', ts: 'TIMESTAMP' ] })
slackNotifications.sendFailedBuild(isFinal: false)
slackNotifications.sendFailedBuild(isFinal: true)

def args = fnMocks('slackSend')[1].args[0]

def expected = [
channel: 'CHANNEL_ID',
timestamp: 'TIMESTAMP',
username: 'Kibana Operations',
iconEmoji: ':jenkins:',
color: 'danger',
message: ':broken_heart: elastic / kibana # master #1',
]

expected.each {
assertEquals(it.value.toString(), args[it.key].toString())
}
}

@Test
void 'getTestFailures() should truncate list of failures to 10'() {
prop('testUtils', [
2 changes: 1 addition & 1 deletion .ci/runbld_no_junit.yml
@@ -3,4 +3,4 @@
profiles:
- ".*": # Match any job
tests:
junit-filename-pattern: "8d8bd494-d909-4e67-a052-7e8b5aaeb5e4" # A bogus path that should never exist
junit-filename-pattern: false
2 changes: 2 additions & 0 deletions .gitignore
@@ -49,6 +49,8 @@ npm-debug.log*
.tern-project
.nyc_output
.ci/pipeline-library/build/
.ci/runbld
.ci/bash_standard_lib.sh
.gradle

# apm plugin
55 changes: 7 additions & 48 deletions Jenkinsfile
@@ -4,59 +4,18 @@ library 'kibana-pipeline-library'
kibanaLibrary.load()

kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true) {
githubPr.withDefaultPrComments {
ciStats.trackBuild {
catchError {
retryable.enable()
parallel([
'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
// 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
]),
'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
'xpack-savedObjectsFieldMetrics': kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'),
'xpack-securitySolutionCypress': { processNumber ->
whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/', 'x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/', 'x-pack/plugins/triggers_actions_ui/public/application/context/actions_connectors_context.tsx']) {
kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')(processNumber)
}
},

// 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
]),
])
slackNotifications.onFailure(disabled: !params.NOTIFY_ON_FAILURE) {
githubPr.withDefaultPrComments {
ciStats.trackBuild {
catchError {
retryable.enable()
kibanaPipeline.allCiTasks()
}
}
}
}

if (params.NOTIFY_ON_FAILURE) {
slackNotifications.onFailure()
kibanaPipeline.sendMail()
}
}
Binary file added docs/apm/images/apm-anomaly-alert.png
49 changes: 37 additions & 12 deletions docs/apm/machine-learning.asciidoc
@@ -1,36 +1,61 @@
[role="xpack"]
[[machine-learning-integration]]
=== integration
=== Machine learning integration

++++
<titleabbrev>Integrate with machine learning</titleabbrev>
++++

The Machine Learning integration initiates a new job predefined to calculate anomaly scores on APM transaction durations.
Jobs can be created per transaction type, and are based on the service's average response time.
The Machine learning integration initiates a new job predefined to calculate anomaly scores on APM transaction durations.
With this integration, you can quickly pinpoint anomalous transactions and see the health of
any upstream and downstream services.

After a machine learning job is created, results are shown in two places:
Machine learning jobs are created per environment, and are based on a service's average response time.
Because jobs are created at the environment level,
you can add new services to your existing environments without the need for additional machine learning jobs.

The transaction duration graph will show the expected bounds and add an annotation when the anomaly score is 75 or above.
After a machine learning job is created, results are shown in two places:

* The transaction duration chart will show the expected bounds and add an annotation when the anomaly score is 75 or above.
+
[role="screenshot"]
image::apm/images/apm-ml-integration.png[Example view of anomaly scores on response times in the APM app]

Service maps will display a color-coded anomaly indicator based on the detected anomaly score.

* Service maps will display a color-coded anomaly indicator based on the detected anomaly score.
+
[role="screenshot"]
image::apm/images/apm-service-map-anomaly.png[Example view of anomaly scores on service maps in the APM app]

[float]
[[create-ml-integration]]
=== Create a new machine learning job
=== Enable anomaly detection

To enable machine learning anomaly detection:

. From the Services overview, Traces overview, or Service Map tab,
select **Anomaly detection**.

. Click **Create ML Job**.

To enable machine learning anomaly detection, first choose a service to monitor.
Then, select **Integrations** > **Enable ML anomaly detection** and click **Create job**.
. Machine learning jobs are created at the environment level.
Select all of the service environments that you want to enable anomaly detection in.
Anomalies will surface for all services and transaction types within the selected environments.

. Click **Create Jobs**.

That's it! After a few minutes, the job will begin calculating results;
it might take additional time for results to appear on your graph.
Jobs can be managed in *Machine Learning jobs management*.
it might take additional time for results to appear on your service maps.
Existing jobs can be managed in *Machine Learning jobs management*.

APM specific anomaly detection wizards are also available for certain Agents.
See the machine learning {ml-docs}/ootb-ml-jobs-apm.html[APM anomaly detection configurations] for more information.

[float]
[[warning-ml-integration]]
=== Anomaly detection warning

To make machine learning as easy as possible to set up,
the APM app will warn you when filtered to an environment without a machine learning job.

[role="screenshot"]
image::apm/images/apm-anomaly-alert.png[Example view of anomaly alert in the APM app]
@@ -0,0 +1,18 @@
<!-- Do not edit this file. It is automatically generated by API Documenter. -->

[Home](./index.md) &gt; [kibana-plugin-core-server](./kibana-plugin-core-server.md) &gt; [KibanaRequestEvents](./kibana-plugin-core-server.kibanarequestevents.md) &gt; [completed$](./kibana-plugin-core-server.kibanarequestevents.completed_.md)

## KibanaRequestEvents.completed$ property

Observable that emits once if and when the request has been completely handled.

<b>Signature:</b>

```typescript
completed$: Observable<void>;
```

## Remarks

The request may be considered completed if: - A response has been sent to the client; or - The request was aborted.
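
An illustrative sketch (not part of this commit's diff) of how a route handler might consume the new `completed$` event; the route path, import path, and logging below are assumptions, not taken from the source.

```typescript
import type { IRouter } from 'src/core/server';

// Hedged sketch: run per-request cleanup once the request has been fully handled.
export function registerExampleRoute(router: IRouter) {
  router.get(
    { path: '/internal/example/ping', validate: false },
    async (context, request, response) => {
      const startedAt = Date.now();
      request.events.completed$.subscribe(() => {
        // Emits once, whether a response was sent or the request was aborted.
        console.log(`request completed after ${Date.now() - startedAt}ms`);
      });
      return response.ok({ body: { ok: true } });
    }
  );
}
```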

@@ -17,4 +17,5 @@ export interface KibanaRequestEvents
| Property | Type | Description |
| --- | --- | --- |
| [aborted$](./kibana-plugin-core-server.kibanarequestevents.aborted_.md) | <code>Observable&lt;void&gt;</code> | Observable that emits once if and when the request has been aborted. |
| [completed$](./kibana-plugin-core-server.kibanarequestevents.completed_.md) | <code>Observable&lt;void&gt;</code> | Observable that emits once if and when the request has been completely handled. |
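
For reference, the documented interface corresponds roughly to the shape below (reconstructed from the table above rather than copied from the source):

```typescript
import { Observable } from 'rxjs';

export interface KibanaRequestEvents {
  /** Observable that emits once if and when the request has been aborted. */
  aborted$: Observable<void>;
  /** Observable that emits once if and when the request has been completely handled. */
  completed$: Observable<void>;
}
```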

@@ -35,7 +35,7 @@ export declare class SearchInterceptor

| Method | Modifiers | Description |
| --- | --- | --- |
| [runSearch(request, combinedSignal)](./kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md) | | |
| [runSearch(request, signal)](./kibana-plugin-plugins-data-public.searchinterceptor.runsearch.md) | | |
| [search(request, options)](./kibana-plugin-plugins-data-public.searchinterceptor.search.md) | | Searches using the given <code>search</code> method. Overrides the <code>AbortSignal</code> with one that will abort either when <code>cancelPending</code> is called, when the request times out, or when the original <code>AbortSignal</code> is aborted. Updates the <code>pendingCount</code> when the request is started/finalized. |
| [setupTimers(options)](./kibana-plugin-plugins-data-public.searchinterceptor.setuptimers.md) | | |
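
The `search()` description above — overriding the `AbortSignal` with one that aborts on `cancelPending`, on timeout, or when the caller's own signal aborts — boils down to combining several signals into one. A minimal sketch of that idea; the helper name is an assumption, not the plugin's actual implementation.

```typescript
// Combine several AbortSignals into a single signal that aborts when any of them does.
function combineAbortSignals(...signals: AbortSignal[]): AbortSignal {
  const controller = new AbortController();
  const abort = () => controller.abort();
  for (const signal of signals) {
    if (signal.aborted) {
      abort();
    } else {
      signal.addEventListener('abort', abort, { once: true });
    }
  }
  return controller.signal;
}

// e.g. runSearch(request, combineAbortSignals(userSignal, timeoutSignal))
```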

@@ -7,15 +7,15 @@
<b>Signature:</b>

```typescript
protected runSearch(request: IEsSearchRequest, combinedSignal: AbortSignal): Observable<IEsSearchResponse>;
protected runSearch(request: IEsSearchRequest, signal: AbortSignal): Observable<IEsSearchResponse>;
```

## Parameters

| Parameter | Type | Description |
| --- | --- | --- |
| request | <code>IEsSearchRequest</code> | |
| combinedSignal | <code>AbortSignal</code> | |
| signal | <code>AbortSignal</code> | |

<b>Returns:</b>

@@ -9,5 +9,5 @@ Used internally for telemetry
<b>Signature:</b>

```typescript
usage: SearchUsage;
usage?: SearchUsage;
```
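
Because `usage` is now optional, callers have to guard access before reporting telemetry. A minimal sketch, assuming a `trackSuccess(duration)` method on `SearchUsage` — an assumption for illustration, not verified from this diff.

```typescript
// Assumed shape for illustration only.
interface SearchUsage {
  trackSuccess(duration: number): Promise<void> | void;
}

function reportSearchDuration(usage: SearchUsage | undefined, durationMs: number) {
  // Optional chaining keeps this safe when no usage collector was provided.
  usage?.trackSuccess(durationMs);
}
```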