[7.15] [RAC] Replace usages of kibana.alert.status: open with active (#109033) #110384

Merged 2 commits on Aug 27, 2021
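Every change in this backport follows the same pattern: alert lifecycle status values are no longer written as raw string literals (`'open'` / `'closed'`) but referenced through the constants exported by `@kbn/rule-data-utils`, whose values are `active` and `recovered`. A minimal sketch of the before/after shape (the surrounding object is illustrative, not copied from any one file):

```ts
import { ALERT_STATUS, ALERT_STATUS_ACTIVE } from '@kbn/rule-data-utils';

// Before: the lifecycle status was hard-coded as a string.
// [ALERT_STATUS]: ['open'],

// After: the shared constant resolves to 'active', keeping fixtures,
// tests, and queries aligned with the technical field definitions.
const exampleAlertFields = {
  [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
};
```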
@@ -16,6 +16,7 @@ import {
ALERT_SEVERITY,
ALERT_START,
ALERT_STATUS,
+ ALERT_STATUS_ACTIVE,
ALERT_UUID,
SPACE_IDS,
ALERT_RULE_UUID,
@@ -43,7 +44,7 @@ const alert: Alert = {
'service.name': ['frontend-rum'],
[ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
[ALERT_DURATION]: [62879000],
- [ALERT_STATUS]: ['open'],
+ [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
[SPACE_IDS]: ['myfakespaceid'],
tags: ['apm', 'service.name:frontend-rum'],
'transaction.type': ['page-load'],
@@ -14,6 +14,7 @@ import {
ALERT_SEVERITY,
ALERT_START,
ALERT_STATUS,
+ ALERT_STATUS_ACTIVE,
ALERT_UUID,
ALERT_RULE_UUID,
ALERT_RULE_NAME,
@@ -133,7 +134,7 @@ Example.args = {
'service.name': ['frontend-rum'],
[ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
[ALERT_DURATION]: [10000000000],
- [ALERT_STATUS]: ['open'],
+ [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
tags: ['apm', 'service.name:frontend-rum'],
'transaction.type': ['page-load'],
[ALERT_RULE_PRODUCER]: ['apm'],
@@ -154,7 +155,7 @@ Example.args = {
'service.name': ['frontend-rum'],
[ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
[ALERT_DURATION]: [10000000000],
- [ALERT_STATUS]: ['open'],
+ [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
tags: ['apm', 'service.name:frontend-rum'],
'transaction.type': ['page-load'],
[ALERT_RULE_PRODUCER]: ['apm'],
@@ -176,7 +177,7 @@ Example.args = {
'service.name': ['frontend-rum'],
[ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
[ALERT_DURATION]: [1000000000],
- [ALERT_STATUS]: ['open'],
+ [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
tags: ['apm', 'service.name:frontend-rum'],
'transaction.type': ['page-load'],
[ALERT_RULE_PRODUCER]: ['apm'],
@@ -13,6 +13,8 @@ import {
ALERT_RULE_TYPE_ID,
ALERT_START,
ALERT_STATUS,
+ ALERT_STATUS_ACTIVE,
+ ALERT_STATUS_RECOVERED,
ALERT_UUID,
ALERT_RULE_UUID,
ALERT_RULE_NAME,
@@ -26,7 +28,7 @@ export const apmAlertResponseExample = [
'service.name': ['opbeans-java'],
[ALERT_RULE_NAME]: ['Error count threshold | opbeans-java (smith test)'],
[ALERT_DURATION]: [180057000],
- [ALERT_STATUS]: ['open'],
+ [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
[ALERT_SEVERITY]: ['warning'],
tags: ['apm', 'service.name:opbeans-java'],
[ALERT_UUID]: ['0175ec0a-a3b1-4d41-b557-e21c2d024352'],
@@ -47,7 +49,7 @@
[ALERT_RULE_NAME]: ['Error count threshold | opbeans-java (smith test)'],
[ALERT_DURATION]: [2419005000],
[ALERT_END]: ['2021-04-12T13:49:49.446Z'],
- [ALERT_STATUS]: ['closed'],
+ [ALERT_STATUS]: [ALERT_STATUS_RECOVERED],
tags: ['apm', 'service.name:opbeans-java'],
[ALERT_UUID]: ['32b940e1-3809-4c12-8eee-f027cbb385e2'],
[ALERT_RULE_UUID]: ['474920d0-93e9-11eb-ac86-0b455460de81'],
34 changes: 18 additions & 16 deletions x-pack/plugins/rule_registry/README.md
@@ -47,20 +47,23 @@ await plugins.ruleRegistry.createOrUpdateComponentTemplate({
// mappingFromFieldMap is a utility function that will generate an
// ES mapping from a field map object. You can also define a literal
// mapping.
- mappings: mappingFromFieldMap({
- [SERVICE_NAME]: {
- type: 'keyword',
+ mappings: mappingFromFieldMap(
+ {
+ [SERVICE_NAME]: {
+ type: 'keyword',
+ },
+ [SERVICE_ENVIRONMENT]: {
+ type: 'keyword',
+ },
+ [TRANSACTION_TYPE]: {
+ type: 'keyword',
+ },
+ [PROCESSOR_EVENT]: {
+ type: 'keyword',
+ },
},
- [SERVICE_ENVIRONMENT]: {
- type: 'keyword',
- },
- [TRANSACTION_TYPE]: {
- type: 'keyword',
- },
- [PROCESSOR_EVENT]: {
- type: 'keyword',
- },
- }, 'strict'),
+ 'strict'
+ ),
},
},
});
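For readers scanning the hunk above: it is a formatting-only change to the README example. `mappingFromFieldMap` still receives the same two arguments, the field map and the `'strict'` dynamic-mapping flag; the reformat only moves `'strict'` onto its own line. Condensed, and assuming `mappingFromFieldMap` and the field-name constants are in scope exactly as in the README example, the call looks like:

```ts
// Condensed form of the example above. The first argument is the field map,
// the second ('strict') controls dynamic mapping behaviour.
const componentTemplateMappings = mappingFromFieldMap(
  {
    [SERVICE_NAME]: { type: 'keyword' },
    [SERVICE_ENVIRONMENT]: { type: 'keyword' },
  },
  'strict'
);
```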
@@ -129,12 +132,11 @@ The following fields are defined in the technical field component template and s
- `kibana.alert.rule.consumer`: the feature which produced the alert (inherited from the rule producer field). Usually a Kibana feature id like `apm`, `siem`...
- `kibana.alert.id`: the id of the alert, unique within the context of the rule execution that created it. E.g., for a rule that monitors latency for all services in all environments, this might be `opbeans-java:production`.
- `kibana.alert.uuid`: the unique identifier for the alert during its lifespan. If an alert recovers (or closes), this identifier is re-generated when it is opened again.
- - `kibana.alert.status`: the status of the alert. Can be `open` or `closed`.
+ - `kibana.alert.status`: the status of the alert. Can be `active` or `recovered`.
- `kibana.alert.start`: the ISO timestamp of the time at which the alert started.
- `kibana.alert.end`: the ISO timestamp of the time at which the alert recovered.
- `kibana.alert.duration.us`: the duration of the alert, in microseconds. This is the difference between the time the alert started and either the current time (while the alert is active) or the time the alert recovered.
- - `kibana.alert.severity.level`: the severity of the alert, as a keyword (e.g. critical).
- - `kibana.alert.severity.value`: the severity of the alert, as a numerical value, which allows sorting.
+ - `kibana.alert.severity`: the severity of the alert, as a keyword (e.g. critical).
- `kibana.alert.evaluation.value`: the measured (numerical) value.
- `kibana.alert.threshold.value`: the threshold that was defined (or, in the case of multiple thresholds, the one that was exceeded).
- `kibana.alert.ancestors`: the array of ancestors (if any) for the alert.
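
As a purely illustrative example of how these fields combine on a single recovered alert (field names from the list above, every value invented):

```ts
// Hypothetical recovered alert, sketched as a flat document.
const exampleRecoveredAlert = {
  'kibana.alert.rule.consumer': 'apm',
  'kibana.alert.status': 'recovered',
  'kibana.alert.start': '2021-08-27T10:00:00.000Z',
  'kibana.alert.end': '2021-08-27T10:05:00.000Z',
  // duration is end minus start, in microseconds (5 minutes here)
  'kibana.alert.duration.us': 300_000_000,
  'kibana.alert.severity': 'critical',
  'kibana.alert.evaluation.value': 2500,
  'kibana.alert.threshold.value': 2000,
};
```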
@@ -8,6 +8,7 @@
import {
ALERT_RULE_CONSUMER,
ALERT_STATUS,
+ ALERT_STATUS_ACTIVE,
SPACE_IDS,
ALERT_RULE_TYPE_ID,
} from '@kbn/rule-data-utils';
@@ -93,7 +94,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -150,7 +151,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -196,7 +197,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -206,7 +207,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -283,7 +284,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -303,7 +304,7 @@ describe('bulkUpdate()', () => {

await alertsClient.bulkUpdate({
ids: undefined,
- query: `${ALERT_STATUS}: open`,
+ query: `${ALERT_STATUS}: ${ALERT_STATUS_ACTIVE}`,
index: indexName,
status: 'closed',
});
@@ -343,7 +344,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -355,13 +356,13 @@
await expect(
alertsClient.bulkUpdate({
ids: undefined,
- query: `${ALERT_STATUS}: open`,
+ query: `${ALERT_STATUS}: ${ALERT_STATUS_ACTIVE}`,
index: indexName,
status: 'closed',
})
).rejects.toThrowErrorMatchingInlineSnapshot(`
- "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: open\\" and operation update
- Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: open\\" and operation update
+ "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: active\\" and operation update
+ Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: active\\" and operation update
Error: Error: Unauthorized for fake.rule and apm"
`);

@@ -404,7 +405,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -414,7 +415,7 @@ describe('bulkUpdate()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -426,13 +427,13 @@
await expect(
alertsClient.bulkUpdate({
ids: undefined,
- query: `${ALERT_STATUS}: open`,
+ query: `${ALERT_STATUS}: ${ALERT_STATUS_ACTIVE}`,
index: indexName,
status: 'closed',
})
).rejects.toThrowErrorMatchingInlineSnapshot(`
- "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: open\\" and operation update
- Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: open\\" and operation update
+ "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: active\\" and operation update
+ Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: active\\" and operation update
Error: Error: Unauthorized for fake.rule and apm"
`);

@@ -8,6 +8,7 @@
import {
ALERT_RULE_CONSUMER,
ALERT_STATUS,
+ ALERT_STATUS_ACTIVE,
SPACE_IDS,
ALERT_RULE_TYPE_ID,
} from '@kbn/rule-data-utils';
@@ -103,7 +104,7 @@ describe('get()', () => {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
message: 'hello world 1',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: ['test_default_space_id'],
},
},
@@ -117,7 +118,7 @@
Object {
"kibana.alert.rule.consumer": "apm",
"kibana.alert.rule.rule_type_id": "apm.error_rate",
- "kibana.alert.status": "open",
+ "kibana.alert.status": "active",
"kibana.space_ids": Array [
"test_default_space_id",
],
@@ -212,7 +213,7 @@ describe('get()', () => {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
message: 'hello world 1',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: ['test_default_space_id'],
},
},
@@ -265,7 +266,7 @@ describe('get()', () => {
_source: {
[ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: [DEFAULT_SPACE],
},
},
@@ -338,7 +339,7 @@ describe('get()', () => {
[ALERT_RULE_TYPE_ID]: 'apm.error_rate',
message: 'hello world 1',
[ALERT_RULE_CONSUMER]: 'apm',
- [ALERT_STATUS]: 'open',
+ [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
[SPACE_IDS]: ['test_default_space_id'],
},
},
@@ -360,7 +361,7 @@
Object {
"kibana.alert.rule.consumer": "apm",
"kibana.alert.rule.rule_type_id": "apm.error_rate",
- "kibana.alert.status": "open",
+ "kibana.alert.status": "active",
"kibana.space_ids": Array [
"test_default_space_id",
],
@@ -10,7 +10,7 @@ import {
ALERT_ID,
ALERT_RULE_PRODUCER,
ALERT_START,
- ALERT_STATUS,
+ ALERT_WORKFLOW_STATUS,
ALERT_UUID,
ALERT_RULE_UUID,
ALERT_RULE_ID,
@@ -196,20 +196,20 @@ export const buildAlertStatusFilterRuleRegistry = (status: Status): Filter[] =>
should: [
{
term: {
- [ALERT_STATUS]: status,
+ [ALERT_WORKFLOW_STATUS]: status,
},
},
{
term: {
- [ALERT_STATUS]: 'in-progress',
+ [ALERT_WORKFLOW_STATUS]: 'in-progress',
},
},
],
},
}
: {
term: {
- [ALERT_STATUS]: status,
+ [ALERT_WORKFLOW_STATUS]: status,
},
};

@@ -220,7 +220,7 @@ export const buildAlertStatusFilterRuleRegistry = (status: Status): Filter[] =>
negate: false,
disabled: false,
type: 'phrase',
- key: ALERT_STATUS,
+ key: ALERT_WORKFLOW_STATUS,
params: {
query: status,
},
@@ -236,7 +236,7 @@ export const buildAlertStatusesFilterRuleRegistry = (statuses: Status[]): Filter
bool: {
should: statuses.map((status) => ({
term: {
- [ALERT_STATUS]: status,
+ [ALERT_WORKFLOW_STATUS]: status,
},
})),
},
@@ -281,7 +281,7 @@ export const requiredFieldMappingsForActionsRuleRegistry = {
'alert.start': ALERT_START,
'alert.uuid': ALERT_UUID,
'event.action': 'event.action',
- 'alert.status': ALERT_STATUS,
+ 'alert.workflow_status': ALERT_WORKFLOW_STATUS,
'alert.duration.us': ALERT_DURATION,
'rule.uuid': ALERT_RULE_UUID,
'rule.id': ALERT_RULE_ID,
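
Note the distinction this file leans on: the lifecycle field `kibana.alert.status` now carries `active`/`recovered`, while the user-driven triage state (`open`, `in-progress`, `closed`) is tracked separately, which is why the filters above switch from `ALERT_STATUS` to `ALERT_WORKFLOW_STATUS`. A rough sketch of the query shape produced for the `open` case, inferred from the hunk above and assuming `ALERT_WORKFLOW_STATUS` resolves to `kibana.alert.workflow_status` (not copied verbatim from the source):

```ts
// Matches alerts whose workflow status is either 'open' or 'in-progress'.
const openAlertsFilterQuery = {
  bool: {
    should: [
      { term: { 'kibana.alert.workflow_status': 'open' } },
      { term: { 'kibana.alert.workflow_status': 'in-progress' } },
    ],
  },
};
```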