[RAC] Replace usages of kibana.alert.status: open with active (#109033)

* Replace usages of alert.status: open with active

* Update unit tests

* Add back home.disableWelcomeScreen=true

* Only disable welcome screen within APM ftr config

* Add disableWelcomeScreen option to security solution cypress config

* Fix reference to workflow status

* oops

* Remove duplicate disableWelcomeScreen

* Update README.md

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Authored by Marshall Main on 2021-08-26 15:58:44 -07:00; committed by GitHub
parent 8babdc2462
commit 682bc7c771
11 changed files with 72 additions and 56 deletions


@@ -16,6 +16,7 @@ import {
   ALERT_SEVERITY,
   ALERT_START,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
   ALERT_UUID,
   SPACE_IDS,
   ALERT_RULE_UUID,
@@ -43,7 +44,7 @@ const alert: Alert = {
   'service.name': ['frontend-rum'],
   [ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
   [ALERT_DURATION]: [62879000],
-  [ALERT_STATUS]: ['open'],
+  [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
   [SPACE_IDS]: ['myfakespaceid'],
   tags: ['apm', 'service.name:frontend-rum'],
   'transaction.type': ['page-load'],


@@ -14,6 +14,7 @@ import {
   ALERT_SEVERITY,
   ALERT_START,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
   ALERT_UUID,
   ALERT_RULE_UUID,
   ALERT_RULE_NAME,
@@ -133,7 +134,7 @@ Example.args = {
   'service.name': ['frontend-rum'],
   [ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
   [ALERT_DURATION]: [10000000000],
-  [ALERT_STATUS]: ['open'],
+  [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
   tags: ['apm', 'service.name:frontend-rum'],
   'transaction.type': ['page-load'],
   [ALERT_RULE_PRODUCER]: ['apm'],
@@ -154,7 +155,7 @@ Example.args = {
   'service.name': ['frontend-rum'],
   [ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
   [ALERT_DURATION]: [10000000000],
-  [ALERT_STATUS]: ['open'],
+  [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
   tags: ['apm', 'service.name:frontend-rum'],
   'transaction.type': ['page-load'],
   [ALERT_RULE_PRODUCER]: ['apm'],
@@ -176,7 +177,7 @@ Example.args = {
   'service.name': ['frontend-rum'],
   [ALERT_RULE_NAME]: ['Latency threshold | frontend-rum'],
   [ALERT_DURATION]: [1000000000],
-  [ALERT_STATUS]: ['open'],
+  [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
   tags: ['apm', 'service.name:frontend-rum'],
   'transaction.type': ['page-load'],
   [ALERT_RULE_PRODUCER]: ['apm'],


@@ -13,6 +13,8 @@ import {
   ALERT_RULE_TYPE_ID,
   ALERT_START,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
+  ALERT_STATUS_RECOVERED,
   ALERT_UUID,
   ALERT_RULE_UUID,
   ALERT_RULE_NAME,
@@ -26,7 +28,7 @@ export const apmAlertResponseExample = [
   'service.name': ['opbeans-java'],
   [ALERT_RULE_NAME]: ['Error count threshold | opbeans-java (smith test)'],
   [ALERT_DURATION]: [180057000],
-  [ALERT_STATUS]: ['open'],
+  [ALERT_STATUS]: [ALERT_STATUS_ACTIVE],
   [ALERT_SEVERITY]: ['warning'],
   tags: ['apm', 'service.name:opbeans-java'],
   [ALERT_UUID]: ['0175ec0a-a3b1-4d41-b557-e21c2d024352'],
@@ -47,7 +49,7 @@ export const apmAlertResponseExample = [
   [ALERT_RULE_NAME]: ['Error count threshold | opbeans-java (smith test)'],
   [ALERT_DURATION]: [2419005000],
   [ALERT_END]: ['2021-04-12T13:49:49.446Z'],
-  [ALERT_STATUS]: ['closed'],
+  [ALERT_STATUS]: [ALERT_STATUS_RECOVERED],
   tags: ['apm', 'service.name:opbeans-java'],
   [ALERT_UUID]: ['32b940e1-3809-4c12-8eee-f027cbb385e2'],
   [ALERT_RULE_UUID]: ['474920d0-93e9-11eb-ac86-0b455460de81'],


@@ -47,20 +47,23 @@ await plugins.ruleRegistry.createOrUpdateComponentTemplate({
   // mappingFromFieldMap is a utility function that will generate an
   // ES mapping from a field map object. You can also define a literal
   // mapping.
-  mappings: mappingFromFieldMap({
-    [SERVICE_NAME]: {
-      type: 'keyword',
+  mappings: mappingFromFieldMap(
+    {
+      [SERVICE_NAME]: {
+        type: 'keyword',
+      },
+      [SERVICE_ENVIRONMENT]: {
+        type: 'keyword',
+      },
+      [TRANSACTION_TYPE]: {
+        type: 'keyword',
+      },
+      [PROCESSOR_EVENT]: {
+        type: 'keyword',
+      },
     },
-    [SERVICE_ENVIRONMENT]: {
-      type: 'keyword',
-    },
-    [TRANSACTION_TYPE]: {
-      type: 'keyword',
-    },
-    [PROCESSOR_EVENT]: {
-      type: 'keyword',
-    },
-  }, 'strict'),
+    'strict'
+  ),
   },
 },
});
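For context, here is a rough sketch of the mapping that `mappingFromFieldMap` might generate for the field map above, going only by the comment in the snippet; the exact output shape, including whether the `'strict'` argument becomes the mapping's `dynamic` setting, is an assumption rather than verified behavior:

```ts
// Hypothetical result of mappingFromFieldMap(fieldMap, 'strict') -- assuming
// the constants resolve to the usual APM field names and that the second
// argument is applied as the mapping's `dynamic` setting.
const assumedMapping = {
  dynamic: 'strict',
  properties: {
    'service.name': { type: 'keyword' },
    'service.environment': { type: 'keyword' },
    'transaction.type': { type: 'keyword' },
    'processor.event': { type: 'keyword' },
  },
};
```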
@@ -129,12 +132,11 @@ The following fields are defined in the technical field component template and s
 - `kibana.alert.rule.consumer`: the feature which produced the alert (inherited from the rule producer field). Usually a Kibana feature id like `apm`, `siem`...
 - `kibana.alert.id`: the id of the alert, which is unique within the context of the rule execution it was created in. E.g., for a rule that monitors latency for all services in all environments, this might be `opbeans-java:production`.
 - `kibana.alert.uuid`: the unique identifier for the alert during its lifespan. If an alert recovers (or closes), this identifier is re-generated when it is opened again.
-- `kibana.alert.status`: the status of the alert. Can be `open` or `closed`.
+- `kibana.alert.status`: the status of the alert. Can be `active` or `recovered`.
 - `kibana.alert.start`: the ISO timestamp of the time at which the alert started.
 - `kibana.alert.end`: the ISO timestamp of the time at which the alert recovered.
 - `kibana.alert.duration.us`: the duration of the alert, in microseconds. This is the difference between the alert start time and either the current time (while the alert is active) or the time at which the alert recovered.
-- `kibana.alert.severity.level`: the severity of the alert, as a keyword (e.g. critical).
-- `kibana.alert.severity.value`: the severity of the alert, as a numerical value, which allows sorting.
+- `kibana.alert.severity`: the severity of the alert, as a keyword (e.g. critical).
 - `kibana.alert.evaluation.value`: the measured value (numerical).
 - `kibana.alert.threshold.value`: the threshold that was defined (or, in case of multiple thresholds, the one that was exceeded).
 - `kibana.alert.ancestors`: the array of ancestors (if any) for the alert.
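To illustrate the renamed status values in practice, here is a minimal sketch of an Elasticsearch query that filters for active alerts using the constants from `@kbn/rule-data-utils`; the index pattern and client setup are illustrative assumptions, not part of this change:

```ts
// Minimal sketch (assumed setup): search an alerts index for alerts whose
// kibana.alert.status is 'active' rather than the old 'open' value.
import { Client } from '@elastic/elasticsearch';
import { ALERT_STATUS, ALERT_STATUS_ACTIVE } from '@kbn/rule-data-utils';

const client = new Client({ node: 'http://localhost:9200' }); // assumed node

async function fetchActiveAlerts() {
  const { body } = await client.search({
    index: '.alerts-*', // assumed index pattern
    body: {
      query: {
        term: { [ALERT_STATUS]: ALERT_STATUS_ACTIVE }, // kibana.alert.status: active
      },
    },
  });
  return body.hits.hits;
}
```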


@@ -8,6 +8,7 @@
 import {
   ALERT_RULE_CONSUMER,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
   SPACE_IDS,
   ALERT_RULE_TYPE_ID,
 } from '@kbn/rule-data-utils';
@@ -93,7 +94,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -150,7 +151,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -196,7 +197,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -206,7 +207,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -283,7 +284,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -303,7 +304,7 @@ describe('bulkUpdate()', () => {
 await alertsClient.bulkUpdate({
   ids: undefined,
-  query: `${ALERT_STATUS}: open`,
+  query: `${ALERT_STATUS}: ${ALERT_STATUS_ACTIVE}`,
   index: indexName,
   status: 'closed',
 });
@@ -343,7 +344,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -355,13 +356,13 @@ describe('bulkUpdate()', () => {
 await expect(
   alertsClient.bulkUpdate({
     ids: undefined,
-    query: `${ALERT_STATUS}: open`,
+    query: `${ALERT_STATUS}: ${ALERT_STATUS_ACTIVE}`,
     index: indexName,
     status: 'closed',
   })
 ).rejects.toThrowErrorMatchingInlineSnapshot(`
-  "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: open\\" and operation update
-  Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: open\\" and operation update
+  "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: active\\" and operation update
+  Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: active\\" and operation update
   Error: Error: Unauthorized for fake.rule and apm"
 `);
@@ -404,7 +405,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -414,7 +415,7 @@ describe('bulkUpdate()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -426,13 +427,13 @@ describe('bulkUpdate()', () => {
 await expect(
   alertsClient.bulkUpdate({
     ids: undefined,
-    query: `${ALERT_STATUS}: open`,
+    query: `${ALERT_STATUS}: ${ALERT_STATUS_ACTIVE}`,
     index: indexName,
     status: 'closed',
   })
 ).rejects.toThrowErrorMatchingInlineSnapshot(`
-  "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: open\\" and operation update
-  Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: open\\" and operation update
+  "queryAndAuditAllAlerts threw an error: Unable to retrieve alerts with query \\"kibana.alert.status: active\\" and operation update
+  Error: Unable to retrieve alert details for alert with id of \\"null\\" or with query \\"kibana.alert.status: active\\" and operation update
   Error: Error: Unauthorized for fake.rule and apm"
 `);


@@ -8,6 +8,7 @@
 import {
   ALERT_RULE_CONSUMER,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
   SPACE_IDS,
   ALERT_RULE_TYPE_ID,
 } from '@kbn/rule-data-utils';
@@ -103,7 +104,7 @@ describe('get()', () => {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     message: 'hello world 1',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: ['test_default_space_id'],
   },
 },
@@ -117,7 +118,7 @@ describe('get()', () => {
 Object {
   "kibana.alert.rule.consumer": "apm",
   "kibana.alert.rule.rule_type_id": "apm.error_rate",
-  "kibana.alert.status": "open",
+  "kibana.alert.status": "active",
   "kibana.space_ids": Array [
     "test_default_space_id",
   ],
@@ -212,7 +213,7 @@ describe('get()', () => {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     message: 'hello world 1',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: ['test_default_space_id'],
   },
 },
@@ -265,7 +266,7 @@ describe('get()', () => {
   _source: {
     [ALERT_RULE_TYPE_ID]: fakeRuleTypeId,
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: [DEFAULT_SPACE],
   },
 },
@@ -338,7 +339,7 @@ describe('get()', () => {
     [ALERT_RULE_TYPE_ID]: 'apm.error_rate',
     message: 'hello world 1',
     [ALERT_RULE_CONSUMER]: 'apm',
-    [ALERT_STATUS]: 'open',
+    [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
     [SPACE_IDS]: ['test_default_space_id'],
   },
 },
@@ -360,7 +361,7 @@ describe('get()', () => {
 Object {
   "kibana.alert.rule.consumer": "apm",
   "kibana.alert.rule.rule_type_id": "apm.error_rate",
-  "kibana.alert.status": "open",
+  "kibana.alert.status": "active",
   "kibana.space_ids": Array [
     "test_default_space_id",
   ],


@@ -10,7 +10,7 @@ import {
   ALERT_ID,
   ALERT_RULE_PRODUCER,
   ALERT_START,
-  ALERT_STATUS,
+  ALERT_WORKFLOW_STATUS,
   ALERT_UUID,
   ALERT_RULE_UUID,
   ALERT_RULE_NAME,
@@ -195,12 +195,12 @@ export const buildAlertStatusFilterRuleRegistry = (status: Status): Filter[] =>
 should: [
   {
     term: {
-      [ALERT_STATUS]: status,
+      [ALERT_WORKFLOW_STATUS]: status,
     },
   },
   {
     term: {
-      [ALERT_STATUS]: 'in-progress',
+      [ALERT_WORKFLOW_STATUS]: 'in-progress',
     },
   },
 ],
@@ -208,7 +208,7 @@ export const buildAlertStatusFilterRuleRegistry = (status: Status): Filter[] =>
   }
 : {
     term: {
-      [ALERT_STATUS]: status,
+      [ALERT_WORKFLOW_STATUS]: status,
     },
   };
@@ -219,7 +219,7 @@ export const buildAlertStatusFilterRuleRegistry = (status: Status): Filter[] =>
   negate: false,
   disabled: false,
   type: 'phrase',
-  key: ALERT_STATUS,
+  key: ALERT_WORKFLOW_STATUS,
   params: {
     query: status,
   },
@@ -280,7 +280,7 @@ export const requiredFieldMappingsForActionsRuleRegistry = {
   'alert.start': ALERT_START,
   'alert.uuid': ALERT_UUID,
   'event.action': 'event.action',
-  'alert.status': ALERT_STATUS,
+  'alert.workflow_status': ALERT_WORKFLOW_STATUS,
   'alert.duration.us': ALERT_DURATION,
   'rule.uuid': ALERT_RULE_UUID,
   'rule.name': ALERT_RULE_NAME,


@@ -10,6 +10,7 @@ import {
   ALERT_RULE_CONSUMER,
   ALERT_RULE_NAMESPACE,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
   ALERT_WORKFLOW_STATUS,
   SPACE_IDS,
 } from '@kbn/rule-data-utils';
@@ -71,7 +72,7 @@ describe('buildAlert', () => {
   ],
   [ALERT_ORIGINAL_TIME]: '2020-04-20T21:27:45.000Z',
   [ALERT_REASON]: 'alert reasonable reason',
-  [ALERT_STATUS]: 'open',
+  [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
   [ALERT_WORKFLOW_STATUS]: 'open',
   ...flattenWithPrefix(ALERT_RULE_NAMESPACE, {
     author: [],
@@ -148,7 +149,7 @@ describe('buildAlert', () => {
     module: 'system',
   },
   [ALERT_REASON]: 'alert reasonable reason',
-  [ALERT_STATUS]: 'open',
+  [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
   [ALERT_WORKFLOW_STATUS]: 'open',
   ...flattenWithPrefix(ALERT_RULE_NAMESPACE, {
     author: [],


@@ -10,6 +10,7 @@ import {
   ALERT_RULE_CONSUMER,
   ALERT_RULE_NAMESPACE,
   ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
   ALERT_WORKFLOW_STATUS,
   SPACE_IDS,
 } from '@kbn/rule-data-utils';
@@ -109,7 +110,7 @@ export const buildAlert = (
   [ALERT_RULE_CONSUMER]: SERVER_APP_ID,
   [SPACE_IDS]: spaceId != null ? [spaceId] : [],
   [ALERT_ANCESTORS]: ancestors,
-  [ALERT_STATUS]: 'open',
+  [ALERT_STATUS]: ALERT_STATUS_ACTIVE,
   [ALERT_WORKFLOW_STATUS]: 'open',
   [ALERT_DEPTH]: depth,
   [ALERT_REASON]: reason,


@@ -8,7 +8,7 @@
 import React from 'react';
 import moment from 'moment';
-import { ALERT_END, ALERT_STATUS, ALERT_REASON } from '@kbn/rule-data-utils';
+import { ALERT_END, ALERT_STATUS, ALERT_STATUS_ACTIVE, ALERT_REASON } from '@kbn/rule-data-utils';
 import { AlertTypeInitializer } from '.';
 import { getMonitorRouteFromMonitorId } from './common';
@@ -39,7 +39,7 @@ export const initDurationAnomalyAlertType: AlertTypeInitializer = ({
   reason: fields[ALERT_REASON] || '',
   link: getMonitorRouteFromMonitorId({
     monitorId: fields['monitor.id']!,
-    dateRangeEnd: fields[ALERT_STATUS] === 'open' ? 'now' : fields[ALERT_END]!,
+    dateRangeEnd: fields[ALERT_STATUS] === ALERT_STATUS_ACTIVE ? 'now' : fields[ALERT_END]!,
     dateRangeStart: moment(new Date(fields['anomaly.start']!)).subtract('5', 'm').toISOString(),
   }),
 }),


@@ -8,7 +8,13 @@
 import React from 'react';
 import moment from 'moment';
-import { ALERT_END, ALERT_START, ALERT_STATUS, ALERT_REASON } from '@kbn/rule-data-utils';
+import {
+  ALERT_END,
+  ALERT_START,
+  ALERT_STATUS,
+  ALERT_STATUS_ACTIVE,
+  ALERT_REASON,
+} from '@kbn/rule-data-utils';
 import { AlertTypeInitializer } from '.';
 import { getMonitorRouteFromMonitorId } from './common';
@@ -53,7 +59,7 @@ export const initMonitorStatusAlertType: AlertTypeInitializer = ({
   reason: fields[ALERT_REASON] || '',
   link: getMonitorRouteFromMonitorId({
     monitorId: fields['monitor.id']!,
-    dateRangeEnd: fields[ALERT_STATUS] === 'open' ? 'now' : fields[ALERT_END]!,
+    dateRangeEnd: fields[ALERT_STATUS] === ALERT_STATUS_ACTIVE ? 'now' : fields[ALERT_END]!,
     dateRangeStart: moment(new Date(fields[ALERT_START]!)).subtract('5', 'm').toISOString(),
     filters: {
       'observer.geo.name': [fields['observer.geo.name'][0]],