[Metrics UI] Add integration tests for Metric Threshold Rule and refactor to fire correctly (#109971)

* [Metrics UI] Add integration tests for Metric Threshold and refactor to fire correctly

* Removing unused variables

* Fixing tests for metric_threshold_executor

* Fixing test for metric_query

* fixing test

* Changing type guard

Chris Cowan, 2021-09-02 10:38:45 -06:00, committed by GitHub
parent 3cdced3c45
commit 0452483e7d
11 changed files with 446 additions and 78 deletions
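
The core behavioral change: evaluateAlert now accepts a timeframe that carries only an end timestamp, and the query window is no longer rounded up to the next time unit, so a rule evaluated at a given moment only sees data up to that moment. A minimal calling sketch, assuming esClient, params, and configuration are set up as in the new integration test added below (illustrative, not an excerpt from the diff):

// Evaluate the rule against "now"; no start timestamp is required anymore.
const results = await evaluateAlert(esClient, params, configuration, { end: Date.now() });
// Each criterion yields a record keyed by group (or '*' when ungrouped).
console.log(results[0]['*'].shouldFire);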


@@ -5,7 +5,7 @@
* 2.0.
*/
import { mapValues, first, last, isNaN } from 'lodash';
import { mapValues, first, last, isNaN, isNumber, isObject, has } from 'lodash';
import moment from 'moment';
import { ElasticsearchClient } from 'kibana/server';
import {
@@ -23,7 +23,11 @@ import { UNGROUPED_FACTORY_KEY } from '../../common/utils';
import { MetricExpressionParams, Comparator, Aggregators } from '../types';
import { getElasticsearchMetricQuery } from './metric_query';
interface Aggregation {
interface AggregationWithoutIntervals {
aggregatedValue: { value: number; values?: Array<{ key: number; value: number }> };
}
interface AggregationWithIntervals {
aggregatedIntervals: {
buckets: Array<{
aggregatedValue: { value: number; values?: Array<{ key: number; value: number }> };
@@ -35,6 +39,14 @@ interface Aggregation {
};
}
type Aggregation = AggregationWithIntervals | AggregationWithoutIntervals;
function isAggregationWithIntervals(
subject: Aggregation | undefined
): subject is AggregationWithIntervals {
return isObject(subject) && has(subject, 'aggregatedIntervals');
}
interface CompositeAggregationsResponse {
groupings: {
buckets: Aggregation[];
@@ -52,7 +64,7 @@ export const evaluateAlert = <Params extends EvaluatedAlertParams = EvaluatedAle
esClient: ElasticsearchClient,
params: Params,
config: InfraSource['configuration'],
timeframe?: { start: number; end: number }
timeframe?: { start?: number; end: number }
) => {
const { criteria, groupBy, filterQuery, shouldDropPartialBuckets } = params;
return Promise.all(
@@ -105,7 +117,7 @@ const getMetric: (
timefield: string,
groupBy: string | undefined | string[],
filterQuery: string | undefined,
timeframe?: { start: number; end: number },
timeframe?: { start?: number; end: number },
shouldDropPartialBuckets?: boolean
) => Promise<Record<string, number[]>> = async function (
esClient,
@@ -124,10 +136,7 @@ const getMetric: (
const intervalAsSeconds = getIntervalInSeconds(interval);
const intervalAsMS = intervalAsSeconds * 1000;
const to = moment(timeframe ? timeframe.end : Date.now())
.add(1, timeUnit)
.startOf(timeUnit)
.valueOf();
const to = moment(timeframe ? timeframe.end : Date.now()).valueOf();
// Rate aggregations need 5 buckets worth of data
const minimumBuckets = aggType === Aggregators.RATE ? 5 : 1;
@@ -135,7 +144,7 @@ const getMetric: (
const minimumFrom = to - intervalAsMS * minimumBuckets;
const from = roundTimestamp(
timeframe && timeframe.start <= minimumFrom ? timeframe.start : minimumFrom,
timeframe && timeframe.start && timeframe.start <= minimumFrom ? timeframe.start : minimumFrom,
timeUnit
);
@@ -172,16 +181,26 @@ const getMetric: (
searchBody,
bucketSelector,
afterKeyHandler
)) as Array<Aggregation & { key: Record<string, string> }>;
return compositeBuckets.reduce(
)) as Array<Aggregation & { key: Record<string, string>; doc_count: number }>;
const groupedResults = compositeBuckets.reduce(
(result, bucket) => ({
...result,
[Object.values(bucket.key)
.map((value) => value)
.join(', ')]: getValuesFromAggregations(bucket, aggType, dropPartialBucketsOptions),
.join(', ')]: getValuesFromAggregations(
bucket,
aggType,
dropPartialBucketsOptions,
{
start: from,
end: to,
},
bucket.doc_count
),
}),
{}
);
return groupedResults;
}
const { body: result } = await esClient.search({
body: searchBody,
@@ -192,7 +211,9 @@ const getMetric: (
[UNGROUPED_FACTORY_KEY]: getValuesFromAggregations(
(result.aggregations! as unknown) as Aggregation,
aggType,
dropPartialBucketsOptions
dropPartialBucketsOptions,
{ start: from, end: to },
isNumber(result.hits.total) ? result.hits.total : result.hits.total.value
),
};
} catch (e) {
@@ -221,7 +242,7 @@ interface DropPartialBucketOptions {
const dropPartialBuckets = ({ from, to, bucketSizeInMillis }: DropPartialBucketOptions) => (
row: {
key: string;
value: number;
value: number | null;
} | null
) => {
if (row == null) return null;
@@ -230,20 +251,45 @@ const dropPartialBuckets = ({ from, to, bucketSizeInMillis }: DropPartialBucketO
};
const getValuesFromAggregations = (
aggregations: Aggregation,
aggregations: Aggregation | undefined,
aggType: MetricExpressionParams['aggType'],
dropPartialBucketsOptions: DropPartialBucketOptions | null
dropPartialBucketsOptions: DropPartialBucketOptions | null,
timeFrame: { start: number; end: number },
docCount?: number
) => {
try {
const { buckets } = aggregations.aggregatedIntervals;
let buckets;
if (aggType === Aggregators.COUNT) {
buckets = [
{
doc_count: docCount,
to_as_string: moment(timeFrame.end).toISOString(),
from_as_string: moment(timeFrame.start).toISOString(),
key_as_string: moment(timeFrame.start).toISOString(),
},
];
} else if (isAggregationWithIntervals(aggregations)) {
buckets = aggregations.aggregatedIntervals.buckets;
} else {
buckets = [
{
...aggregations,
doc_count: docCount,
to_as_string: moment(timeFrame.end).toISOString(),
from_as_string: moment(timeFrame.start).toISOString(),
key_as_string: moment(timeFrame.start).toISOString(),
},
];
}
if (!buckets.length) return null; // No Data state
let mappedBuckets;
let mappedBuckets: Array<{ key: string; value: number | null } | null>;
if (aggType === Aggregators.COUNT) {
mappedBuckets = buckets.map((bucket) => ({
key: bucket.from_as_string,
value: bucket.doc_count,
value: bucket.doc_count || null,
}));
} else if (aggType === Aggregators.P95 || aggType === Aggregators.P99) {
mappedBuckets = buckets.map((bucket) => {
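
With the rounding removed, the evaluation window that getMetric builds is easier to reason about: it ends at timeframe.end as-is and starts far enough back to cover the minimum bucket count (five buckets for rate aggregations, one otherwise). A worked sketch with illustrative values; the roundTimestamp call from the diff is left out for brevity:

const intervalAsMS = 1 * 60 * 1000; // timeSize = 1, timeUnit = 'm', i.e. one-minute buckets
const end = Date.parse('2021-01-02T00:05:00Z'); // timeframe.end, e.g. rate.max in the new tests
const minimumBuckets = 5; // Aggregators.RATE needs five buckets of history
const minimumFrom = end - intervalAsMS * minimumBuckets; // 2021-01-02T00:00:00.000Z
// Without an earlier timeframe.start, the query spans exactly the minimum window:
console.log(new Date(minimumFrom).toISOString(), '->', new Date(end).toISOString());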


@@ -64,30 +64,4 @@ describe("The Metric Threshold Alert's getElasticsearchMetricQuery", () => {
);
});
});
describe('when passed a timeframe of 1 hour', () => {
const testTimeframe = {
start: moment().subtract(1, 'hour').valueOf(),
end: moment().valueOf(),
};
const searchBodyWithoutGroupBy = getElasticsearchMetricQuery(
expressionParams,
timefield,
testTimeframe
);
const searchBodyWithGroupBy = getElasticsearchMetricQuery(
expressionParams,
timefield,
testTimeframe,
groupBy
);
test("generates 1 hour's worth of buckets", () => {
// @ts-ignore
expect(searchBodyWithoutGroupBy.aggs.aggregatedIntervals.date_range.ranges.length).toBe(60);
expect(
// @ts-ignore
searchBodyWithGroupBy.aggs.groupings.aggs.aggregatedIntervals.date_range.ranges.length
).toBe(60);
});
});
});


@@ -7,7 +7,6 @@
import { networkTraffic } from '../../../../../common/inventory_models/shared/metrics/snapshot/network_traffic';
import { MetricExpressionParams, Aggregators } from '../types';
import { getIntervalInSeconds } from '../../../../utils/get_interval_in_seconds';
import { createPercentileAggregation } from './create_percentile_aggregation';
import { calculateDateHistogramOffset } from '../../../metrics/lib/calculate_date_histogram_offset';
@@ -34,13 +33,9 @@ export const getElasticsearchMetricQuery = (
throw new Error('Can only aggregate without a metric if using the document count aggregator');
}
const interval = `${timeSize}${timeUnit}`;
const intervalAsSeconds = getIntervalInSeconds(interval);
const intervalAsMS = intervalAsSeconds * 1000;
const to = timeframe.end;
const from = timeframe.start;
const deliveryDelay = 60 * 1000; // INFO: This allows us to account for any delay ES has in indexing the most recent data.
const aggregations =
aggType === Aggregators.COUNT
? {}
@@ -72,21 +67,7 @@
aggregations,
},
}
: {
aggregatedIntervals: {
date_range: {
field: timefield,
// Generate an array of buckets, starting at `from` and ending at `to`
// This is usually only necessary for alert previews or rate aggs. Most alert evaluations
// will generate only one bucket from this logic.
ranges: Array.from(Array(Math.floor((to - from) / intervalAsMS)), (_, i) => ({
from: from + intervalAsMS * i - deliveryDelay,
to: from + intervalAsMS * (i + 1) - deliveryDelay,
})),
},
aggregations,
},
};
: aggregations;
const aggs = groupBy
? {
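
The net effect of dropping the date_range scaffolding is a flatter Elasticsearch response for ordinary (non-preview) evaluations: the metric value sits directly under aggregations, and the document count is read from hits.total, which is exactly what the updated mocks below return. A hedged before/after sketch of the two response shapes (all values illustrative):

// Before: values arrived inside synthetic per-interval date_range buckets
const oldShape = {
  aggregations: {
    aggregatedIntervals: {
      buckets: [
        {
          aggregatedValue: { value: 1.0 },
          doc_count: 1,
          from_as_string: '2021-01-01T00:50:00.000Z',
          to_as_string: '2021-01-01T00:55:00.000Z',
        },
      ],
    },
  },
};
// After: a single top-level aggregatedValue; the doc count comes from hits.total
const newShape = {
  hits: { total: { value: 1 } },
  aggregations: { aggregatedValue: { value: 1.0, values: [{ key: 95.0, value: 1.0 }] } },
};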


@@ -515,7 +515,7 @@ services.scopedClusterClient.asCurrentUser.search.mockImplementation((params?: a
}
if (metric === 'test.metric.2') {
return elasticsearchClientMock.createSuccessTransportRequestPromise(
mocks.alternateMetricResponse(from)
mocks.alternateMetricResponse()
);
} else if (metric === 'test.metric.3') {
return elasticsearchClientMock.createSuccessTransportRequestPromise(
@@ -524,9 +524,7 @@ services.scopedClusterClient.asCurrentUser.search.mockImplementation((params?: a
: mocks.emptyMetricResponse
);
}
return elasticsearchClientMock.createSuccessTransportRequestPromise(
mocks.basicMetricResponse(from)
);
return elasticsearchClientMock.createSuccessTransportRequestPromise(mocks.basicMetricResponse());
});
services.savedObjectsClient.get.mockImplementation(async (type: string, sourceId: string) => {
if (sourceId === 'alternate')


@@ -103,20 +103,26 @@ const bucketsC = (from: number) => [
},
];
export const basicMetricResponse = (from: number) => ({
aggregations: {
aggregatedIntervals: {
buckets: bucketsA(from),
export const basicMetricResponse = () => ({
hits: {
total: {
value: 1,
},
},
aggregations: {
aggregatedValue: { value: 1.0, values: [{ key: 95.0, value: 1.0 }] },
},
});
export const alternateMetricResponse = (from: number) => ({
aggregations: {
aggregatedIntervals: {
buckets: bucketsB(from),
export const alternateMetricResponse = () => ({
hits: {
total: {
value: 1,
},
},
aggregations: {
aggregatedValue: { value: 3, values: [{ key: 99.0, value: 3 }] },
},
});
export const emptyMetricResponse = {


@@ -28,4 +28,14 @@ export const DATES = {
max: 1564083493080,
},
},
'alert-test-data': {
gauge: {
min: 1609459200000, // '2021-01-01T00:00:00Z'
max: 1609462800000, // '2021-01-01T01:00:00Z'
},
rate: {
min: 1609545600000, // '2021-01-02T00:00:00Z'
max: 1609545900000, // '2021-01-02T00:05:00Z'
},
},
};
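
The epoch-millisecond constants above can be sanity-checked in any JavaScript or TypeScript console; this snippet is only a verification aid, not part of the test code:

console.log(new Date(1609459200000).toISOString()); // 2021-01-01T00:00:00.000Z (gauge.min)
console.log(new Date(1609462800000).toISOString()); // 2021-01-01T01:00:00.000Z (gauge.max)
console.log(new Date(1609545600000).toISOString()); // 2021-01-02T00:00:00.000Z (rate.min)
console.log(new Date(1609545900000).toISOString()); // 2021-01-02T00:05:00.000Z (rate.max)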


@@ -18,5 +18,6 @@ export default function ({ loadTestFile }) {
loadTestFile(require.resolve('./metrics_explorer'));
loadTestFile(require.resolve('./ip_to_hostname'));
loadTestFile(require.resolve('./http_source'));
loadTestFile(require.resolve('./metric_threshold_alert'));
});
}


@@ -0,0 +1,322 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { InfraSource } from '../../../../plugins/infra/common/source_configuration/source_configuration';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
evaluateAlert,
EvaluatedAlertParams,
} from '../../../../plugins/infra/server/lib/alerting/metric_threshold/lib/evaluate_alert';
import {
Aggregators,
CountMetricExpressionParams,
NonCountMetricExpressionParams,
} from '../../../../plugins/infra/server/lib/alerting/metric_threshold/types';
import { Comparator } from '../../../../plugins/infra/server/lib/alerting/common/types';
import { DATES } from './constants';
const { gauge, rate } = DATES['alert-test-data'];
export default function ({ getService }: FtrProviderContext) {
const esArchiver = getService('esArchiver');
const esClient = getService('es');
const baseParams: EvaluatedAlertParams = {
groupBy: void 0,
filterQuery: void 0,
criteria: [
{
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: Comparator.GT_OR_EQ,
aggType: Aggregators.SUM,
metric: 'value',
},
],
};
const configuration: InfraSource['configuration'] = {
name: 'Default',
description: '',
logIndices: {
type: 'index_pattern',
indexPatternId: 'some-test-id',
},
metricAlias: 'alerts-test-data',
inventoryDefaultView: 'default',
metricsExplorerDefaultView: 'default',
anomalyThreshold: 70,
fields: {
container: 'container.id',
host: 'host.name',
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',
timestamp: '@timestamp',
message: ['message'],
},
logColumns: [
{
timestampColumn: {
id: '5e7f964a-be8a-40d8-88d2-fbcfbdca0e2f',
},
},
{
fieldColumn: {
id: 'eb9777a8-fcd3-420e-ba7d-172fff6da7a2',
field: 'event.dataset',
},
},
{
messageColumn: {
id: 'b645d6da-824b-4723-9a2a-e8cece1645c0',
},
},
],
};
describe('Metric Threshold Alerts Executor', () => {
before(() => esArchiver.load('x-pack/test/functional/es_archives/infra/alerts_test_data'));
after(() => esArchiver.unload('x-pack/test/functional/es_archives/infra/alerts_test_data'));
describe('with gauge data', () => {
describe('without groupBy', () => {
it('should alert on document count', async () => {
const params = {
...baseParams,
criteria: [
{
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: Comparator.GT_OR_EQ,
aggType: Aggregators.COUNT,
} as CountMetricExpressionParams,
],
};
const timeFrame = { end: gauge.max };
const results = await evaluateAlert(esClient, params, configuration, timeFrame);
expect(results).to.eql([
{
'*': {
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
aggType: 'count',
metric: 'Document count',
currentValue: 4,
timestamp: '2021-01-01T00:55:00.000Z',
shouldFire: [true],
shouldWarn: [false],
isNoData: [false],
isError: false,
},
},
]);
});
it('should alert on the last value when the end date is the same as the last event', async () => {
const params = { ...baseParams };
const timeFrame = { end: gauge.max };
const results = await evaluateAlert(esClient, params, configuration, timeFrame);
expect(results).to.eql([
{
'*': {
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
aggType: 'sum',
metric: 'value',
currentValue: 1,
timestamp: '2021-01-01T00:55:00.000Z',
shouldFire: [true],
shouldWarn: [false],
isNoData: [false],
isError: false,
},
},
]);
});
});
describe('with groupBy', () => {
it('should alert on document count', async () => {
const params = {
...baseParams,
groupBy: ['env'],
criteria: [
{
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: Comparator.GT_OR_EQ,
aggType: Aggregators.COUNT,
} as CountMetricExpressionParams,
],
};
const timeFrame = { end: gauge.max };
const results = await evaluateAlert(esClient, params, configuration, timeFrame);
expect(results).to.eql([
{
dev: {
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
aggType: 'count',
metric: 'Document count',
currentValue: 2,
timestamp: '2021-01-01T00:55:00.000Z',
shouldFire: [true],
shouldWarn: [false],
isNoData: [false],
isError: false,
},
prod: {
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
aggType: 'count',
metric: 'Document count',
currentValue: 2,
timestamp: '2021-01-01T00:55:00.000Z',
shouldFire: [true],
shouldWarn: [false],
isNoData: [false],
isError: false,
},
},
]);
});
it('should alert on the last value when the end date is the same as the last event', async () => {
const params = {
...baseParams,
groupBy: ['env'],
};
const timeFrame = { end: gauge.max };
const results = await evaluateAlert(esClient, params, configuration, timeFrame);
expect(results).to.eql([
{
dev: {
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
aggType: 'sum',
metric: 'value',
currentValue: 0,
timestamp: '2021-01-01T00:55:00.000Z',
shouldFire: [false],
shouldWarn: [false],
isNoData: [false],
isError: false,
},
prod: {
timeSize: 5,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
aggType: 'sum',
metric: 'value',
currentValue: 1,
timestamp: '2021-01-01T00:55:00.000Z',
shouldFire: [true],
shouldWarn: [false],
isNoData: [false],
isError: false,
},
},
]);
});
});
});
describe('with rate data', () => {
describe('without groupBy', () => {
it('should alert on rate', async () => {
const params = {
...baseParams,
criteria: [
{
timeSize: 1,
timeUnit: 'm',
threshold: [0.5],
comparator: Comparator.GT_OR_EQ,
aggType: Aggregators.RATE,
metric: 'value',
} as NonCountMetricExpressionParams,
],
};
const timeFrame = { end: rate.max };
const results = await evaluateAlert(esClient, params, configuration, timeFrame);
expect(results).to.eql([
{
'*': {
timeSize: 1,
timeUnit: 'm',
threshold: [0.5],
comparator: '>=',
aggType: 'rate',
metric: 'value',
currentValue: 0.6666666666666666,
timestamp: '2021-01-02T00:04:00.000Z',
shouldFire: [false, false, false, false, true],
shouldWarn: [false],
isNoData: [true, false, false, false, false],
isError: false,
},
},
]);
});
});
describe('with groupBy', () => {
it('should warn but not fire on rate', async () => {
const params = {
...baseParams,
groupBy: 'env',
criteria: [
{
timeSize: 1,
timeUnit: 'm',
threshold: [1],
comparator: Comparator.GT_OR_EQ,
warningThreshold: [0.5],
warningComparator: Comparator.GT_OR_EQ,
aggType: Aggregators.RATE,
metric: 'value',
} as NonCountMetricExpressionParams,
],
};
const timeFrame = { end: rate.max };
const results = await evaluateAlert(esClient, params, configuration, timeFrame);
expect(results).to.eql([
{
dev: {
timeSize: 1,
timeUnit: 'm',
threshold: [1],
comparator: '>=',
warningThreshold: [0.5],
warningComparator: '>=',
aggType: 'rate',
metric: 'value',
currentValue: 0.6666666666666666,
timestamp: '2021-01-02T00:04:00.000Z',
shouldFire: [false, false, false, false, false],
shouldWarn: [false, false, false, false, true],
isNoData: [true, false, false, false, false],
isError: false,
},
},
]);
});
});
});
});
}


@@ -48,7 +48,9 @@ export default function ({ getService }: FtrProviderContext) {
});
expect(result.hits).to.be.ok();
expect(result.aggregations).to.be.ok();
if (aggType !== 'count') {
expect(result.aggregations).to.be.ok();
}
});
}
it('should work with a filterQuery', async () => {


@@ -0,0 +1,28 @@
{
"type": "index",
"value": {
"aliases": {
},
"index": "alerts-test-data",
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
},
"env": {
"ignore_above": 256,
"type": "keyword"
},
"value": {
"type": "long"
}
}
},
"settings": {
"index": {
"number_of_replicas": "1",
"number_of_shards": "1"
}
}
}
}
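
For reference, a hypothetical document that would satisfy this mapping and the gauge scenario the new tests query: the env values mirror the dev/prod groups asserted in the tests, but the timestamp and value here are illustrative and not taken from the archive.

const sampleGaugeDoc = {
  '@timestamp': '2021-01-01T00:55:00.000Z',
  env: 'dev',
  value: 1,
};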