[Metrics UI] Remove TSVB dependency from Metrics Explorer APIs (#74804)

* [Metrics UI] Remove TSVB dependency from Metrics Explorer APIs

* Update x-pack/plugins/infra/public/pages/metrics/metrics_explorer/components/chart_title.tsx

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

* Update x-pack/plugins/infra/server/lib/metrics/lib/calculate_bucket_size/calculate_auto.ts

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

* Update x-pack/plugins/infra/server/lib/metrics/lib/calculate_bucket_size/calculate_auto.ts

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

* Update x-pack/plugins/infra/server/lib/metrics/lib/convert_histogram_buckets_to_timeseries.ts

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

* Update x-pack/plugins/infra/server/lib/metrics/lib/convert_histogram_buckets_to_timeseries.ts

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

* Update x-pack/plugins/infra/server/routes/metrics_explorer/lib/find_interval_for_metrics.ts

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

* Fixing some names, changing some units

* Reverting TSVB calculate_auto; fixing names in infra

* Fixing translation names

* Fixing typo

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>

Co-authored-by: Zacqary Adam Xeper <Zacqary@users.noreply.github.com>
Chris Cowan 2020-08-13 18:45:47 -07:00 committed by GitHub
parent 24c2e0a452
commit 1632391f35
80 changed files with 2190 additions and 534 deletions

View file

@ -5,3 +5,6 @@
*/
export const DEFAULT_SOURCE_ID = 'default';
export const METRICS_INDEX_PATTERN = 'metrics-*,metricbeat-*';
export const LOGS_INDEX_PATTERN = 'logs-*,filebeat-*,kibana_sample_data_logs*';
export const TIMESTAMP_FIELD = '@timestamp';

View file

@ -8,3 +8,4 @@ export * from './log_analysis';
export * from './metadata_api';
export * from './log_entries';
export * from './metrics_explorer';
export * from './metrics_api';

View file

@ -0,0 +1,97 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { MetricsUIAggregationRT } from '../inventory_models/types';
import { afterKeyObjectRT } from './metrics_explorer';
export const MetricsAPITimerangeRT = rt.type({
field: rt.string,
from: rt.number,
to: rt.number,
interval: rt.string,
});
const groupByRT = rt.union([rt.string, rt.null, rt.undefined]);
export const MetricsAPIMetricRT = rt.type({
id: rt.string,
aggregations: MetricsUIAggregationRT,
});
export const MetricsAPIRequestRT = rt.intersection([
rt.type({
timerange: MetricsAPITimerangeRT,
indexPattern: rt.string,
metrics: rt.array(MetricsAPIMetricRT),
}),
rt.partial({
groupBy: rt.array(groupByRT),
afterKey: rt.union([rt.null, afterKeyObjectRT]),
limit: rt.union([rt.number, rt.null, rt.undefined]),
filters: rt.array(rt.object),
forceInterval: rt.boolean,
dropLastBucket: rt.boolean,
alignDataToEnd: rt.boolean,
}),
]);
export const MetricsAPIPageInfoRT = rt.type({
afterKey: rt.union([rt.null, afterKeyObjectRT, rt.undefined]),
interval: rt.number,
});
export const MetricsAPIColumnTypeRT = rt.keyof({
date: null,
number: null,
string: null,
});
export const MetricsAPIColumnRT = rt.type({
name: rt.string,
type: MetricsAPIColumnTypeRT,
});
export const MetricsAPIRowRT = rt.intersection([
rt.type({
timestamp: rt.number,
}),
rt.record(rt.string, rt.union([rt.string, rt.number, rt.null, rt.undefined])),
]);
export const MetricsAPISeriesRT = rt.intersection([
rt.type({
id: rt.string,
columns: rt.array(MetricsAPIColumnRT),
rows: rt.array(MetricsAPIRowRT),
}),
rt.partial({
keys: rt.array(rt.string),
}),
]);
export const MetricsAPIResponseRT = rt.type({
series: rt.array(MetricsAPISeriesRT),
info: MetricsAPIPageInfoRT,
});
export type MetricsAPITimerange = rt.TypeOf<typeof MetricsAPITimerangeRT>;
export type MetricsAPIColumnType = rt.TypeOf<typeof MetricsAPIColumnTypeRT>;
export type MetricsAPIMetric = rt.TypeOf<typeof MetricsAPIMetricRT>;
export type MetricsAPIPageInfo = rt.TypeOf<typeof MetricsAPIPageInfoRT>;
export type MetricsAPIColumn = rt.TypeOf<typeof MetricsAPIColumnRT>;
export type MetricsAPIRow = rt.TypeOf<typeof MetricsAPIRowRT>;
export type MetricsAPISeries = rt.TypeOf<typeof MetricsAPISeriesRT>;
export type MetricsAPIRequest = rt.TypeOf<typeof MetricsAPIRequestRT>;
export type MetricsAPIResponse = rt.TypeOf<typeof MetricsAPIResponseRT>;
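
For reference, a minimal sketch of validating a request body against the new codec; the payload values are illustrative and the import path is assumed:

import { isRight } from 'fp-ts/lib/Either';
import { MetricsAPIRequest, MetricsAPIRequestRT } from './metrics_api';

// Hypothetical request: average CPU per host over one hour at >=1m resolution.
const payload = {
  timerange: { field: '@timestamp', from: 1577836800000, to: 1577840400000, interval: '>=1m' },
  indexPattern: 'metrics-*',
  metrics: [
    { id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
  ],
  groupBy: ['host.name'],
  limit: 9,
};

// decode() returns an Either; isRight() narrows it to the validated, typed request.
const decoded = MetricsAPIRequestRT.decode(payload);
if (isRight(decoded)) {
  const request: MetricsAPIRequest = decoded.right;
}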

View file

@ -108,6 +108,8 @@ export const metricsExplorerResponseRT = rt.type({
pageInfo: metricsExplorerPageInfoRT,
});
export type AfterKey = rt.TypeOf<typeof afterKeyObjectRT>;
export type MetricsExplorerAggregation = rt.TypeOf<typeof metricsExplorerAggregationRT>;
export type MetricsExplorerColumnType = rt.TypeOf<typeof metricsExplorerColumnTypeRT>;

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const cpu: SnapshotModel = {
export const cpu: MetricsUIAggregation = {
cpu_avg: {
avg: {
field: 'aws.ec2.cpu.total.pct',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const diskIOReadBytes: SnapshotModel = {
export const diskIOReadBytes: MetricsUIAggregation = {
diskIOReadBytes: {
avg: {
field: 'aws.ec2.diskio.read.bytes_per_sec',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const diskIOWriteBytes: SnapshotModel = {
export const diskIOWriteBytes: MetricsUIAggregation = {
diskIOWriteBytes: {
avg: {
field: 'aws.ec2.diskio.write.bytes_per_sec',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const rx: SnapshotModel = {
export const rx: MetricsUIAggregation = {
rx: {
avg: {
field: 'aws.ec2.network.in.bytes_per_sec',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const tx: SnapshotModel = {
export const tx: MetricsUIAggregation = {
tx: {
avg: {
field: 'aws.ec2.network.in.bytes_per_sec',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const cpu: SnapshotModel = {
export const cpu: MetricsUIAggregation = {
cpu_avg: {
avg: {
field: 'aws.rds.cpu.total.pct',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const rdsActiveTransactions: SnapshotModel = {
export const rdsActiveTransactions: MetricsUIAggregation = {
rdsActiveTransactions: {
avg: {
field: 'aws.rds.transactions.active',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const rdsConnections: SnapshotModel = {
export const rdsConnections: MetricsUIAggregation = {
rdsConnections: {
avg: {
field: 'aws.rds.database_connections',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const rdsLatency: SnapshotModel = {
export const rdsLatency: MetricsUIAggregation = {
rdsLatency: {
avg: {
field: 'aws.rds.latency.dml',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const rdsQueriesExecuted: SnapshotModel = {
export const rdsQueriesExecuted: MetricsUIAggregation = {
rdsQueriesExecuted: {
avg: {
field: 'aws.rds.queries',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const s3BucketSize: SnapshotModel = {
export const s3BucketSize: MetricsUIAggregation = {
s3BucketSize: {
max: {
field: 'aws.s3_daily_storage.bucket.size.bytes',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const s3DownloadBytes: SnapshotModel = {
export const s3DownloadBytes: MetricsUIAggregation = {
s3DownloadBytes: {
max: {
field: 'aws.s3_request.downloaded.bytes',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const s3NumberOfObjects: SnapshotModel = {
export const s3NumberOfObjects: MetricsUIAggregation = {
s3NumberOfObjects: {
max: {
field: 'aws.s3_daily_storage.number_of_objects',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const s3TotalRequests: SnapshotModel = {
export const s3TotalRequests: MetricsUIAggregation = {
s3TotalRequests: {
max: {
field: 'aws.s3_request.requests.total',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const s3UploadBytes: SnapshotModel = {
export const s3UploadBytes: MetricsUIAggregation = {
s3UploadBytes: {
max: {
field: 'aws.s3_request.uploaded.bytes',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const sqsMessagesDelayed: SnapshotModel = {
export const sqsMessagesDelayed: MetricsUIAggregation = {
sqsMessagesDelayed: {
max: {
field: 'aws.sqs.messages.delayed',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const sqsMessagesEmpty: SnapshotModel = {
export const sqsMessagesEmpty: MetricsUIAggregation = {
sqsMessagesEmpty: {
max: {
field: 'aws.sqs.messages.not_visible',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const sqsMessagesSent: SnapshotModel = {
export const sqsMessagesSent: MetricsUIAggregation = {
sqsMessagesSent: {
max: {
field: 'aws.sqs.messages.sent',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const sqsMessagesVisible: SnapshotModel = {
export const sqsMessagesVisible: MetricsUIAggregation = {
sqsMessagesVisible: {
avg: {
field: 'aws.sqs.messages.visible',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const sqsOldestMessage: SnapshotModel = {
export const sqsOldestMessage: MetricsUIAggregation = {
sqsOldestMessage: {
max: {
field: 'aws.sqs.oldest_message_age.sec',

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const cpu: SnapshotModel = {
export const cpu: MetricsUIAggregation = {
cpu: {
avg: {
field: 'docker.cpu.total.pct',

View file

@ -4,6 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const memory: SnapshotModel = { memory: { avg: { field: 'docker.memory.usage.pct' } } };
export const memory: MetricsUIAggregation = {
memory: { avg: { field: 'docker.memory.usage.pct' } },
};

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const cpu: SnapshotModel = {
export const cpu: MetricsUIAggregation = {
cpu_user: {
avg: {
field: 'system.cpu.user.pct',

View file

@ -4,6 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const load: SnapshotModel = { load: { avg: { field: 'system.load.5' } } };
export const load: MetricsUIAggregation = { load: { avg: { field: 'system.load.5' } } };

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const logRate: SnapshotModel = {
export const logRate: MetricsUIAggregation = {
count: {
bucket_script: {
buckets_path: { count: '_count' },

View file

@ -4,8 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const memory: SnapshotModel = {
export const memory: MetricsUIAggregation = {
memory: { avg: { field: 'system.memory.actual.used.pct' } },
};

View file

@ -51,9 +51,9 @@ const getFieldByType = (type: InventoryItemType, fields: InventoryFields) => {
}
};
export const findInventoryFields = (type: InventoryItemType, fields: InventoryFields) => {
export const findInventoryFields = (type: InventoryItemType, fields?: InventoryFields) => {
const inventoryModel = findInventoryModel(type);
if (LEGACY_TYPES.includes(type)) {
if (fields && LEGACY_TYPES.includes(type)) {
const id = getFieldByType(type, fields) || inventoryModel.fields.id;
return {
...inventoryModel.fields,

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const cpu: SnapshotModel = {
export const cpu: MetricsUIAggregation = {
cpu_with_limit: {
avg: {
field: 'kubernetes.pod.cpu.usage.limit.pct',

View file

@ -4,8 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const memory: SnapshotModel = {
export const memory: MetricsUIAggregation = {
memory: { avg: { field: 'kubernetes.pod.memory.usage.node.pct' } },
};

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const count: SnapshotModel = {
export const count: MetricsUIAggregation = {
count: {
bucket_script: {
buckets_path: { count: '_count' },

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const networkTraffic = (id: string, field: string): SnapshotModel => {
export const networkTraffic = (id: string, field: string): MetricsUIAggregation => {
return {
[`${id}_max`]: { max: { field } },
[`${id}_deriv`]: {

View file

@ -4,13 +4,13 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const networkTrafficWithInterfaces = (
id: string,
metricField: string,
interfaceField: string
): SnapshotModel => ({
): MetricsUIAggregation => ({
[`${id}_interfaces`]: {
terms: { field: interfaceField },
aggregations: {

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SnapshotModel } from '../../../types';
import { MetricsUIAggregation } from '../../../types';
export const rate = (id: string, field: string): SnapshotModel => {
export const rate = (id: string, field: string): MetricsUIAggregation => {
return {
[`${id}_max`]: { max: { field } },
[`${id}_deriv`]: {

View file

@ -224,7 +224,7 @@ export type TSVBMetricModelCreator = (
interval: string
) => TSVBMetricModel;
export const SnapshotModelMetricAggRT = rt.record(
export const ESBasicMetricAggRT = rt.record(
rt.string,
rt.union([
rt.undefined,
@ -234,7 +234,21 @@ export const SnapshotModelMetricAggRT = rt.record(
])
);
export const SnapshotModelBucketScriptRT = rt.type({
export const ESPercentileAggRT = rt.type({
percentiles: rt.type({
field: rt.string,
percents: rt.array(rt.number),
}),
});
export const ESCaridnalityAggRT = rt.type({
cardinality: rt.partial({
field: rt.string,
script: rt.string,
}),
});
export const ESBucketScriptAggRT = rt.type({
bucket_script: rt.intersection([
rt.type({
buckets_path: rt.record(rt.string, rt.union([rt.undefined, rt.string])),
@ -247,13 +261,13 @@ export const SnapshotModelBucketScriptRT = rt.type({
]),
});
export const SnapshotModelCumulativeSumRT = rt.type({
export const ESCumulativeSumAggRT = rt.type({
cumulative_sum: rt.type({
buckets_path: rt.string,
}),
});
export const SnapshotModelDerivativeRT = rt.type({
export const ESDerivativeAggRT = rt.type({
derivative: rt.type({
buckets_path: rt.string,
gap_policy: rt.keyof({ skip: null, insert_zeros: null }),
@ -261,7 +275,7 @@ export const SnapshotModelDerivativeRT = rt.type({
}),
});
export const SnapshotModelSumBucketRT = rt.type({
export const ESSumBucketAggRT = rt.type({
sum_bucket: rt.type({
buckets_path: rt.string,
}),
@ -269,32 +283,31 @@ export const SnapshotModelSumBucketRT = rt.type({
interface SnapshotTermsWithAggregation {
terms: { field: string };
aggregations: SnapshotModel;
aggregations: MetricsUIAggregation;
}
export const SnapshotTermsWithAggregationRT: rt.Type<SnapshotTermsWithAggregation> = rt.recursion(
export const ESTermsWithAggregationRT: rt.Type<SnapshotTermsWithAggregation> = rt.recursion(
'SnapshotModelRT',
() =>
rt.type({
terms: rt.type({ field: rt.string }),
aggregations: SnapshotModelRT,
aggregations: MetricsUIAggregationRT,
})
);
export const SnapshotModelAggregationRT = rt.union([
SnapshotModelMetricAggRT,
SnapshotModelBucketScriptRT,
SnapshotModelCumulativeSumRT,
SnapshotModelDerivativeRT,
SnapshotModelSumBucketRT,
SnapshotTermsWithAggregationRT,
export const ESAggregationRT = rt.union([
ESBasicMetricAggRT,
ESPercentileAggRT,
ESBucketScriptAggRT,
ESCumulativeSumAggRT,
ESDerivativeAggRT,
ESSumBucketAggRT,
ESTermsWithAggregationRT,
ESCaridnalityAggRT,
]);
export const SnapshotModelRT = rt.record(
rt.string,
rt.union([rt.undefined, SnapshotModelAggregationRT])
);
export type SnapshotModel = rt.TypeOf<typeof SnapshotModelRT>;
export const MetricsUIAggregationRT = rt.record(rt.string, ESAggregationRT);
export type MetricsUIAggregation = rt.TypeOf<typeof MetricsUIAggregationRT>;
export const SnapshotMetricTypeRT = rt.keyof({
count: null,
@ -327,7 +340,7 @@ export type SnapshotMetricType = rt.TypeOf<typeof SnapshotMetricTypeRT>;
export interface InventoryMetrics {
tsvb: { [name: string]: TSVBMetricModelCreator };
snapshot: { [name: string]: SnapshotModel };
snapshot: { [name: string]: MetricsUIAggregation };
defaultSnapshot: SnapshotMetricType;
/** This is used by the inventory view to calculate the appropriate amount of time for the metrics detail page. Some metrics like awsS3 require multiple days where others like host only need an hour.*/
defaultTimeRangeInSeconds: number;
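
As an illustration of the renamed codec, a hypothetical aggregation value that should satisfy MetricsUIAggregationRT (a max metric plus a derivative over it, keyed by aggregation id):

import { MetricsUIAggregation, MetricsUIAggregationRT } from './types';

// Rate-style aggregation, mirroring the shared rate() helper: a max metric and a
// derivative over it, both keyed by their aggregation ids.
const rxRate: MetricsUIAggregation = {
  rx_max: { max: { field: 'system.network.in.bytes' } },
  rx_deriv: {
    derivative: { buckets_path: 'rx_max', gap_policy: 'skip', unit: '1s' },
  },
};

// Runtime validation against the union of ES aggregation codecs defined above.
MetricsUIAggregationRT.is(rxRate); // => true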

View file

@ -16,6 +16,7 @@ import {
import { FormattedMessage } from '@kbn/i18n/react';
import React from 'react';
import { METRICS_INDEX_PATTERN } from '../../../common/constants';
import { InputFieldProps } from './input_fields';
interface IndicesConfigurationPanelProps {
@ -63,7 +64,7 @@ export const IndicesConfigurationPanel = ({
id="xpack.infra.sourceConfiguration.metricIndicesRecommendedValue"
defaultMessage="The recommended value is {defaultValue}"
values={{
defaultValue: <EuiCode>metrics-*,metricbeat-*</EuiCode>,
defaultValue: <EuiCode>{METRICS_INDEX_PATTERN}</EuiCode>,
}}
/>
}

View file

@ -6,12 +6,17 @@
import React, { Fragment } from 'react';
import { EuiText, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { MetricsExplorerSeries } from '../../../../../common/http_api';
interface Props {
series: MetricsExplorerSeries;
}
const ALL_TITLE = i18n.translate('xpack.infra.metricsExplorer.everything', {
defaultMessage: 'Everything',
});
export const ChartTitle = ({ series }: Props) => {
if (series.keys != null) {
const { keys } = series;
@ -21,7 +26,7 @@ export const ChartTitle = ({ series }: Props) => {
<Fragment key={name}>
<EuiFlexItem grow={false}>
<EuiText size="m" color={keys.length - 1 > i ? 'subdued' : 'default'}>
<strong>{name}</strong>
<strong>{name === '*' ? ALL_TITLE : name}</strong>
</EuiText>
</EuiFlexItem>
{keys.length - 1 > i && (

View file

@ -7,8 +7,5 @@
import { MetricsExplorerOptionsMetric } from '../../hooks/use_metrics_explorer_options';
export const getMetricId = (metric: MetricsExplorerOptionsMetric, index: string | number) => {
if (['p95', 'p99'].includes(metric.aggregation)) {
return `metric_${index}:percentile_0`;
}
return `metric_${index}`;
};

View file

@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export const EMPTY_RESPONSE = {
series: [
{
id: '*',
keys: ['*'],
columns: [],
rows: [],
},
],
info: { total: 0, afterKey: null, interval: 0 },
};

View file

@ -0,0 +1,113 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { set } from '@elastic/safer-lodash-set';
import { ThrowReporter } from 'io-ts/lib/ThrowReporter';
import { MetricsAPIRequest, MetricsAPIResponse, afterKeyObjectRT } from '../../../common/http_api';
import {
ESSearchClient,
GroupingResponseRT,
MetricsESResponse,
HistogramResponseRT,
} from './types';
import { EMPTY_RESPONSE } from './constants';
import { createAggregations } from './lib/create_aggregations';
import { convertHistogramBucketsToTimeseries } from './lib/convert_histogram_buckets_to_timeseries';
import { calculateBucketSize } from './lib/calculate_bucket_size';
export const query = async (
search: ESSearchClient,
options: MetricsAPIRequest
): Promise<MetricsAPIResponse> => {
const hasGroupBy = Array.isArray(options.groupBy) && options.groupBy.length > 0;
const filter: Array<Record<string, any>> = [
{
range: {
[options.timerange.field]: {
gte: options.timerange.from,
lte: options.timerange.to,
format: 'epoch_millis',
},
},
},
...(options.groupBy?.map((field) => ({ exists: { field } })) ?? []),
];
const params = {
allowNoIndices: true,
ignoreUnavailable: true,
index: options.indexPattern,
body: {
size: 0,
query: { bool: { filter } },
aggs: { ...createAggregations(options) },
},
};
if (hasGroupBy) {
if (options.afterKey) {
if (afterKeyObjectRT.is(options.afterKey)) {
set(params, 'body.aggs.groupings.composite.after', options.afterKey);
} else {
set(params, 'body.aggs.groupings.composite.after', { groupBy0: options.afterKey });
}
}
}
if (options.filters) {
params.body.query.bool.filter = [...params.body.query.bool.filter, ...options.filters];
}
const response = await search<{}, MetricsESResponse>(params);
if (response.hits.total.value === 0) {
return EMPTY_RESPONSE;
}
if (!response.aggregations) {
throw new Error('Aggregations should be present.');
}
const { bucketSize } = calculateBucketSize(options.timerange);
if (hasGroupBy && GroupingResponseRT.is(response.aggregations)) {
const { groupings } = response.aggregations;
const { after_key: afterKey } = groupings;
const limit = options.limit || 9;
const returnAfterKey = afterKey && groupings.buckets.length === limit ? true : false;
return {
series: groupings.buckets.map((bucket) => {
const keys = Object.values(bucket.key);
return convertHistogramBucketsToTimeseries(keys, options, bucket.histogram.buckets);
}),
info: {
afterKey: returnAfterKey ? afterKey : null,
interval: bucketSize,
},
};
} else if (hasGroupBy) {
ThrowReporter.report(GroupingResponseRT.decode(response.aggregations));
}
if (HistogramResponseRT.is(response.aggregations)) {
return {
series: [
convertHistogramBucketsToTimeseries(
['*'],
options,
response.aggregations.histogram.buckets
),
],
info: {
afterKey: null,
interval: bucketSize,
},
};
} else {
ThrowReporter.report(HistogramResponseRT.decode(response.aggregations));
}
throw new Error('Elasticsearch responded with an unrecognized format.');
};
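
A rough usage sketch for the helper above, with a stubbed search client and an assumed import path (both hypothetical); with zero hits the helper short-circuits to EMPTY_RESPONSE:

import { MetricsAPIRequest } from '../../../common/http_api';
import { query } from './index'; // hypothetical path to the module above

// Stubbed ESSearchClient that reports zero matching documents.
const emptySearch = async () =>
  ({ hits: { total: { value: 0 } }, aggregations: undefined } as any);

const request: MetricsAPIRequest = {
  timerange: { field: '@timestamp', from: Date.now() - 3600000, to: Date.now(), interval: '>=1m' },
  indexPattern: 'metrics-*',
  metrics: [
    { id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
  ],
  groupBy: ['host.name'],
  limit: 9,
};

query(emptySearch, request).then((response) => {
  // With no hits, response is EMPTY_RESPONSE: a single '*' series with no rows.
});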

View file

@ -0,0 +1,193 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`convertHistogramBucketsToTimeseries(keys, options, buckets) should drop the last bucket 1`] = `
Object {
"columns": Array [
Object {
"name": "timestamp",
"type": "date",
},
Object {
"name": "metric_0",
"type": "number",
},
],
"id": "example-0",
"keys": Array [
"example-0",
],
"rows": Array [
Object {
"metric_0": 1,
"timestamp": 1577836800000,
},
Object {
"metric_0": 1,
"timestamp": 1577836860000,
},
Object {
"metric_0": 1,
"timestamp": 1577836920000,
},
],
}
`;
exports[`convertHistogramBucketsToTimeseries(keys, options, buckets) should just work 1`] = `
Object {
"columns": Array [
Object {
"name": "timestamp",
"type": "date",
},
Object {
"name": "metric_0",
"type": "number",
},
],
"id": "example-0",
"keys": Array [
"example-0",
],
"rows": Array [
Object {
"metric_0": 1,
"timestamp": 1577836800000,
},
Object {
"metric_0": 1,
"timestamp": 1577836860000,
},
Object {
"metric_0": 1,
"timestamp": 1577836920000,
},
Object {
"metric_0": null,
"timestamp": 1577836920000,
},
],
}
`;
exports[`convertHistogramBucketsToTimeseries(keys, options, buckets) should return empty timeseries for empty metrics 1`] = `
Object {
"columns": Array [],
"id": "example-0",
"keys": Array [
"example-0",
],
"rows": Array [],
}
`;
exports[`convertHistogramBucketsToTimeseries(keys, options, buckets) should work with keyed percentiles 1`] = `
Object {
"columns": Array [
Object {
"name": "timestamp",
"type": "date",
},
Object {
"name": "metric_0",
"type": "number",
},
],
"id": "example-0",
"keys": Array [
"example-0",
],
"rows": Array [
Object {
"metric_0": 4,
"timestamp": 1577836800000,
},
Object {
"metric_0": 4,
"timestamp": 1577836860000,
},
Object {
"metric_0": 4,
"timestamp": 1577836920000,
},
Object {
"metric_0": 4,
"timestamp": 1577836920000,
},
],
}
`;
exports[`convertHistogramBucketsToTimeseries(keys, options, buckets) should work with normalized_values 1`] = `
Object {
"columns": Array [
Object {
"name": "timestamp",
"type": "date",
},
Object {
"name": "metric_0",
"type": "number",
},
],
"id": "example-0",
"keys": Array [
"example-0",
],
"rows": Array [
Object {
"metric_0": 2,
"timestamp": 1577836800000,
},
Object {
"metric_0": 2,
"timestamp": 1577836860000,
},
Object {
"metric_0": 2,
"timestamp": 1577836920000,
},
Object {
"metric_0": null,
"timestamp": 1577836920000,
},
],
}
`;
exports[`convertHistogramBucketsToTimeseries(keys, options, buckets) should work with percentiles 1`] = `
Object {
"columns": Array [
Object {
"name": "timestamp",
"type": "date",
},
Object {
"name": "metric_0",
"type": "number",
},
],
"id": "example-0",
"keys": Array [
"example-0",
],
"rows": Array [
Object {
"metric_0": 3,
"timestamp": 1577836800000,
},
Object {
"metric_0": 3,
"timestamp": 1577836860000,
},
Object {
"metric_0": 3,
"timestamp": 1577836920000,
},
Object {
"metric_0": 3,
"timestamp": 1577836920000,
},
],
}
`;

View file

@ -0,0 +1,87 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`createAggregations(options) should add offset to histogram 1`] = `
Object {
"histogram": Object {
"aggregations": Object {
"metric_0": Object {
"avg": Object {
"field": "system.cpu.user.pct",
},
},
},
"date_histogram": Object {
"extended_bounds": Object {
"max": 1577838720000,
"min": 1577835120000,
},
"field": "@timestamp",
"fixed_interval": "1m",
"offset": "-60s",
},
},
}
`;
exports[`createAggregations(options) should return groupings aggregation with groupBy 1`] = `
Object {
"groupings": Object {
"aggs": Object {
"histogram": Object {
"aggregations": Object {
"metric_0": Object {
"avg": Object {
"field": "system.cpu.user.pct",
},
},
},
"date_histogram": Object {
"extended_bounds": Object {
"max": 1577840400000,
"min": 1577836800000,
},
"field": "@timestamp",
"fixed_interval": "1m",
"offset": "0s",
},
},
},
"composite": Object {
"size": 20,
"sources": Array [
Object {
"groupBy0": Object {
"terms": Object {
"field": "host.name",
"order": "asc",
},
},
},
],
},
},
}
`;
exports[`createAggregations(options) should return just histogram aggregation without groupBy 1`] = `
Object {
"histogram": Object {
"aggregations": Object {
"metric_0": Object {
"avg": Object {
"field": "system.cpu.user.pct",
},
},
},
"date_histogram": Object {
"extended_bounds": Object {
"max": 1577840400000,
"min": 1577836800000,
},
"field": "@timestamp",
"fixed_interval": "1m",
"offset": "0s",
},
},
}
`;

View file

@ -0,0 +1,23 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`createMetricsAggregations(options) should just work 1`] = `
Object {
"metric_0": Object {
"avg": Object {
"field": "system.cpu.user.pct",
},
},
"metric_1": Object {
"derivative": Object {
"buckets_path": "metric_1_max",
"gap_policy": "skip",
"unit": "1s",
},
},
"metric_1_max": Object {
"max": Object {
"field": "system.network.in.bytes",
},
},
}
`;

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { calculateAuto } from './calculate_auto';
import moment, { isDuration } from 'moment';
describe('calculateAuto.near(bucket, duration)', () => {
it('should calculate the bucket size for 15 minutes', () => {
const bucketSizeDuration = calculateAuto.near(100, moment.duration(15, 'minutes'));
expect(bucketSizeDuration).not.toBeUndefined();
expect(isDuration(bucketSizeDuration)).toBeTruthy();
expect(bucketSizeDuration!.asSeconds()).toBe(10);
});
it('should calculate the bucket size for an hour', () => {
const bucketSizeDuration = calculateAuto.near(100, moment.duration(1, 'hour'));
expect(bucketSizeDuration).not.toBeUndefined();
expect(isDuration(bucketSizeDuration)).toBeTruthy();
expect(bucketSizeDuration!.asSeconds()).toBe(30);
});
it('should calculate the bucket size for a day', () => {
const bucketSizeDuration = calculateAuto.near(100, moment.duration(1, 'day'));
expect(bucketSizeDuration).not.toBeUndefined();
expect(isDuration(bucketSizeDuration)).toBeTruthy();
expect(bucketSizeDuration!.asMinutes()).toBe(10);
});
});

View file

@ -0,0 +1,88 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import moment, { isDuration, Duration } from 'moment';
import { isNumber } from 'lodash';
const d = moment.duration;
const roundingRules = [
[d(500, 'ms'), d(100, 'ms')],
[d(5, 'second'), d(1, 'second')],
[d(7.5, 'second'), d(5, 'second')],
[d(15, 'second'), d(10, 'second')],
[d(45, 'second'), d(30, 'second')],
[d(3, 'minute'), d(1, 'minute')],
[d(9, 'minute'), d(5, 'minute')],
[d(20, 'minute'), d(10, 'minute')],
[d(45, 'minute'), d(30, 'minute')],
[d(2, 'hour'), d(1, 'hour')],
[d(6, 'hour'), d(3, 'hour')],
[d(24, 'hour'), d(12, 'hour')],
[d(1, 'week'), d(1, 'd')],
[d(3, 'week'), d(1, 'week')],
[d(1, 'year'), d(1, 'month')],
[Infinity, d(1, 'year')],
];
const revRoundingRules = [...roundingRules].reverse();
type NumberOrDuration = number | Duration;
type Rule = NumberOrDuration[];
type CheckFunction = (
bound: NumberOrDuration,
interval: Duration,
target: number
) => Duration | undefined;
function findRule(rules: Rule[], check: CheckFunction, last?: boolean) {
function pickInterval(buckets: number, duration: Duration) {
const target = duration.asMilliseconds() / buckets;
let lastResult = null;
for (const rule of rules) {
const result = check(rule[0] as Duration, rule[1] as Duration, target);
if (result == null) {
if (!last) continue;
if (lastResult) return lastResult;
break;
}
if (!last) return result;
lastResult = result;
}
// fallback to just a number of milliseconds, ensure ms is >= 1
const ms = Math.max(Math.floor(target), 1);
return moment.duration(ms, 'ms');
}
return (buckets: number, duration: Duration) => {
const interval = pickInterval(buckets, duration);
if (isDuration(interval)) return interval;
};
}
export const calculateAuto = {
near: findRule(
revRoundingRules,
function near(bound, interval, target) {
if (isDuration(bound) && bound.asMilliseconds() > target) return interval;
if (isNumber(bound) && bound > target) return interval;
},
true
),
lessThan: findRule(revRoundingRules, function lessThan(_bound, interval, target) {
if (interval.asMilliseconds() < target) return interval;
}),
atLeast: findRule(revRoundingRules, function atLeast(_bound, interval, target) {
if (interval.asMilliseconds() <= target) return interval;
}),
};
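
A worked example of the rule selection, matching the hour-long test case above (assuming the module path):

import moment from 'moment';
import { calculateAuto } from './calculate_auto';

// Target bucket length = 1 hour / 100 buckets = 36,000 ms.
// Scanning the reversed rules, every bound larger than 36s keeps its interval
// (1y, 1M, ... down to 30s from the 45s bound); the next bound (15s) is smaller,
// so `near` returns the last kept interval: 30 seconds.
const interval = calculateAuto.near(100, moment.duration(1, 'hour'));
interval?.asSeconds(); // => 30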

View file

@ -0,0 +1,53 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { calculateBucketSize } from './';
import moment from 'moment';
const timerange = {
from: moment('2017-01-01T00:00:00.000Z').valueOf(),
to: moment('2017-01-01T01:00:00.000Z').valueOf(),
interval: '1m',
field: '@timestamp',
};
describe('calculateBucketSize(timerange, intervalString)', () => {
test('returns auto calculated buckets', () => {
const result = calculateBucketSize({ ...timerange, interval: 'auto' });
expect(result).toHaveProperty('bucketSize', 30);
expect(result).toHaveProperty('intervalString', '30s');
});
test('returns overridden buckets (1s)', () => {
const result = calculateBucketSize({ ...timerange, interval: '1s' });
expect(result).toHaveProperty('bucketSize', 1);
expect(result).toHaveProperty('intervalString', '1s');
});
test('returns overridden buckets (10m)', () => {
const result = calculateBucketSize({ ...timerange, interval: '10m' });
expect(result).toHaveProperty('bucketSize', 600);
expect(result).toHaveProperty('intervalString', '10m');
});
test('returns overridden buckets (1d)', () => {
const result = calculateBucketSize({ ...timerange, interval: '1d' });
expect(result).toHaveProperty('bucketSize', 86400);
expect(result).toHaveProperty('intervalString', '1d');
});
test('returns overridden buckets (>=2d)', () => {
const result = calculateBucketSize({ ...timerange, interval: '>=2d' });
expect(result).toHaveProperty('bucketSize', 86400 * 2);
expect(result).toHaveProperty('intervalString', '2d');
});
test('returns overridden buckets (>=10s)', () => {
const result = calculateBucketSize({ ...timerange, interval: '>=10s' });
expect(result).toHaveProperty('bucketSize', 30);
expect(result).toHaveProperty('intervalString', '30s');
});
});

View file

@ -0,0 +1,89 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import moment from 'moment';
import { MetricsAPITimerange } from '../../../../../common/http_api';
import { calculateAuto } from './calculate_auto';
import {
getUnitValue,
parseInterval,
convertIntervalToUnit,
ASCENDING_UNIT_ORDER,
} from './unit_to_seconds';
import { INTERVAL_STRING_RE, GTE_INTERVAL_RE } from './interval_regex';
const calculateBucketData = (intervalString: string) => {
const intervalStringMatch = intervalString.match(INTERVAL_STRING_RE);
if (!intervalStringMatch) {
throw new Error('Unable to parse interval string');
}
const parsedInterval = parseInterval(intervalString);
if (!parsedInterval) {
throw new Error('Unable to parse interval string');
}
let bucketSize = Number(intervalStringMatch[1]) * getUnitValue(intervalStringMatch[2]);
// don't go too small
if (bucketSize < 1) {
bucketSize = 1;
}
// Check decimal
if (parsedInterval.value && parsedInterval.value % 1 !== 0) {
if (parsedInterval.unit && parsedInterval.unit !== 'ms') {
const { value, unit } = convertIntervalToUnit(
intervalString,
ASCENDING_UNIT_ORDER[ASCENDING_UNIT_ORDER.indexOf(parsedInterval.unit) - 1]
);
if (value && unit) {
intervalString = value + unit;
} else {
intervalString = '1ms';
}
} else {
intervalString = '1ms';
}
}
return {
bucketSize,
intervalString,
};
};
const calculateBucketSizeForAutoInterval = (timerange: MetricsAPITimerange): number | undefined => {
const duration = moment.duration(timerange.to - timerange.from, 'ms');
const bucketSizeDuration = calculateAuto.near(100, duration);
if (bucketSizeDuration) {
return bucketSizeDuration.asSeconds();
}
};
export const calculateBucketSize = (timerange: MetricsAPITimerange) => {
const bucketSize = calculateBucketSizeForAutoInterval(timerange);
let intervalString = `${bucketSize}s`;
const gteAutoMatch = timerange.interval.match(GTE_INTERVAL_RE);
if (gteAutoMatch) {
const bucketData = calculateBucketData(gteAutoMatch[1]);
if (bucketSize && bucketData.bucketSize >= bucketSize) {
return bucketData;
}
}
const matches = timerange.interval.match(INTERVAL_STRING_RE);
if (matches) {
intervalString = timerange.interval;
}
return calculateBucketData(intervalString);
};

View file

@ -0,0 +1,81 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { GTE_INTERVAL_RE, INTERVAL_STRING_RE } from './interval_regex';
describe('REGEX for Intervals', () => {
describe('GTE_INTERVAL_RE', () => {
test('returns true for">=12h"', () => {
const value = GTE_INTERVAL_RE.test('>=12h');
expect(value).toBeTruthy();
});
test('returns true for ">=1y"', () => {
const value = GTE_INTERVAL_RE.test('>=12h');
expect(value).toBeTruthy();
});
test('returns true for ">=25m"', () => {
const value = GTE_INTERVAL_RE.test('>=12h');
expect(value).toBeTruthy();
});
test('returns false "auto"', () => {
const value = GTE_INTERVAL_RE.test('auto');
expect(value).toBeFalsy();
});
test('returns false "wrongInput"', () => {
const value = GTE_INTERVAL_RE.test('wrongInput');
expect(value).toBeFalsy();
});
test('returns false "d"', () => {
const value = GTE_INTERVAL_RE.test('d');
expect(value).toBeFalsy();
});
test('returns false "y"', () => {
const value = GTE_INTERVAL_RE.test('y');
expect(value).toBeFalsy();
});
});
describe('INTERVAL_STRING_RE', () => {
test('returns true for "8d"', () => {
const value = INTERVAL_STRING_RE.test('8d');
expect(value).toBeTruthy();
});
test('returns true for "1y"', () => {
const value = INTERVAL_STRING_RE.test('1y');
expect(value).toBeTruthy();
});
test('returns true for "6M"', () => {
const value = INTERVAL_STRING_RE.test('6M');
expect(value).toBeTruthy();
});
test('returns false "auto"', () => {
const value = INTERVAL_STRING_RE.test('auto');
expect(value).toBeFalsy();
});
test('returns false "wrongInput"', () => {
const value = INTERVAL_STRING_RE.test('wrongInput');
expect(value).toBeFalsy();
});
test('returns false for">=21h"', () => {
const value = INTERVAL_STRING_RE.test('>=21h');
expect(value).toBeFalsy();
});
});
});

View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import dateMath from '@elastic/datemath';
export const GTE_INTERVAL_RE = new RegExp(`^>=([\\d\\.]+\\s*(${dateMath.units.join('|')}))$`);
export const INTERVAL_STRING_RE = new RegExp(`^([\\d\\.]+)\\s*(${dateMath.units.join('|')})$`);
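
For reference, a short sketch of what the two patterns accept and capture (example strings only):

import { GTE_INTERVAL_RE, INTERVAL_STRING_RE } from './interval_regex';

// '>=2d': the GTE pattern matches and capture group 1 holds the plain interval '2d',
// which calculate_bucket_size feeds back into its interval parsing.
const gte = '>=2d'.match(GTE_INTERVAL_RE); // gte?.[1] === '2d'

// '10m': the plain pattern captures the value and the unit separately.
const plain = '10m'.match(INTERVAL_STRING_RE); // plain?.[1] === '10', plain?.[2] === 'm'

// 'auto' matches neither, so callers fall back to the auto-calculated bucket size.
INTERVAL_STRING_RE.test('auto'); // => false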

View file

@ -0,0 +1,132 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
getUnitValue,
parseInterval,
convertIntervalToUnit,
getSuitableUnit,
} from './unit_to_seconds';
describe('parseInterval()', () => {
test('should parse "1m" interval (positive)', () =>
expect(parseInterval('1m')).toEqual({
value: 1,
unit: 'm',
}));
test('should parse "134d" interval (positive)', () =>
expect(parseInterval('134d')).toEqual({
value: 134,
unit: 'd',
}));
test('should parse "0.5d" interval (positive)', () =>
expect(parseInterval('0.5d')).toEqual({
value: 0.5,
unit: 'd',
}));
test('should parse "30M" interval (positive)', () =>
expect(parseInterval('30M')).toEqual({
value: 30,
unit: 'M',
}));
test('should not parse "gm" interval (negative)', () =>
expect(parseInterval('gm')).toEqual({
value: undefined,
unit: undefined,
}));
test('should not parse "-1d" interval (negative)', () =>
expect(parseInterval('-1d')).toEqual({
value: undefined,
unit: undefined,
}));
test('should not parse "M" interval (negative)', () =>
expect(parseInterval('M')).toEqual({
value: undefined,
unit: undefined,
}));
});
describe('convertIntervalToUnit()', () => {
test('should convert "30m" interval to "h" unit (positive)', () =>
expect(convertIntervalToUnit('30m', 'h')).toEqual({
value: 0.5,
unit: 'h',
}));
test('should convert "0.5h" interval to "m" unit (positive)', () =>
expect(convertIntervalToUnit('0.5h', 'm')).toEqual({
value: 30,
unit: 'm',
}));
test('should convert "1h" interval to "m" unit (positive)', () =>
expect(convertIntervalToUnit('1h', 'm')).toEqual({
value: 60,
unit: 'm',
}));
test('should convert "1h" interval to "ms" unit (positive)', () =>
expect(convertIntervalToUnit('1h', 'ms')).toEqual({
value: 3600000,
unit: 'ms',
}));
test('should not convert "30m" interval to "0" unit (positive)', () =>
expect(convertIntervalToUnit('30m', 'o')).toEqual({
value: undefined,
unit: undefined,
}));
test('should not convert "m" interval to "s" unit (positive)', () =>
expect(convertIntervalToUnit('m', 's')).toEqual({
value: undefined,
unit: undefined,
}));
});
describe('getSuitableUnit()', () => {
test('should return "d" unit for oneDayInSeconds (positive)', () => {
const oneDayInSeconds = getUnitValue('d') * 1;
expect(getSuitableUnit(oneDayInSeconds)).toBe('d');
});
test('should return "d" unit for twoDaysInSeconds (positive)', () => {
const twoDaysInSeconds = getUnitValue('d') * 2;
expect(getSuitableUnit(twoDaysInSeconds)).toBe('d');
});
test('should return "w" unit for threeWeeksInSeconds (positive)', () => {
const threeWeeksInSeconds = getUnitValue('w') * 3;
expect(getSuitableUnit(threeWeeksInSeconds)).toBe('w');
});
test('should return "y" unit for aroundOneYearInSeconds (positive)', () => {
const aroundOneYearInSeconds = getUnitValue('d') * 370;
expect(getSuitableUnit(aroundOneYearInSeconds)).toBe('y');
});
test('should return "y" unit for twoYearsInSeconds (positive)', () => {
const twoYearsInSeconds = getUnitValue('y') * 2;
expect(getSuitableUnit(twoYearsInSeconds)).toBe('y');
});
test('should return "undefined" unit for negativeNumber (negative)', () => {
const negativeNumber = -12;
expect(getSuitableUnit(negativeNumber)).toBeUndefined();
});
});

View file

@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { sortBy, isNumber } from 'lodash';
import { INTERVAL_STRING_RE } from './interval_regex';
export const ASCENDING_UNIT_ORDER = ['ms', 's', 'm', 'h', 'd', 'w', 'M', 'y'];
const units: Record<string, number> = {
ms: 0.001,
s: 1,
m: 60,
h: 3600,
d: 86400,
w: 86400 * 7,
M: 86400 * 30,
y: 86400 * 365,
};
const sortedUnits = sortBy(Object.keys(units), (key) => units[key]);
export const parseInterval = (intervalString: string) => {
let value;
let unit;
if (intervalString) {
const matches = intervalString.match(INTERVAL_STRING_RE);
if (matches) {
value = Number(matches[1]);
unit = matches[2];
}
}
return { value, unit };
};
export const convertIntervalToUnit = (intervalString: string, newUnit: string) => {
const parsedInterval = parseInterval(intervalString);
let value;
let unit;
if (parsedInterval.unit && parsedInterval.value && units[newUnit]) {
value = Number(
((parsedInterval.value * units[parsedInterval.unit]) / units[newUnit]).toFixed(2)
);
unit = newUnit;
}
return { value, unit };
};
export const getSuitableUnit = (intervalInSeconds: number) =>
sortedUnits.find((key, index, array) => {
const nextUnit = array[index + 1];
const isValidInput = isNumber(intervalInSeconds) && intervalInSeconds > 0;
const isLastItem = index + 1 === array.length;
return (
isValidInput &&
((intervalInSeconds >= units[key] && intervalInSeconds < units[nextUnit]) || isLastItem)
);
});
export const getUnitValue = (unit: string) => units[unit];

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { calculateDateHistogramOffset } from './calculate_date_histogram_offset';
import moment from 'moment';
describe('calculateDateHistogramOffset(timerange)', () => {
it('should just work', () => {
const timerange = {
from: moment('2020-01-01T00:03:32').valueOf(),
to: moment('2020-01-01T01:03:32').valueOf(),
interval: '1m',
field: '@timestamp',
};
const offset = calculateDateHistogramOffset(timerange);
expect(offset).toBe('-28s');
});
});

View file

@ -0,0 +1,17 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsAPITimerange } from '../../../../common/http_api';
import { calculateBucketSize } from './calculate_bucket_size';
export const calculateDateHistogramOffset = (timerange: MetricsAPITimerange): string => {
const fromInSeconds = Math.floor(timerange.from / 1000);
const { bucketSize } = calculateBucketSize(timerange);
// negative offset to align buckets with full intervals (e.g. minutes)
const offset = (fromInSeconds % bucketSize) - bucketSize;
return `${offset}s`;
};
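
A worked trace of the arithmetic behind the '-28s' test case above (sketch only; assumes the timezone offset is a whole number of minutes):

// from = 2020-01-01T00:03:32 with interval '1m' gives bucketSize = 60.
const from = Date.parse('2020-01-01T00:03:32'); // 32 seconds past the minute
const bucketSize = 60;
const offset = (Math.floor(from / 1000) % bucketSize) - bucketSize; // 32 - 60 = -28
// => '-28s'; with alignDataToEnd set, create_aggregations passes this offset to the
// date_histogram so the final bucket ends exactly at the requested `to` timestamp.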

View file

@ -0,0 +1,120 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsAPIRequest } from '../../../../common/http_api';
import moment from 'moment';
import { convertHistogramBucketsToTimeseries } from './convert_histogram_buckets_to_timeseries';
const keys = ['example-0'];
const options: MetricsAPIRequest = {
timerange: {
field: '@timestamp',
from: moment('2020-01-01T00:00:00Z').valueOf(),
to: moment('2020-01-01T01:00:00Z').valueOf(),
interval: '1m',
},
limit: 9,
indexPattern: 'metrics-*',
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
],
};
const buckets = [
{
key: moment('2020-01-01T00:00:00Z').valueOf(),
key_as_string: moment('2020-01-01T00:00:00Z').toISOString(),
doc_count: 1,
metric_0: { value: 1 },
},
{
key: moment('2020-01-01T00:00:00Z').add(1, 'minute').valueOf(),
key_as_string: moment('2020-01-01T00:00:00Z').add(1, 'minute').toISOString(),
doc_count: 1,
metric_0: { value: 1 },
},
{
key: moment('2020-01-01T00:00:00Z').add(2, 'minute').valueOf(),
key_as_string: moment('2020-01-01T00:00:00Z').add(2, 'minute').toISOString(),
doc_count: 1,
metric_0: { value: 1 },
},
{
key: moment('2020-01-01T00:00:00Z').add(2, 'minute').valueOf(),
key_as_string: moment('2020-01-01T00:00:00Z').add(2, 'minute').toISOString(),
doc_count: 1,
metric_0: { value: null },
},
];
describe('convertHistogramBucketsToTimeseries(keys, options, buckets)', () => {
it('should just work', () => {
expect(convertHistogramBucketsToTimeseries(keys, options, buckets)).toMatchSnapshot();
});
it('should drop the last bucket', () => {
expect(
convertHistogramBucketsToTimeseries(keys, { ...options, dropLastBucket: true }, buckets)
).toMatchSnapshot();
});
it('should return empty timeseries for empty metrics', () => {
expect(
convertHistogramBucketsToTimeseries(keys, { ...options, metrics: [] }, buckets)
).toMatchSnapshot();
});
it('should work with normalized_values', () => {
const bucketsWithNormalizedValue = buckets.map((bucket) => {
const value = bucket.metric_0.value;
if (value) {
return { ...bucket, metric_0: { value, normalized_value: value + 1 } };
}
return bucket;
});
expect(
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithNormalizedValue)
).toMatchSnapshot();
});
it('should work with percentiles', () => {
const bucketsWithPercentiles = buckets.map((bucket) => {
return { ...bucket, metric_0: { values: { '95.0': 3 } } };
});
expect(
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithPercentiles)
).toMatchSnapshot();
});
it('should throw error with multiple percentiles', () => {
const bucketsWithMultiplePercentiles = buckets.map((bucket) => {
return { ...bucket, metric_0: { values: { '95.0': 3, '99.0': 4 } } };
});
expect(() =>
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithMultiplePercentiles)
).toThrow();
});
it('should work with keyed percentiles', () => {
const bucketsWithKeyedPercentiles = buckets.map((bucket) => {
return { ...bucket, metric_0: { values: [{ key: '99.0', value: 4 }] } };
});
expect(
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithKeyedPercentiles)
).toMatchSnapshot();
});
it('should throw error with multiple keyed percentiles', () => {
const bucketsWithMultipleKeyedPercentiles = buckets.map((bucket) => {
return {
...bucket,
metric_0: {
values: [
{ key: '95.0', value: 3 },
{ key: '99.0', value: 4 },
],
},
};
});
expect(() =>
convertHistogramBucketsToTimeseries(keys, { ...options }, bucketsWithMultipleKeyedPercentiles)
).toThrow();
});
});

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { get, values, first } from 'lodash';
import {
MetricsAPIRequest,
MetricsAPISeries,
MetricsAPIColumn,
MetricsAPIRow,
} from '../../../../common/http_api/metrics_api';
import {
HistogramBucket,
MetricValueType,
BasicMetricValueRT,
NormalizedMetricValueRT,
PercentilesTypeRT,
PercentilesKeyedTypeRT,
} from '../types';
const BASE_COLUMNS = [{ name: 'timestamp', type: 'date' }] as MetricsAPIColumn[];
const getValue = (valueObject: string | number | MetricValueType) => {
if (NormalizedMetricValueRT.is(valueObject)) {
return valueObject.normalized_value || valueObject.value;
}
if (PercentilesTypeRT.is(valueObject)) {
const percentileValues = values(valueObject.values);
if (percentileValues.length > 1) {
throw new Error(
'Metrics API only supports a single percentile, multiple percentiles should be sent separately'
);
}
return first(percentileValues) || null;
}
if (PercentilesKeyedTypeRT.is(valueObject)) {
if (valueObject.values.length > 1) {
throw new Error(
'Metrics API only supports a single percentile, multiple percentiles should be sent separately'
);
}
const percentileValue = first(valueObject.values);
return (percentileValue && percentileValue.value) || null;
}
if (BasicMetricValueRT.is(valueObject)) {
return valueObject.value;
}
return null;
};
const convertBucketsToRows = (
options: MetricsAPIRequest,
buckets: HistogramBucket[]
): MetricsAPIRow[] => {
return buckets.map((bucket) => {
const ids = options.metrics.map((metric) => metric.id);
const metrics = ids.reduce((acc, id) => {
const valueObject = get(bucket, [id]);
return { ...acc, [id]: getValue(valueObject) };
}, {} as Record<string, number | null>);
return { timestamp: bucket.key as number, ...metrics };
});
};
export const convertHistogramBucketsToTimeseries = (
keys: string[],
options: MetricsAPIRequest,
buckets: HistogramBucket[]
): MetricsAPISeries => {
const id = keys.join(':');
// If there are no metrics then we just return the empty series
// but still maintain the groupings.
if (options.metrics.length === 0) {
return { id, keys, columns: [], rows: [] };
}
const columns = options.metrics.map((metric) => ({
name: metric.id,
type: 'number',
})) as MetricsAPIColumn[];
const allRows = convertBucketsToRows(options, buckets);
const rows = options.dropLastBucket ? allRows.slice(0, allRows.length - 1) : allRows;
return {
id,
keys,
rows,
columns: [...BASE_COLUMNS, ...columns],
};
};

View file

@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { createAggregations } from './create_aggregations';
import moment from 'moment';
import { MetricsAPIRequest } from '../../../../common/http_api';
const options: MetricsAPIRequest = {
timerange: {
field: '@timestamp',
from: moment('2020-01-01T00:00:00Z').valueOf(),
to: moment('2020-01-01T01:00:00Z').valueOf(),
interval: '>=1m',
},
limit: 20,
indexPattern: 'metrics-*',
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
],
};
describe('createAggregations(options)', () => {
it('should return groupings aggregation with groupBy', () => {
const optionsWithGroupBy = { ...options, groupBy: ['host.name'] };
expect(createAggregations(optionsWithGroupBy)).toMatchSnapshot();
});
it('should return just histogram aggregation without groupBy', () => {
expect(createAggregations(options)).toMatchSnapshot();
});
it('should add offset to histogram', () => {
const optionsWithAlignDataToEnd = {
...options,
timerange: {
...options.timerange,
from: moment('2020-01-01T00:00:00Z').subtract(28, 'minutes').valueOf(),
to: moment('2020-01-01T01:00:00Z').subtract(28, 'minutes').valueOf(),
},
alignDataToEnd: true,
};
expect(createAggregations(optionsWithAlignDataToEnd)).toMatchSnapshot();
});
});

View file

@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsAPIRequest } from '../../../../common/http_api/metrics_api';
import { calculateDateHistogramOffset } from './calculate_date_histogram_offset';
import { createMetricsAggregations } from './create_metrics_aggregations';
import { calculateBucketSize } from './calculate_bucket_size';
export const createAggregations = (options: MetricsAPIRequest) => {
const { intervalString } = calculateBucketSize(options.timerange);
const histogramAggregation = {
histogram: {
date_histogram: {
field: options.timerange.field,
fixed_interval: intervalString,
offset: options.alignDataToEnd ? calculateDateHistogramOffset(options.timerange) : '0s',
extended_bounds: {
min: options.timerange.from,
max: options.timerange.to,
},
},
aggregations: createMetricsAggregations(options),
},
};
if (Array.isArray(options.groupBy) && options.groupBy.length) {
const limit = options.limit || 9;
return {
groupings: {
composite: {
size: limit,
sources: options.groupBy.map((field, index) => ({
[`groupBy${index}`]: { terms: { field, order: 'asc' } },
})),
},
aggs: histogramAggregation,
},
};
}
return histogramAggregation;
};
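For reference, a sketch of the aggregation body this produces when a groupBy is present, assuming a single host.name grouping, the default limit of 9, a resolved interval of 60s, and alignDataToEnd left unset; the exact fixed_interval and offset depend on calculateBucketSize and calculateDateHistogramOffset.

// Sketch only: approximate output of createAggregations for { groupBy: ['host.name'] }.
const sketchAggregations = {
  groupings: {
    composite: {
      size: 9,
      sources: [{ groupBy0: { terms: { field: 'host.name', order: 'asc' } } }],
    },
    aggs: {
      histogram: {
        date_histogram: {
          field: '@timestamp',
          fixed_interval: '60s',
          offset: '0s',
          extended_bounds: { min: 1577836800000, max: 1577840400000 },
        },
        aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } },
      },
    },
  },
};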

View file

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsAPIRequest } from '../../../../common/http_api';
import moment from 'moment';
import { createMetricsAggregations } from './create_metrics_aggregations';
const options: MetricsAPIRequest = {
timerange: {
field: '@timestamp',
from: moment('2020-01-01T00:00:00Z').valueOf(),
to: moment('2020-01-01T01:00:00Z').valueOf(),
interval: '>=1m',
},
limit: 20,
indexPattern: 'metrics-*',
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
{
id: 'metric_1',
aggregations: {
metric_1_max: { max: { field: 'system.network.in.bytes' } },
metric_1: { derivative: { buckets_path: 'metric_1_max', gap_policy: 'skip', unit: '1s' } },
},
},
],
};
describe('createMetricsAggregations(options)', () => {
it('should just work', () => {
expect(createMetricsAggregations(options)).toMatchSnapshot();
});
});

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsUIAggregation } from '../../../../common/inventory_models/types';
import { MetricsAPIRequest } from '../../../../common/http_api/metrics_api';
export const createMetricsAggregations = (options: MetricsAPIRequest): MetricsUIAggregation => {
const { metrics } = options;
return metrics.reduce((aggs, metric) => {
return { ...aggs, ...metric.aggregations };
}, {});
};
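Given the two-metric fixture in the test above, the reduce flattens every per-metric aggregation into one map; a sketch of that output, inferred from the code rather than taken from a recorded snapshot:

// Inferred result of createMetricsAggregations for the avg + derivative fixture; intermediate
// steps such as metric_1_max become sibling keys nested under the date_histogram.
const sketchMetricsAggregations = {
  metric_0: { avg: { field: 'system.cpu.user.pct' } },
  metric_1_max: { max: { field: 'system.network.in.bytes' } },
  metric_1: { derivative: { buckets_path: 'metric_1_max', gap_policy: 'skip', unit: '1s' } },
};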

View file

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { InfraDatabaseSearchResponse, CallWithRequestParams } from '../adapters/framework';
export type ESSearchClient = <Hit = {}, Aggregation = undefined>(
options: CallWithRequestParams
) => Promise<InfraDatabaseSearchResponse<Hit, Aggregation>>;
const NumberOrNullRT = rt.union([rt.number, rt.null]);
export const BasicMetricValueRT = rt.type({ value: NumberOrNullRT });
export const NormalizedMetricValueRT = rt.intersection([
BasicMetricValueRT,
rt.type({ normalized_value: NumberOrNullRT }),
]);
export const PercentilesTypeRT = rt.type({ values: rt.record(rt.string, NumberOrNullRT) });
export const PercentilesKeyedTypeRT = rt.type({
values: rt.array(rt.type({ key: rt.string, value: NumberOrNullRT })),
});
export const MetricValueTypeRT = rt.union([
BasicMetricValueRT,
NormalizedMetricValueRT,
PercentilesTypeRT,
PercentilesKeyedTypeRT,
]);
export type MetricValueType = rt.TypeOf<typeof MetricValueTypeRT>;
export const HistogramBucketRT = rt.record(
rt.string,
rt.union([rt.number, rt.string, MetricValueTypeRT])
);
export const HistogramResponseRT = rt.type({
histogram: rt.type({
buckets: rt.array(HistogramBucketRT),
}),
});
const GroupingBucketRT = rt.intersection([
rt.type({
key: rt.record(rt.string, rt.string),
doc_count: rt.number,
}),
HistogramResponseRT,
]);
export const GroupingResponseRT = rt.type({
groupings: rt.intersection([
rt.type({
buckets: rt.array(GroupingBucketRT),
}),
rt.partial({
after_key: rt.record(rt.string, rt.string),
}),
]),
});
export type HistogramBucket = rt.TypeOf<typeof HistogramBucketRT>;
export type HistogramResponse = rt.TypeOf<typeof HistogramResponseRT>;
export type GroupingResponse = rt.TypeOf<typeof GroupingResponseRT>;
export type MetricsESResponse = HistogramResponse | GroupingResponse;
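A sketch of how these codecs can narrow an Elasticsearch aggregations payload before it is converted to timeseries; the helper name and wiring are illustrative, only the guards come from this file, and the import path assumes the sketch sits next to types.ts.

// Sketch: branch on grouped vs. ungrouped responses using the io-ts guards instead of casts.
import { GroupingResponseRT, HistogramResponseRT, MetricsESResponse } from './types';

export const countBuckets = (aggregations: MetricsESResponse): number => {
  if (GroupingResponseRT.is(aggregations)) {
    // Grouped: one composite bucket per key combination, each with its own histogram.
    return aggregations.groupings.buckets.length;
  }
  if (HistogramResponseRT.is(aggregations)) {
    // Ungrouped: a single date_histogram.
    return aggregations.histogram.buckets.length;
  }
  return 0;
};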

View file

@ -8,7 +8,7 @@ import { uniq } from 'lodash';
import { InfraSnapshotRequestOptions } from './types';
import { getMetricsAggregations } from './query_helpers';
import { calculateMetricInterval } from '../../utils/calculate_metric_interval';
import { SnapshotModel, SnapshotModelMetricAggRT } from '../../../common/inventory_models/types';
import { MetricsUIAggregation, ESBasicMetricAggRT } from '../../../common/inventory_models/types';
import { getDatasetForField } from '../../routes/metrics_explorer/lib/get_dataset_for_field';
import { InfraTimerangeInput } from '../../../common/http_api/snapshot_api';
import { ESSearchClient } from '.';
@ -59,12 +59,12 @@ export const createTimeRangeWithInterval = async (
const aggregationsToModules = async (
client: ESSearchClient,
aggregations: SnapshotModel,
aggregations: MetricsUIAggregation,
options: InfraSnapshotRequestOptions
): Promise<string[]> => {
const uniqueFields = Object.values(aggregations)
.reduce<Array<string | undefined>>((fields, agg) => {
if (SnapshotModelMetricAggRT.is(agg)) {
if (ESBasicMetricAggRT.is(agg)) {
return uniq(fields.concat(Object.values(agg).map((a) => a?.field)));
}
return fields;

View file

@ -9,8 +9,8 @@ import { findInventoryModel, findInventoryFields } from '../../../common/invento
import { InfraSnapshotRequestOptions } from './types';
import { getIntervalInSeconds } from '../../utils/get_interval_in_seconds';
import {
SnapshotModelRT,
SnapshotModel,
MetricsUIAggregation,
MetricsUIAggregationRT,
InventoryItemType,
} from '../../../common/inventory_models/types';
import {
@ -75,11 +75,13 @@ export const metricToAggregation = (
return inventoryModel.metrics.snapshot?.[metric.type];
};
export const getMetricsAggregations = (options: InfraSnapshotRequestOptions): SnapshotModel => {
export const getMetricsAggregations = (
options: InfraSnapshotRequestOptions
): MetricsUIAggregation => {
const { metrics } = options;
return metrics.reduce((aggs, metric, index) => {
const aggregation = metricToAggregation(options.nodeType, metric, index);
if (!SnapshotModelRT.is(aggregation)) {
if (!MetricsUIAggregationRT.is(aggregation)) {
throw new Error(
i18n.translate('xpack.infra.snapshot.missingSnapshotMetricError', {
defaultMessage: 'The aggregation for {metric} for {nodeType} is not available.',

View file

@ -4,20 +4,25 @@
* you may not use this file except in compliance with the Elastic License.
*/
import {
METRICS_INDEX_PATTERN,
LOGS_INDEX_PATTERN,
TIMESTAMP_FIELD,
} from '../../../common/constants';
import { InfraSourceConfiguration } from '../../../common/http_api/source_api';
export const defaultSourceConfiguration: InfraSourceConfiguration = {
name: 'Default',
description: '',
metricAlias: 'metrics-*,metricbeat-*',
logAlias: 'logs-*,filebeat-*,kibana_sample_data_logs*',
metricAlias: METRICS_INDEX_PATTERN,
logAlias: LOGS_INDEX_PATTERN,
fields: {
container: 'container.id',
host: 'host.name',
message: ['message', '@message'],
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',
timestamp: '@timestamp',
timestamp: TIMESTAMP_FIELD,
},
inventoryDefaultView: '0',
metricsExplorerDefaultView: '0',

View file

@ -10,17 +10,23 @@ import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { schema } from '@kbn/config-schema';
import { InfraBackendLibs } from '../../lib/infra_types';
import { getGroupings } from './lib/get_groupings';
import { populateSeriesWithTSVBData } from './lib/populate_series_with_tsvb_data';
import { metricsExplorerRequestBodyRT, metricsExplorerResponseRT } from '../../../common/http_api';
import {
metricsExplorerRequestBodyRT,
metricsExplorerResponseRT,
MetricsExplorerPageInfo,
} from '../../../common/http_api';
import { throwErrors } from '../../../common/runtime_types';
import { convertRequestToMetricsAPIOptions } from './lib/convert_request_to_metrics_api_options';
import { createSearchClient } from '../../lib/create_search_client';
import { findIntervalForMetrics } from './lib/find_interval_for_metrics';
import { query } from '../../lib/metrics';
import { queryTotalGroupings } from './lib/query_total_groupings';
import { transformSeries } from './lib/transform_series';
const escapeHatch = schema.object({}, { unknowns: 'allow' });
export const initMetricExplorerRoute = (libs: InfraBackendLibs) => {
const { framework } = libs;
const { callWithRequest } = framework;
framework.registerRoute(
{
method: 'post',
@ -31,26 +37,48 @@ export const initMetricExplorerRoute = (libs: InfraBackendLibs) => {
},
async (requestContext, request, response) => {
try {
const payload = pipe(
const options = pipe(
metricsExplorerRequestBodyRT.decode(request.body),
fold(throwErrors(Boom.badRequest), identity)
);
const search = <Aggregation>(searchOptions: object) =>
callWithRequest<{}, Aggregation>(requestContext, 'search', searchOptions);
const client = createSearchClient(requestContext, framework);
const interval = await findIntervalForMetrics(client, options);
// First we get the groupings from a composite aggregation
const groupings = await getGroupings(search, payload);
const optionsWithInterval = options.forceInterval
? options
: {
...options,
timerange: {
...options.timerange,
interval: interval ? `>=${interval}s` : options.timerange.interval,
},
};
const metricsApiOptions = convertRequestToMetricsAPIOptions(optionsWithInterval);
const metricsApiResponse = await query(client, metricsApiOptions);
const totalGroupings = await queryTotalGroupings(client, metricsApiOptions);
const hasGroupBy =
Array.isArray(metricsApiOptions.groupBy) && metricsApiOptions.groupBy.length > 0;
const pageInfo: MetricsExplorerPageInfo = {
total: totalGroupings,
afterKey: null,
};
if (metricsApiResponse.info.afterKey) {
pageInfo.afterKey = metricsApiResponse.info.afterKey;
}
// If we have a groupBy but there are ZERO groupings returned then we need to
// return an empty array. Otherwise we transform the series to match the current schema.
const series =
hasGroupBy && totalGroupings === 0
? []
: metricsApiResponse.series.map(transformSeries(hasGroupBy));
// Then we take the results and fill in the data from TSVB with the
// user's custom metrics
const seriesWithMetrics = await Promise.all(
groupings.series.map(
populateSeriesWithTSVBData(request, payload, framework, requestContext)
)
);
return response.ok({
body: metricsExplorerResponseRT.encode({ ...groupings, series: seriesWithMetrics }),
body: metricsExplorerResponseRT.encode({ series, pageInfo }),
});
} catch (error) {
return response.internalError({
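For orientation, a sketch of the body the handler now returns for a grouped query, combining the transformed series with the pageInfo assembled above; the values are fabricated and the shape is inferred from metricsExplorerResponseRT and transformSeries.

// Sketch of an encoded grouped response (values fabricated):
const sketchResponseBody = {
  series: [
    {
      id: 'host-01',
      keys: ['host-01'],
      columns: [
        { name: 'timestamp', type: 'date' },
        { name: 'metric_0', type: 'number' },
        { name: 'groupBy', type: 'string' },
      ],
      rows: [{ timestamp: 1577836800000, metric_0: 0.21, groupBy: 'host-01' }],
    },
  ],
  pageInfo: { total: 1, afterKey: null },
};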

View file

@ -0,0 +1,87 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { convertMetricToMetricsAPIMetric } from './convert_metric_to_metrics_api_metric';
import {
MetricsExplorerMetric,
MetricsAPIMetric,
MetricsExplorerAggregation,
} from '../../../../common/http_api';
describe('convertMetricToMetricsAPIMetric(metric, index)', () => {
const runTest = (metric: MetricsExplorerMetric, aggregation: MetricsAPIMetric) =>
it(`should convert ${metric.aggregation}`, () => {
expect(convertMetricToMetricsAPIMetric(metric, 1)).toEqual(aggregation);
});
const runTestForBasic = (aggregation: MetricsExplorerAggregation) =>
runTest(
{ aggregation, field: 'system.cpu.user.pct' },
{
id: 'metric_1',
aggregations: { metric_1: { [aggregation]: { field: 'system.cpu.user.pct' } } },
}
);
runTestForBasic('avg');
runTestForBasic('sum');
runTestForBasic('max');
runTestForBasic('min');
runTestForBasic('cardinality');
runTest(
{ aggregation: 'rate', field: 'system.network.in.bytes' },
{
id: 'metric_1',
aggregations: {
metric_1_max: {
max: {
field: 'system.network.in.bytes',
},
},
metric_1_deriv: {
derivative: {
buckets_path: 'metric_1_max',
gap_policy: 'skip',
unit: '1s',
},
},
metric_1: {
bucket_script: {
buckets_path: {
value: 'metric_1_deriv[normalized_value]',
},
gap_policy: 'skip',
script: {
lang: 'painless',
source: 'params.value > 0.0 ? params.value : 0.0',
},
},
},
},
}
);
runTest(
{ aggregation: 'count' },
{
id: 'metric_1',
aggregations: {
metric_1: {
bucket_script: {
buckets_path: {
count: '_count',
},
gap_policy: 'skip',
script: {
lang: 'expression',
source: 'count * 1',
},
},
},
},
}
);
});

View file

@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { networkTraffic } from '../../../../common/inventory_models/shared/metrics/snapshot/network_traffic';
import { MetricsAPIMetric, MetricsExplorerMetric } from '../../../../common/http_api';
export const convertMetricToMetricsAPIMetric = (
metric: MetricsExplorerMetric,
index: number
): MetricsAPIMetric => {
const id = `metric_${index}`;
if (metric.aggregation === 'rate' && metric.field) {
return {
id,
aggregations: networkTraffic(id, metric.field),
};
}
if (['p95', 'p99'].includes(metric.aggregation) && metric.field) {
const percent = metric.aggregation === 'p95' ? 95 : 99;
return {
id,
aggregations: {
[id]: {
percentiles: {
field: metric.field,
percents: [percent],
},
},
},
};
}
if (['max', 'min', 'avg', 'cardinality', 'sum'].includes(metric.aggregation) && metric.field) {
return {
id,
aggregations: {
[id]: {
[metric.aggregation]: { field: metric.field },
},
},
};
}
return {
id,
aggregations: {
[id]: {
bucket_script: {
buckets_path: { count: '_count' },
script: {
source: 'count * 1',
lang: 'expression',
},
gap_policy: 'skip',
},
},
},
};
};
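The test file above exercises the basic, rate, and count branches; for completeness, a sketch of what the percentile branch yields, inferred from the code rather than from a snapshot:

// Sketch: convertMetricToMetricsAPIMetric({ aggregation: 'p95', field: 'system.cpu.user.pct' }, 2)
// is expected to produce:
const sketchP95Metric = {
  id: 'metric_2',
  aggregations: {
    metric_2: { percentiles: { field: 'system.cpu.user.pct', percents: [95] } },
  },
};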

View file

@ -0,0 +1,123 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsExplorerRequestBody, MetricsAPIRequest } from '../../../../common/http_api';
import { convertRequestToMetricsAPIOptions } from './convert_request_to_metrics_api_options';
const BASE_REQUEST: MetricsExplorerRequestBody = {
timerange: {
field: '@timestamp',
from: new Date('2020-01-01T00:00:00Z').getTime(),
to: new Date('2020-01-01T01:00:00Z').getTime(),
interval: '1m',
},
limit: 9,
indexPattern: 'metrics-*',
metrics: [{ aggregation: 'avg', field: 'system.cpu.user.pct' }],
};
const BASE_METRICS_UI_OPTIONS: MetricsAPIRequest = {
timerange: {
field: '@timestamp',
from: new Date('2020-01-01T00:00:00Z').getTime(),
to: new Date('2020-01-01T01:00:00Z').getTime(),
interval: '1m',
},
limit: 9,
dropLastBucket: true,
indexPattern: 'metrics-*',
metrics: [
{ id: 'metric_0', aggregations: { metric_0: { avg: { field: 'system.cpu.user.pct' } } } },
],
};
describe('convertRequestToMetricsAPIOptions', () => {
it('should just work', () => {
expect(convertRequestToMetricsAPIOptions(BASE_REQUEST)).toEqual(BASE_METRICS_UI_OPTIONS);
});
it('should work with string afterKeys', () => {
expect(convertRequestToMetricsAPIOptions({ ...BASE_REQUEST, afterKey: 'host.name' })).toEqual({
...BASE_METRICS_UI_OPTIONS,
afterKey: { groupBy0: 'host.name' },
});
});
it('should work with afterKey objects', () => {
const afterKey = { groupBy0: 'host.name', groupBy1: 'cloud.availability_zone' };
expect(
convertRequestToMetricsAPIOptions({
...BASE_REQUEST,
afterKey,
})
).toEqual({
...BASE_METRICS_UI_OPTIONS,
afterKey,
});
});
it('should work with string group bys', () => {
expect(
convertRequestToMetricsAPIOptions({
...BASE_REQUEST,
groupBy: 'host.name',
})
).toEqual({
...BASE_METRICS_UI_OPTIONS,
groupBy: ['host.name'],
});
});
it('should work with group by arrays', () => {
expect(
convertRequestToMetricsAPIOptions({
...BASE_REQUEST,
groupBy: ['host.name', 'cloud.availability_zone'],
})
).toEqual({
...BASE_METRICS_UI_OPTIONS,
groupBy: ['host.name', 'cloud.availability_zone'],
});
});
it('should work with filterQuery json string', () => {
const filter = { bool: { filter: [{ match: { 'host.name': 'example-01' } }] } };
expect(
convertRequestToMetricsAPIOptions({
...BASE_REQUEST,
filterQuery: JSON.stringify(filter),
})
).toEqual({
...BASE_METRICS_UI_OPTIONS,
filters: [filter],
});
});
it('should work with filterQuery as Lucene expressions', () => {
const filter = `host.name: 'example-01'`;
expect(
convertRequestToMetricsAPIOptions({
...BASE_REQUEST,
filterQuery: filter,
})
).toEqual({
...BASE_METRICS_UI_OPTIONS,
filters: [{ query_string: { query: filter, analyze_wildcard: true } }],
});
});
it('should work with empty metrics', () => {
expect(
convertRequestToMetricsAPIOptions({
...BASE_REQUEST,
metrics: [],
})
).toEqual({
...BASE_METRICS_UI_OPTIONS,
metrics: [],
});
});
});

View file

@ -0,0 +1,58 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { isObject, isArray } from 'lodash';
import {
MetricsAPIRequest,
MetricsExplorerRequestBody,
afterKeyObjectRT,
} from '../../../../common/http_api';
import { convertMetricToMetricsAPIMetric } from './convert_metric_to_metrics_api_metric';
export const convertRequestToMetricsAPIOptions = (
options: MetricsExplorerRequestBody
): MetricsAPIRequest => {
const metrics = options.metrics.map(convertMetricToMetricsAPIMetric);
const { limit, timerange, indexPattern } = options;
const metricsApiOptions: MetricsAPIRequest = {
timerange,
indexPattern,
limit,
metrics,
dropLastBucket: true,
};
if (options.afterKey) {
metricsApiOptions.afterKey = afterKeyObjectRT.is(options.afterKey)
? options.afterKey
: { groupBy0: options.afterKey };
}
if (options.groupBy) {
metricsApiOptions.groupBy = isArray(options.groupBy) ? options.groupBy : [options.groupBy];
}
if (options.filterQuery) {
try {
const filterObject = JSON.parse(options.filterQuery);
if (isObject(filterObject)) {
metricsApiOptions.filters = [filterObject];
}
} catch (err) {
metricsApiOptions.filters = [
{
query_string: {
query: options.filterQuery,
analyze_wildcard: true,
},
},
];
}
}
return metricsApiOptions;
};

View file

@ -1,97 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsExplorerRequestBody } from '../../../../common/http_api/metrics_explorer';
import { TSVBMetricModel } from '../../../../common/inventory_models/types';
const percentileToVaue = (agg: 'p95' | 'p99') => {
if (agg === 'p95') {
return 95;
}
return 99;
};
export const createMetricModel = (options: MetricsExplorerRequestBody): TSVBMetricModel => {
// if dropLastBucket is set use the value otherwise default to true.
const dropLastBucket: boolean = options.dropLastBucket != null ? options.dropLastBucket : true;
return {
id: 'custom',
requires: [],
index_pattern: options.indexPattern,
interval: options.timerange.interval,
time_field: options.timerange.field,
drop_last_bucket: dropLastBucket,
type: 'timeseries',
// Create one series per metric requested. The series.id will be used to identify the metric
// when the responses are processed and combined with the grouping request.
series: options.metrics.map((metric, index) => {
// If the metric is a rate then we need to add TSVB metrics for calculating the derivative
if (metric.aggregation === 'rate') {
const aggType = 'max';
return {
id: `metric_${index}`,
split_mode: 'everything',
metrics: [
{
id: `metric_${aggType}_${index}`,
field: metric.field,
type: aggType,
},
{
id: `metric_deriv_${aggType}_${index}`,
field: `metric_${aggType}_${index}`,
type: 'derivative',
unit: '1s',
},
{
id: `metric_posonly_deriv_${aggType}_${index}`,
type: 'calculation',
variables: [
{ id: 'var-rate', name: 'rate', field: `metric_deriv_${aggType}_${index}` },
],
script: 'params.rate > 0.0 ? params.rate : 0.0',
},
],
};
}
if (metric.aggregation === 'p95' || metric.aggregation === 'p99') {
return {
id: `metric_${index}`,
split_mode: 'everything',
metrics: [
{
field: metric.field,
id: `metric_${metric.aggregation}_${index}`,
type: 'percentile',
percentiles: [
{
id: 'percentile_0',
value: percentileToVaue(metric.aggregation),
},
],
},
],
};
}
// Create a basic TSVB series with a single metric
const aggregation = metric.aggregation || 'avg';
return {
id: `metric_${index}`,
split_mode: 'everything',
metrics: [
{
field: metric.field,
id: `metric_${aggregation}_${index}`,
type: aggregation,
},
],
};
}),
};
};

View file

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { uniq } from 'lodash';
import LRU from 'lru-cache';
import { MetricsExplorerRequestBody } from '../../../../common/http_api';
import { ESSearchClient } from '../../../lib/snapshot';
import { getDatasetForField } from './get_dataset_for_field';
import { calculateMetricInterval } from '../../../utils/calculate_metric_interval';
const cache = new LRU({
max: 100,
maxAge: 15 * 60 * 1000,
});
export const findIntervalForMetrics = async (
client: ESSearchClient,
options: MetricsExplorerRequestBody
) => {
const fields = uniq(
options.metrics.map((metric) => (metric.field ? metric.field : null)).filter((f) => f)
) as string[];
const cacheKey = fields.sort().join(':');
if (cache.has(cacheKey)) return cache.get(cacheKey);
if (fields.length === 0) {
return 60;
}
const modules = await Promise.all(
fields.map(
async (field) => await getDatasetForField(client, field as string, options.indexPattern)
)
);
  const interval = await calculateMetricInterval(
client,
{
indexPattern: options.indexPattern,
timestampField: options.timerange.field,
timerange: options.timerange,
},
modules.filter(Boolean) as string[]
);
cache.set(cacheKey, interval);
return interval;
};
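A small sketch of the cache key derivation above: the unique metric fields, sorted and colon-joined, so repeated Metrics Explorer requests over the same fields reuse the previously detected interval. The metric list here is illustrative.

// Sketch of the cacheKey computation (lodash uniq, as used above).
import { uniq } from 'lodash';

const sketchMetrics: Array<{ aggregation: string; field?: string }> = [
  { aggregation: 'avg', field: 'system.cpu.user.pct' },
  { aggregation: 'max', field: 'system.load.1' },
  { aggregation: 'count' }, // no field, filtered out
];
const sketchFields = uniq(
  sketchMetrics.map((metric) => (metric.field ? metric.field : null)).filter((f) => f)
) as string[];
const sketchCacheKey = sketchFields.sort().join(':');
// sketchCacheKey => 'system.cpu.user.pct:system.load.1'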

View file

@ -1,150 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { set } from '@elastic/safer-lodash-set';
import { isObject } from 'lodash';
import { i18n } from '@kbn/i18n';
import { InfraDatabaseSearchResponse } from '../../../lib/adapters/framework';
import {
MetricsExplorerRequestBody,
MetricsExplorerResponse,
afterKeyObjectRT,
} from '../../../../common/http_api/metrics_explorer';
interface GroupingAggregation {
groupingsCount: {
value: number;
};
groupings: {
after_key?: {
[name: string]: string;
};
buckets: Array<{ key: { [id: string]: string }; doc_count: number }>;
};
}
const EMPTY_RESPONSE = {
series: [
{
id: i18n.translate('xpack.infra.metricsExploer.everything', { defaultMessage: 'Everything' }),
columns: [],
rows: [],
},
],
pageInfo: { total: 0, afterKey: null },
};
export const getGroupings = async (
search: <Aggregation>(options: object) => Promise<InfraDatabaseSearchResponse<{}, Aggregation>>,
options: MetricsExplorerRequestBody
): Promise<MetricsExplorerResponse> => {
if (!options.groupBy) {
return EMPTY_RESPONSE;
}
if (Array.isArray(options.groupBy) && options.groupBy.length === 0) {
return EMPTY_RESPONSE;
}
const limit = options.limit || 9;
const groupBy = Array.isArray(options.groupBy) ? options.groupBy : [options.groupBy];
const filter: Array<Record<string, any>> = [
{
range: {
[options.timerange.field]: {
gte: options.timerange.from,
lte: options.timerange.to,
format: 'epoch_millis',
},
},
},
...groupBy.map((field) => ({ exists: { field } })),
];
const params = {
allowNoIndices: true,
ignoreUnavailable: true,
index: options.indexPattern,
body: {
size: 0,
query: {
bool: {
should: [
...options.metrics
.filter((m) => m.field)
.map((m) => ({
exists: { field: m.field },
})),
],
filter,
},
},
aggs: {
groupingsCount: {
cardinality: {
script: { source: groupBy.map((field) => `doc['${field}'].value`).join('+') },
},
},
groupings: {
composite: {
size: limit,
sources: groupBy.map((field, index) => ({
[`groupBy${index}`]: { terms: { field, order: 'asc' } },
})),
},
},
},
},
};
if (params.body.query.bool.should.length !== 0) {
set(params, 'body.query.bool.minimum_should_match', 1);
}
if (options.afterKey) {
if (afterKeyObjectRT.is(options.afterKey)) {
set(params, 'body.aggs.groupings.composite.after', options.afterKey);
} else {
set(params, 'body.aggs.groupings.composite.after', { groupBy0: options.afterKey });
}
}
if (options.filterQuery) {
try {
const filterObject = JSON.parse(options.filterQuery);
if (isObject(filterObject)) {
params.body.query.bool.filter.push(filterObject);
}
} catch (err) {
params.body.query.bool.filter.push({
query_string: {
query: options.filterQuery,
analyze_wildcard: true,
},
});
}
}
const response = await search<GroupingAggregation>(params);
if (response.hits.total.value === 0) {
return { ...EMPTY_RESPONSE, series: [] };
}
if (!response.aggregations) {
throw new Error('Aggregations should be present.');
}
const { groupings, groupingsCount } = response.aggregations;
const { after_key: afterKey } = groupings;
return {
series: groupings.buckets.map((bucket) => {
const keys = Object.values(bucket.key);
const id = keys.join(' / ');
return { id, keys, rows: [], columns: [] };
}),
pageInfo: {
total: groupingsCount.value,
afterKey: afterKey && groupings.buckets.length === limit ? afterKey : null,
},
};
};

View file

@ -1,162 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { union, uniq, isArray, isString } from 'lodash';
import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
import { KibanaFramework } from '../../../lib/adapters/framework/kibana_framework_adapter';
import {
MetricsExplorerRow,
MetricsExplorerSeries,
MetricsExplorerRequestBody,
MetricsExplorerColumn,
} from '../../../../common/http_api/metrics_explorer';
import { createMetricModel } from './create_metrics_model';
import { JsonObject } from '../../../../common/typed_json';
import { calculateMetricInterval } from '../../../utils/calculate_metric_interval';
import { getDatasetForField } from './get_dataset_for_field';
import {
CallWithRequestParams,
InfraDatabaseSearchResponse,
} from '../../../lib/adapters/framework';
export const populateSeriesWithTSVBData = (
request: KibanaRequest,
options: MetricsExplorerRequestBody,
framework: KibanaFramework,
requestContext: RequestHandlerContext
) => async (series: MetricsExplorerSeries) => {
// IF there are no metrics selected then we should return an empty result.
if (options.metrics.length === 0) {
return {
...series,
columns: [],
rows: [],
};
}
// Set the filter for the group by or match everything
const isGroupBySet =
Array.isArray(options.groupBy) && options.groupBy.length
? true
: isString(options.groupBy)
? true
: false;
const filters: JsonObject[] = isGroupBySet
? isArray(options.groupBy)
? options.groupBy
.filter((f) => f)
.map((field, index) => ({ match: { [field as string]: series.keys?.[index] || '' } }))
: [{ match: { [options.groupBy as string]: series.id } }]
: [];
if (options.filterQuery) {
try {
const filterQuery = JSON.parse(options.filterQuery);
filters.push(filterQuery);
} catch (error) {
filters.push({
query_string: {
query: options.filterQuery,
analyze_wildcard: true,
},
});
}
}
const timerange = { min: options.timerange.from, max: options.timerange.to };
const client = <Hit = {}, Aggregation = undefined>(
opts: CallWithRequestParams
): Promise<InfraDatabaseSearchResponse<Hit, Aggregation>> =>
framework.callWithRequest(requestContext, 'search', opts);
// Create the TSVB model based on the request options
const model = createMetricModel(options);
const modules = await Promise.all(
uniq(options.metrics.filter((m) => m.field)).map(
async (m) => await getDatasetForField(client, m.field as string, options.indexPattern)
)
);
const calculatedInterval = await calculateMetricInterval(
client,
{
indexPattern: options.indexPattern,
timestampField: options.timerange.field,
timerange: options.timerange,
},
modules.filter((m) => m) as string[]
);
if (calculatedInterval) {
model.interval = options.forceInterval
? options.timerange.interval
: `>=${calculatedInterval}s`;
}
// Get TSVB results using the model, timerange and filters
const tsvbResults = await framework.makeTSVBRequest(
requestContext,
request,
model,
timerange,
filters
);
// If there is no data `custom` will not exist.
if (!tsvbResults.custom) {
return {
...series,
columns: [],
rows: [],
};
}
// Setup the dynamic columns and row attributes depending on if the user is doing a group by
// and multiple metrics
const attributeColumns: MetricsExplorerColumn[] =
options.groupBy != null ? [{ name: 'groupBy', type: 'string' }] : [];
const metricColumns: MetricsExplorerColumn[] = options.metrics.map((m, i) => ({
name: `metric_${i}`,
type: 'number',
}));
const rowAttributes = options.groupBy != null ? { groupBy: series.id } : {};
// To support multiple metrics, there are multiple TSVB series which need to be combined
// into one MetricExplorerRow (Canvas row). This is done by collecting all the timestamps
// across each TSVB series. Then for each timestamp we find the values and create a
// MetricsExplorerRow.
const timestamps = tsvbResults.custom.series.reduce(
(currentTimestamps, tsvbSeries) =>
union(
currentTimestamps,
tsvbSeries.data.map((row) => row[0])
).sort(),
[] as number[]
);
// Combine the TSVB series for multiple metrics.
const rows = timestamps.map((timestamp) => {
return tsvbResults.custom.series.reduce(
(currentRow, tsvbSeries) => {
const matches = tsvbSeries.data.find((d) => d[0] === timestamp);
if (matches) {
return { ...currentRow, [tsvbSeries.id]: matches[1] };
}
return currentRow;
},
{ timestamp, ...rowAttributes } as MetricsExplorerRow
);
});
return {
...series,
rows,
columns: [
{ name: 'timestamp', type: 'date' } as MetricsExplorerColumn,
...metricColumns,
...attributeColumns,
],
};
};

View file

@ -0,0 +1,59 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { isArray } from 'lodash';
import { MetricsAPIRequest } from '../../../../common/http_api';
import { ESSearchClient } from '../../../lib/metrics/types';
interface GroupingResponse {
count: {
value: number;
};
}
export const queryTotalGroupings = async (
client: ESSearchClient,
options: MetricsAPIRequest
): Promise<number> => {
if (!options.groupBy || (isArray(options.groupBy) && options.groupBy.length === 0)) {
    return 0;
}
const params = {
allowNoIndices: true,
ignoreUnavailable: true,
index: options.indexPattern,
body: {
size: 0,
query: {
bool: {
filter: [
{
range: {
[options.timerange.field]: {
gte: options.timerange.from,
lte: options.timerange.to,
format: 'epoch_millis',
},
},
},
...options.groupBy.map((field) => ({ exists: { field } })),
],
},
},
aggs: {
count: {
cardinality: {
script: options.groupBy.map((field) => `doc['${field}'].value`).join('+'),
},
},
},
},
};
const response = await client<{}, GroupingResponse>(params);
return response.aggregations?.count.value ?? 0;
};
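For reference, a sketch of the search body this builds for a single host.name grouping over a one-hour window; the script concatenates the grouped field values per document so the cardinality aggregation approximates the number of distinct composite keys. The index and timestamps are illustrative.

// Sketch of the request body for groupBy: ['host.name'] (values fabricated).
const sketchTotalGroupingsParams = {
  allowNoIndices: true,
  ignoreUnavailable: true,
  index: 'metrics-*',
  body: {
    size: 0,
    query: {
      bool: {
        filter: [
          {
            range: {
              '@timestamp': { gte: 1577836800000, lte: 1577840400000, format: 'epoch_millis' },
            },
          },
          { exists: { field: 'host.name' } },
        ],
      },
    },
    aggs: { count: { cardinality: { script: "doc['host.name'].value" } } },
  },
};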

View file

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { MetricsAPISeries, MetricsExplorerSeries } from '../../../../common/http_api';
export const transformSeries = (hasGroupBy: boolean) => (
series: MetricsAPISeries
): MetricsExplorerSeries => {
const id = series.keys?.join(' / ') ?? series.id;
return {
...series,
id,
rows: series.rows.map((row) => {
if (hasGroupBy) {
return { ...row, groupBy: id };
}
return row;
}),
columns: hasGroupBy ? [...series.columns, { name: 'groupBy', type: 'string' }] : series.columns,
};
};
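A quick sketch of the shape change for a grouped series: the composite keys become the display id, each row is tagged with that id, and a groupBy column is appended. The input values are fabricated.

// Sketch of transformSeries(true) applied to a two-key Metrics API series.
import { transformSeries } from './transform_series';

const sketchApiSeries = {
  id: 'host-01',
  keys: ['host-01', 'us-east-1a'],
  columns: [
    { name: 'timestamp', type: 'date' as const },
    { name: 'metric_0', type: 'number' as const },
  ],
  rows: [{ timestamp: 1577836800000, metric_0: 0.21 }],
};

const sketchExplorerSeries = transformSeries(true)(sketchApiSeries);
// sketchExplorerSeries.id      => 'host-01 / us-east-1a'
// sketchExplorerSeries.rows    => [{ timestamp: 1577836800000, metric_0: 0.21, groupBy: 'host-01 / us-east-1a' }]
// sketchExplorerSeries.columns => the original columns plus { name: 'groupBy', type: 'string' }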

View file

@ -8891,7 +8891,7 @@
"xpack.infra.metrics.missingTSVBModelError": "{nodeType}では{metricId}のTSVBモデルが存在しません",
"xpack.infra.metrics.pluginTitle": "メトリック",
"xpack.infra.metrics.refetchButtonLabel": "新規データを確認",
"xpack.infra.metricsExploer.everything": "すべて",
"xpack.infra.metricsExplorer.everything": "すべて",
"xpack.infra.metricsExplorer.actionsLabel.aria": "{grouping} のアクション",
"xpack.infra.metricsExplorer.actionsLabel.button": "アクション",
"xpack.infra.metricsExplorer.aggregationLabel": "/",

View file

@ -8894,7 +8894,7 @@
"xpack.infra.metrics.missingTSVBModelError": "{nodeType} 的 {metricId} TSVB 模型不存在",
"xpack.infra.metrics.pluginTitle": "指标",
"xpack.infra.metrics.refetchButtonLabel": "检查新数据",
"xpack.infra.metricsExploer.everything": "所有内容",
"xpack.infra.metricsExplorer.everything": "所有内容",
"xpack.infra.metricsExplorer.actionsLabel.aria": "适用于 {grouping} 的操作",
"xpack.infra.metricsExplorer.actionsLabel.button": "操作",
"xpack.infra.metricsExplorer.aggregationLabel": "的",

View file

@ -50,7 +50,7 @@ export default function ({ getService }: FtrProviderContext) {
const body = decodeOrThrow(metricsExplorerResponseRT)(response.body);
expect(body.series).length(1);
const firstSeries = first(body.series) as any;
expect(firstSeries).to.have.property('id', 'Everything');
expect(firstSeries).to.have.property('id', '*');
expect(firstSeries.columns).to.eql([
{ name: 'timestamp', type: 'date' },
{ name: 'metric_0', type: 'number' },
@ -90,7 +90,7 @@ export default function ({ getService }: FtrProviderContext) {
const body = decodeOrThrow(metricsExplorerResponseRT)(response.body);
expect(body.series).length(1);
const firstSeries = first(body.series) as any;
expect(firstSeries).to.have.property('id', 'Everything');
expect(firstSeries).to.have.property('id', '*');
expect(firstSeries.columns).to.eql([
{ name: 'timestamp', type: 'date' },
{ name: 'metric_0', type: 'number' },
@ -121,7 +121,7 @@ export default function ({ getService }: FtrProviderContext) {
const body = decodeOrThrow(metricsExplorerResponseRT)(response.body);
expect(body.series).length(1);
const firstSeries = first(body.series) as any;
expect(firstSeries).to.have.property('id', 'Everything');
expect(firstSeries).to.have.property('id', '*');
expect(firstSeries.columns).to.eql([]);
expect(firstSeries.rows).to.have.length(0);
});