[Stack Monitoring] Convert first half of server/lib folder to Typescript (#106327)

* TS convert top-level files

* Fix ccs_utils

* TS convert beats folder

* Add missing types file

* TS convert cluster folder

* TSify details folder

* TSify top level of nodes folder

* TSify elasticsearch nodes folder

* TSify elasticsearch shards folder

* TSify APM folder

* Fix type errors

* Fix failing typechecks and tests

* Fix more failing checks and tests

* Fix typecheck

* Fix typecheck

* Restore lodash get to logstash

* Fix fetching logstash cluster

* Restore lodash get to get_clusters_from_request

* Fix typecheck

* Fix detect_reason

* Fix get_kibanas_for_clusters

* Simplify kibanaUuids in get_kibanas_for_clusters

* Revert "Simplify kibanaUuids in get_kibanas_for_clusters"

This reverts commit 99597b09d7.

* Simplify kibanaUuids

* Fix test fixture
Zacqary Adam Xeper 2021-08-02 21:33:59 -04:00 committed by GitHub
parent 5533a4061f
commit 26c1dae0df
96 changed files with 883 additions and 527 deletions
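
Most of the hunks below repeat two mechanical patterns: lodash `get` lookups with a default become optional chaining with a nullish-coalescing fallback, and untyped parameters gain explicit types. A minimal sketch of the first pattern (the response shape is a hypothetical stand-in, not the real Kibana type):

```ts
import { get } from 'lodash';

// Hypothetical response shape standing in for the real aggregation types.
interface AggResponse {
  aggregations?: { total?: { value: number } };
}

const response: AggResponse = { aggregations: { total: { value: 3 } } };

// Before: stringly-typed path lookup with a default.
const beforeTotal = get(response, 'aggregations.total.value', 0);

// After: typed optional chaining with a nullish-coalescing default.
const afterTotal = response.aggregations?.total?.value ?? 0;

console.log(beforeTotal, afterTotal); // 3 3
```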

View file

@@ -5,4 +5,5 @@
* 2.0.
*/
// @ts-ignore
export { formatTimestampToDuration } from './format_timestamp_to_duration';

View file

@@ -46,7 +46,6 @@ describe('fetchStatus', () => {
triggeredMS: 0,
};
let alertStates: AlertState[] = [];
const licenseService = null;
const rulesClient = {
find: jest.fn(() => ({
total: 1,
@@ -74,7 +73,7 @@ describe('fetchStatus', () => {
});
it('should fetch from the alerts client', async () => {
const status = await fetchStatus(rulesClient as any, licenseService as any, alertTypes, [
const status = await fetchStatus(rulesClient as any, alertTypes, [
defaultClusterState.clusterUuid,
]);
expect(status).toEqual({
@@ -96,7 +95,7 @@ describe('fetchStatus', () => {
},
];
const status = await fetchStatus(rulesClient as any, licenseService as any, alertTypes, [
const status = await fetchStatus(rulesClient as any, alertTypes, [
defaultClusterState.clusterUuid,
]);
expect(Object.values(status).length).toBe(1);
@@ -105,9 +104,7 @@ describe('fetchStatus', () => {
});
it('should pass in the right filter to the alerts client', async () => {
await fetchStatus(rulesClient as any, licenseService as any, alertTypes, [
defaultClusterState.clusterUuid,
]);
await fetchStatus(rulesClient as any, alertTypes, [defaultClusterState.clusterUuid]);
expect((rulesClient.find as jest.Mock).mock.calls[0][0].options.filter).toBe(
`alert.attributes.alertTypeId:${alertType}`
);
@@ -118,7 +115,7 @@ describe('fetchStatus', () => {
alertTypeState: null,
})) as any;
const status = await fetchStatus(rulesClient as any, licenseService as any, alertTypes, [
const status = await fetchStatus(rulesClient as any, alertTypes, [
defaultClusterState.clusterUuid,
]);
expect(status[alertType].states.length).toEqual(0);
@@ -130,7 +127,7 @@ describe('fetchStatus', () => {
data: [],
})) as any;
const status = await fetchStatus(rulesClient as any, licenseService as any, alertTypes, [
const status = await fetchStatus(rulesClient as any, alertTypes, [
defaultClusterState.clusterUuid,
]);
expect(status).toEqual({});
@@ -146,7 +143,6 @@ describe('fetchStatus', () => {
};
await fetchStatus(
rulesClient as any,
customLicenseService as any,
[ALERT_CLUSTER_HEALTH],
[defaultClusterState.clusterUuid]
);
@@ -183,7 +179,6 @@ describe('fetchStatus', () => {
};
const status = await fetchStatus(
customRulesClient as any,
licenseService as any,
[ALERT_CPU_USAGE, ALERT_DISK_USAGE, ALERT_MISSING_MONITORING_DATA],
[defaultClusterState.clusterUuid]
);

View file

@@ -14,11 +14,9 @@ import {
CommonAlertFilter,
} from '../../../common/types/alerts';
import { ALERTS } from '../../../common/constants';
import { MonitoringLicenseService } from '../../types';
export async function fetchStatus(
rulesClient: RulesClient,
licenseService: MonitoringLicenseService,
alertTypes: string[] | undefined,
clusterUuids: string[],
filters: CommonAlertFilter[] = []

View file

@@ -5,14 +5,14 @@
* 2.0.
*/
import { get } from 'lodash';
import type { ElasticsearchResponse } from '../../../common/types/es';
const getMemPath = (cgroup) =>
const getMemPath = (cgroup?: string) =>
cgroup
? 'beats_stats.metrics.beat.cgroup.memory.mem.usage.bytes'
: 'beats_stats.metrics.beat.memstats.rss';
export const getDiffCalculation = (max, min) => {
export const getDiffCalculation = (max: number | null, min: number | null) => {
// no need to test max >= 0, but min <= 0 which is normal for a derivative after restart
// because we are aggregating/collapsing on ephemeral_ids
if (max !== null && min !== null && max >= 0 && min >= 0 && max >= min) {
@@ -30,7 +30,7 @@ export const apmAggFilterPath = [
'aggregations.max_mem_total.value',
'aggregations.versions.buckets',
];
export const apmUuidsAgg = (maxBucketSize, cgroup) => ({
export const apmUuidsAgg = (maxBucketSize?: string, cgroup?: string) => ({
total: {
cardinality: {
field: 'beats_stats.beat.uuid',
@@ -92,14 +92,16 @@ export const apmUuidsAgg = (maxBucketSize, cgroup) => ({
},
});
export const apmAggResponseHandler = (response) => {
const apmTotal = get(response, 'aggregations.total.value', 0);
export const apmAggResponseHandler = (response: ElasticsearchResponse) => {
const apmTotal = response.aggregations?.total.value ?? 0;
const eventsTotalMax = get(response, 'aggregations.max_events_total.value', 0);
const eventsTotalMin = get(response, 'aggregations.min_events_total.value', 0);
const memMax = get(response, 'aggregations.max_mem_total.value', 0);
const memMin = get(response, 'aggregations.min_mem_total.value', 0);
const versions = get(response, 'aggregations.versions.buckets', []).map(({ key }) => key);
const eventsTotalMax = response.aggregations?.max_events_total.value ?? 0;
const eventsTotalMin = response.aggregations?.min_events_total.value ?? 0;
const memMax = response.aggregations?.max_mem_total.value ?? 0;
const memMin = response.aggregations?.min_mem_total.value ?? 0;
const versions = (response.aggregations?.versions.buckets ?? []).map(
({ key }: { key: string }) => key
);
return {
apmTotal,

View file

@@ -5,9 +5,7 @@
* 2.0.
*/
// @ts-ignore
import { createApmQuery } from './create_apm_query';
// @ts-ignore
import { ApmClusterMetric } from '../metrics';
import { LegacyRequest } from '../../types';
import { ElasticsearchResponse } from '../../../common/types/es';

View file

@@ -5,8 +5,7 @@
* 2.0.
*/
import { defaults } from 'lodash';
import { ApmMetric } from '../metrics';
import { ApmMetric, ApmMetricFields } from '../metrics';
import { createQuery } from '../create_query';
/**
@@ -14,14 +13,23 @@ import { createQuery } from '../create_query';
*
* @param {Object} options The options to pass to {@code createQuery}
*/
export function createApmQuery(options = {}) {
options = defaults(options, {
filters: [],
export function createApmQuery(options: {
filters?: any[];
types?: string[];
metric?: ApmMetricFields;
uuid?: string;
clusterUuid: string;
start?: number;
end?: number;
}) {
const opts = {
filters: [] as any[],
metric: ApmMetric.getMetricFields(),
types: ['stats', 'beats_stats'],
});
...(options ?? {}),
};
options.filters.push({
opts.filters.push({
bool: {
must: {
term: {
@@ -31,5 +39,5 @@ export function createApmQuery(options = {}) {
},
});
return createQuery(options);
return createQuery(opts);
}
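
The `defaults()`-to-spread rewrite above recurs in `createBeatsQuery` below; a minimal sketch of the difference, with illustrative option names rather than the real metric types:

```ts
import { defaults } from 'lodash';

interface QueryOptions {
  filters?: string[];
  types?: string[];
}

// Before: lodash defaults reassigns the loosely-typed options parameter.
function beforeQuery(options: QueryOptions = {}) {
  options = defaults(options, { filters: [], types: ['stats'] });
  return options;
}

// After: an explicit spread builds a fully-populated local `opts`,
// leaving the caller's object untouched and the types narrow.
function afterQuery(options: QueryOptions) {
  const opts = { filters: [] as string[], types: ['stats'], ...(options ?? {}) };
  return opts;
}

console.log(afterQuery({ types: ['beats_stats'] })); // { filters: [], types: ['beats_stats'] }
```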

View file

@@ -66,7 +66,7 @@ export function handleResponse(
eventsTotal: getDiffCalculation(eventsTotalLast, eventsTotalFirst),
eventsEmitted: getDiffCalculation(eventsEmittedLast, eventsEmittedFirst),
eventsDropped: getDiffCalculation(eventsDroppedLast, eventsDroppedFirst),
bytesWritten: getDiffCalculation(bytesWrittenLast, bytesWrittenFirst),
bytesWritten: getDiffCalculation(Number(bytesWrittenLast), Number(bytesWrittenFirst)),
config: {
container: config.get('monitoring.ui.container.apm.enabled'),
},

View file

@@ -49,9 +49,11 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e
// add the beat
const rateOptions = {
hitTimestamp: stats?.timestamp ?? hit._source['@timestamp'],
hitTimestamp: stats?.timestamp ?? hit._source['@timestamp'] ?? null,
earliestHitTimestamp:
earliestStats?.timestamp ?? hit.inner_hits?.earliest.hits?.hits[0]._source['@timestamp'],
earliestStats?.timestamp ??
hit.inner_hits?.earliest.hits?.hits[0]._source['@timestamp'] ??
null,
timeWindowMin: start,
timeWindowMax: end,
};

View file

@@ -5,14 +5,15 @@
* 2.0.
*/
import { get } from 'lodash';
import { LegacyRequest, Cluster } from '../../types';
import { checkParam } from '../error_missing_required';
import { createApmQuery } from './create_apm_query';
import { ApmMetric } from '../metrics';
import { apmAggResponseHandler, apmUuidsAgg, apmAggFilterPath } from './_apm_stats';
import { getTimeOfLastEvent } from './_get_time_of_last_event';
import { ElasticsearchResponse } from '../../../common/types/es';
export function handleResponse(clusterUuid, response) {
export function handleResponse(clusterUuid: string, response: ElasticsearchResponse) {
const { apmTotal, totalEvents, memRss, versions } = apmAggResponseHandler(response);
// combine stats
@@ -31,7 +32,11 @@ export function handleResponse(clusterUuid, response) {
};
}
export function getApmsForClusters(req, apmIndexPattern, clusters) {
export function getApmsForClusters(
req: LegacyRequest,
apmIndexPattern: string,
clusters: Cluster[]
) {
checkParam(apmIndexPattern, 'apmIndexPattern in apms/getApmsForClusters');
const start = req.payload.timeRange.min;
@@ -42,7 +47,7 @@ export function getApmsForClusters(req, apmIndexPattern, clusters) {
return Promise.all(
clusters.map(async (cluster) => {
const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
const clusterUuid = cluster.elasticsearch?.cluster?.id ?? cluster.cluster_uuid;
const params = {
index: apmIndexPattern,
size: 0,

View file

@@ -6,16 +6,17 @@
*/
import moment from 'moment';
import { LegacyRequest } from '../../types';
import { checkParam } from '../error_missing_required';
import { createApmQuery } from './create_apm_query';
import { apmAggFilterPath, apmUuidsAgg, apmAggResponseHandler } from './_apm_stats';
import { getTimeOfLastEvent } from './_get_time_of_last_event';
import type { ElasticsearchResponse } from '../../../common/types/es';
export function handleResponse(...args) {
const { apmTotal, totalEvents, bytesSent } = apmAggResponseHandler(...args);
export function handleResponse(response: ElasticsearchResponse) {
const { apmTotal, totalEvents } = apmAggResponseHandler(response);
return {
bytesSent,
totalEvents,
apms: {
total: apmTotal,
@@ -23,7 +24,7 @@ export function handleResponse(...args) {
};
}
export async function getStats(req, apmIndexPattern, clusterUuid) {
export async function getStats(req: LegacyRequest, apmIndexPattern: string, clusterUuid: string) {
checkParam(apmIndexPattern, 'apmIndexPattern in getBeats');
const config = req.server.config();
@@ -60,7 +61,7 @@ export async function getStats(req, apmIndexPattern, clusterUuid) {
}),
]);
const formattedResponse = handleResponse(response, start, end);
const formattedResponse = handleResponse(response);
return {
...formattedResponse,
timeOfLastEvent,

View file

@@ -5,9 +5,10 @@
* 2.0.
*/
import { upperFirst, get } from 'lodash';
import { upperFirst } from 'lodash';
import type { BeatsElasticsearchResponse, BucketCount } from './types';
export const getDiffCalculation = (max, min) => {
export const getDiffCalculation = (max: number | null, min: number | null) => {
// no need to test max >= 0, but min <= 0 which is normal for a derivative after restart
// because we are aggregating/collapsing on ephemeral_ids
if (max !== null && min !== null && max >= 0 && min >= 0 && max >= min) {
@@ -27,7 +28,7 @@ export const beatsAggFilterPath = [
'aggregations.max_bytes_sent_total.value',
];
export const beatsUuidsAgg = (maxBucketSize) => ({
export const beatsUuidsAgg = (maxBucketSize: string) => ({
types: {
terms: {
field: 'beats_stats.beat.type',
@@ -98,24 +99,24 @@ export const beatsUuidsAgg = (maxBucketSize) => ({
},
});
export const beatsAggResponseHandler = (response) => {
export const beatsAggResponseHandler = (response?: BeatsElasticsearchResponse) => {
// beat types stat
const buckets = get(response, 'aggregations.types.buckets', []);
const beatTotal = get(response, 'aggregations.total.value', 0);
const beatTypes = buckets.reduce((types, typeBucket) => {
const buckets = response?.aggregations?.types?.buckets ?? [];
const beatTotal = response?.aggregations?.total.value ?? 0;
const beatTypes = buckets.reduce((types: BucketCount<{ type: string }>, typeBucket) => {
return [
...types,
{
type: upperFirst(typeBucket.key),
count: get(typeBucket, 'uuids.buckets.length'),
count: typeBucket.uuids.buckets.length,
},
];
}, []);
const eventsTotalMax = get(response, 'aggregations.max_events_total.value', 0);
const eventsTotalMin = get(response, 'aggregations.min_events_total.value', 0);
const bytesSentMax = get(response, 'aggregations.max_bytes_sent_total.value', 0);
const bytesSentMin = get(response, 'aggregations.min_bytes_sent_total.value', 0);
const eventsTotalMax = response?.aggregations?.max_events_total.value ?? 0;
const eventsTotalMin = response?.aggregations?.min_events_total.value ?? 0;
const bytesSentMax = response?.aggregations?.max_bytes_sent_total.value ?? 0;
const bytesSentMin = response?.aggregations?.min_bytes_sent_total.value ?? 0;
return {
beatTotal,

View file

@@ -5,8 +5,7 @@
* 2.0.
*/
import { defaults } from 'lodash';
import { BeatsMetric } from '../metrics';
import { BeatsMetric, BeatsMetricFields } from '../metrics';
import { createQuery } from '../create_query';
/**
@@ -17,15 +16,24 @@ import { createQuery } from '../create_query';
*
* @param {Object} options The options to pass to {@code createQuery}
*/
export function createBeatsQuery(options = {}) {
options = defaults(options, {
filters: [],
export function createBeatsQuery(options: {
filters?: any[];
types?: string[];
metric?: BeatsMetricFields;
uuid?: string;
clusterUuid: string;
start?: number;
end?: number;
}) {
const opts = {
filters: [] as any[],
metric: BeatsMetric.getMetricFields(),
types: ['stats', 'beats_stats'],
});
...(options ?? {}),
};
// avoid showing APM Server stats alongside other Beats because APM Server will have its own UI
options.filters.push({
opts.filters.push({
bool: {
must_not: {
term: {
@@ -35,5 +43,5 @@ export function createBeatsQuery(options = {}) {
},
});
return createQuery(options);
return createQuery(opts);
}

View file

@@ -11,7 +11,7 @@ import { ElasticsearchResponse } from '../../../common/types/es';
// @ts-ignore
import { checkParam } from '../error_missing_required';
// @ts-ignore
import { createBeatsQuery } from './create_beats_query.js';
import { createBeatsQuery } from './create_beats_query';
// @ts-ignore
import { getDiffCalculation } from './_beats_stats';
@@ -67,7 +67,7 @@ export function handleResponse(response: ElasticsearchResponse, beatUuid: string
eventsTotal: getDiffCalculation(eventsTotalLast, eventsTotalFirst) ?? null,
eventsEmitted: getDiffCalculation(eventsEmittedLast, eventsEmittedFirst) ?? null,
eventsDropped: getDiffCalculation(eventsDroppedLast, eventsDroppedFirst) ?? null,
bytesWritten: getDiffCalculation(bytesWrittenLast, bytesWrittenFirst) ?? null,
bytesWritten: getDiffCalculation(Number(bytesWrittenLast), Number(bytesWrittenFirst)) ?? null,
handlesHardLimit,
handlesSoftLimit,
};

View file

@@ -19,14 +19,14 @@ import { LegacyRequest } from '../../types';
import { ElasticsearchResponse } from '../../../common/types/es';
interface Beat {
uuid: string | undefined;
name: string | undefined;
type: string | undefined;
output: string | undefined;
uuid?: string;
name?: string;
type?: string;
output?: string;
total_events_rate: number;
bytes_sent_rate: number;
memory: number | undefined;
version: string | undefined;
memory?: number;
version?: string;
errors: any;
}
@@ -63,9 +63,9 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e
// add the beat
const rateOptions = {
hitTimestamp: stats?.timestamp ?? hit._source['@timestamp'],
hitTimestamp: stats?.timestamp ?? hit._source['@timestamp']!,
earliestHitTimestamp:
earliestStats?.timestamp ?? hit.inner_hits?.earliest.hits?.hits[0]._source['@timestamp'],
earliestStats?.timestamp ?? hit.inner_hits?.earliest.hits?.hits[0]._source['@timestamp']!,
timeWindowMin: start,
timeWindowMax: end,
};
@@ -96,8 +96,8 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e
name: stats?.beat?.name,
type: upperFirst(stats?.beat?.type),
output: upperFirst(statsMetrics?.libbeat?.output?.type),
total_events_rate: totalEventsRate,
bytes_sent_rate: bytesSentRate,
total_events_rate: totalEventsRate!,
bytes_sent_rate: bytesSentRate!,
errors,
memory:
hit._source.beats_stats?.metrics?.beat?.memstats?.memory_alloc ??

View file

@@ -5,13 +5,14 @@
* 2.0.
*/
import { get } from 'lodash';
import { checkParam } from '../error_missing_required';
import { BeatsClusterMetric } from '../metrics';
import { createBeatsQuery } from './create_beats_query';
import { beatsAggFilterPath, beatsUuidsAgg, beatsAggResponseHandler } from './_beats_stats';
import type { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest, Cluster } from '../../types';
export function handleResponse(clusterUuid, response) {
export function handleResponse(clusterUuid: string, response: ElasticsearchResponse) {
const { beatTotal, beatTypes, totalEvents, bytesSent } = beatsAggResponseHandler(response);
// combine stats
@@ -30,7 +31,11 @@ export function handleResponse(clusterUuid, response) {
};
}
export function getBeatsForClusters(req, beatsIndexPattern, clusters) {
export function getBeatsForClusters(
req: LegacyRequest,
beatsIndexPattern: string,
clusters: Cluster[]
) {
checkParam(beatsIndexPattern, 'beatsIndexPattern in beats/getBeatsForClusters');
const start = req.payload.timeRange.min;
@@ -40,7 +45,7 @@ export function getBeatsForClusters(req, beatsIndexPattern, clusters) {
return Promise.all(
clusters.map(async (cluster) => {
const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
const clusterUuid = cluster.elasticsearch?.cluster?.id ?? cluster.cluster_uuid;
const params = {
index: beatsIndexPattern,
size: 0,
@@ -53,7 +58,7 @@ export function getBeatsForClusters(req, beatsIndexPattern, clusters) {
clusterUuid,
metric: BeatsClusterMetric.getMetricFields(), // override default of BeatMetric.getMetricFields
}),
aggs: beatsUuidsAgg(maxBucketSize),
aggs: beatsUuidsAgg(maxBucketSize!),
},
};

View file

@@ -5,17 +5,19 @@
* 2.0.
*/
import { upperFirst, get } from 'lodash';
import { upperFirst } from 'lodash';
import { LegacyRequest } from '../../types';
import { checkParam } from '../error_missing_required';
import { createBeatsQuery } from './create_beats_query';
import type { BeatsElasticsearchResponse, BucketCount } from './types';
export function handleResponse(response) {
const aggs = get(response, 'aggregations');
export function handleResponse(response?: BeatsElasticsearchResponse) {
const aggs = response?.aggregations;
const getTimeRangeCount = (name) => {
const lastActiveBuckets = get(aggs, 'active_counts.buckets', []);
const getTimeRangeCount = (name: string) => {
const lastActiveBuckets = aggs?.active_counts?.buckets ?? [];
const rangeBucket = lastActiveBuckets.find((bucket) => bucket.key === name);
return get(rangeBucket, 'uuids.buckets.length');
return rangeBucket?.uuids.buckets.length;
};
// aggregations are not ordered, so we find the bucket for each timestamp range
@@ -34,25 +36,31 @@ export function handleResponse(response) {
{ range: 'last1d', count: last1dCount },
];
const latestVersions = get(aggs, 'versions.buckets', []).reduce((accum, current) => {
return [
...accum,
{
version: current.key,
count: get(current, 'uuids.buckets.length'),
},
];
}, []);
const latestVersions = (aggs?.versions?.buckets ?? []).reduce(
(accum: BucketCount<{ version: string }>, current) => {
return [
...accum,
{
version: current.key,
count: current.uuids.buckets.length,
},
];
},
[]
);
const latestTypes = get(aggs, 'types.buckets', []).reduce((accum, current) => {
return [
...accum,
{
type: upperFirst(current.key),
count: get(current, 'uuids.buckets.length'),
},
];
}, []);
const latestTypes = (aggs?.types?.buckets ?? []).reduce(
(accum: BucketCount<{ type: string }>, current) => {
return [
...accum,
{
type: upperFirst(current.key),
count: current.uuids.buckets.length,
},
];
},
[]
);
return {
latestActive,
@@ -61,7 +69,7 @@ export function handleResponse(response) {
};
}
export function getLatestStats(req, beatsIndexPattern, clusterUuid) {
export function getLatestStats(req: LegacyRequest, beatsIndexPattern: string, clusterUuid: string) {
checkParam(beatsIndexPattern, 'beatsIndexPattern in getBeats');
const config = req.server.config();

View file

@@ -6,12 +6,14 @@
*/
import moment from 'moment';
import type { BeatsElasticsearchResponse } from './types';
import { LegacyRequest } from '../../types';
import { checkParam } from '../error_missing_required';
import { createBeatsQuery } from './create_beats_query';
import { beatsAggFilterPath, beatsUuidsAgg, beatsAggResponseHandler } from './_beats_stats';
export function handleResponse(...args) {
const { beatTotal, beatTypes, totalEvents, bytesSent } = beatsAggResponseHandler(...args);
export function handleResponse(response: BeatsElasticsearchResponse) {
const { beatTotal, beatTypes, totalEvents, bytesSent } = beatsAggResponseHandler(response);
return {
total: beatTotal,
@@ -23,7 +25,7 @@ export function handleResponse(...args) {
};
}
export async function getStats(req, beatsIndexPattern, clusterUuid) {
export async function getStats(req: LegacyRequest, beatsIndexPattern: string, clusterUuid: string) {
checkParam(beatsIndexPattern, 'beatsIndexPattern in getBeats');
const config = req.server.config();
@@ -42,12 +44,12 @@ export async function getStats(req, beatsIndexPattern, clusterUuid) {
end,
clusterUuid,
}),
aggs: beatsUuidsAgg(maxBucketSize),
aggs: beatsUuidsAgg(maxBucketSize!),
},
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
const response = await callWithRequest(req, 'search', params);
const response: BeatsElasticsearchResponse = await callWithRequest(req, 'search', params);
return handleResponse(response, start, end);
return handleResponse(response);
}

View file

@@ -0,0 +1,38 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { ElasticsearchResponse } from '../../../common/types/es';
import type { Aggregation } from '../../types';
export type BucketCount<T> = Array<
T & {
count: number;
}
>;
export interface BeatsElasticsearchResponse extends ElasticsearchResponse {
aggregations?: {
types?: Aggregation;
active_counts?: Aggregation;
versions?: Aggregation;
total: {
value: number;
};
max_events_total: {
value: number;
};
min_events_total: {
value: number;
};
max_bytes_sent_total: {
value: number;
};
min_bytes_sent_total: {
value: number;
};
};
}
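
A quick usage sketch of the `BucketCount<T>` helper introduced above (the sample data is made up):

```ts
type BucketCount<T> = Array<T & { count: number }>;

// e.g. the shape accumulated by beatsAggResponseHandler's reduce:
const beatTypes: BucketCount<{ type: string }> = [
  { type: 'Filebeat', count: 3 },
  { type: 'Metricbeat', count: 2 },
];

console.log(beatTypes.map((b) => `${b.type}: ${b.count}`).join(', '));
// Filebeat: 3, Metricbeat: 2
```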

View file

@@ -5,7 +5,7 @@
* 2.0.
*/
import { calculateAuto } from './calculate_auto.js';
import { calculateAuto } from './calculate_auto';
import _ from 'lodash';
import moment from 'moment';

View file

@@ -5,7 +5,9 @@
* 2.0.
*/
import moment from 'moment';
import moment, { Duration } from 'moment';
type RoundingRule = [number | Duration, Duration];
const d = moment.duration;
const roundingRules = [
@@ -25,18 +27,22 @@ const roundingRules = [
[d(3, 'week'), d(1, 'week')],
[d(1, 'year'), d(1, 'month')],
[Infinity, d(1, 'year')],
];
] as RoundingRule[];
function find(rules, check) {
function pick(buckets, duration) {
const target = duration / buckets;
function find(
rules: RoundingRule[],
check: (b: number | Duration, i: Duration, t: number) => Duration | void
) {
function pick(buckets?: number, duration?: Duration): Duration {
if (!buckets || !duration) return moment.duration(0);
const target = duration.asMilliseconds() / buckets;
let lastResp;
for (let i = 0; i < rules.length; i++) {
const rule = rules[i];
const resp = check(rule[0], rule[1], target);
if (resp == null) {
if (resp === null || resp === undefined) {
if (lastResp) {
return lastResp;
}
@@ -51,11 +57,9 @@ function find(rules, check) {
return moment.duration(ms, 'ms');
}
return function (buckets, duration) {
return function (buckets: number, duration: Duration) {
const interval = pick(buckets, duration);
if (interval) {
return moment.duration(interval._data);
}
return interval;
};
}
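
For intuition about `pick()` above: it converts the requested window into a per-bucket target in milliseconds, then walks the rounding rules to find the nearest round interval. The target arithmetic alone:

```ts
import moment from 'moment';

// 100 buckets over a 1-hour window targets 36 s per bucket;
// the rounding rules then map that target onto a round interval.
const duration = moment.duration(1, 'hour');
const buckets = 100;
const target = duration.asMilliseconds() / buckets;
console.log(target); // 36000 (ms)
```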

View file

@@ -10,7 +10,7 @@ import moment from 'moment';
* Return `true` if timestamp of last update is younger than 10 minutes ago
* If older, it indicates the cluster/instance is offline
*/
export function calculateAvailability(timestamp) {
export function calculateAvailability(timestamp: string) {
const lastUpdate = moment(timestamp); // converts to local time
return lastUpdate.isAfter(moment().subtract(10, 'minutes')); // compares with local time
}
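
A usage sketch of the function above:

```ts
import moment from 'moment';

function calculateAvailability(timestamp: string) {
  const lastUpdate = moment(timestamp); // converts to local time
  return lastUpdate.isAfter(moment().subtract(10, 'minutes'));
}

// A report from 5 minutes ago counts as online; one from 20 minutes ago does not.
console.log(calculateAvailability(moment().subtract(5, 'minutes').toISOString())); // true
console.log(calculateAvailability(moment().subtract(20, 'minutes').toISOString())); // false
```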

View file

@@ -9,7 +9,7 @@
* A reduce that takes statuses from different products in a cluster and boils
* them down into a single status
*/
export function calculateOverallStatus(set) {
export function calculateOverallStatus(set: Array<string | null | undefined>) {
return set.reduce((result, current) => {
if (!current) {
return result;

View file

@@ -20,6 +20,16 @@ import moment from 'moment';
* 4. From that subtract the earliest timestamp from the time picker
* This gives you the denominator in millis. Divide it by 1000 to convert to seconds
*/
interface CalculateRateProps {
hitTimestamp: string | null;
earliestHitTimestamp: string | null;
latestTotal?: string | number | null;
earliestTotal?: string | number | null;
timeWindowMin: number;
timeWindowMax: number;
}
export function calculateRate({
hitTimestamp = null,
earliestHitTimestamp = null,
@@ -27,7 +37,7 @@ export function calculateRate({
earliestTotal = null,
timeWindowMin,
timeWindowMax,
} = {}) {
}: CalculateRateProps) {
const nullResult = {
rate: null,
isEstimate: false,
@@ -58,9 +68,9 @@ export function calculateRate({
let rate = null;
let isEstimate = false;
if (millisDelta !== 0) {
const totalDelta = latestTotal - earliestTotal;
const totalDelta = Number(latestTotal) - Number(earliestTotal);
if (totalDelta < 0) {
rate = latestTotal / (millisDelta / 1000); // a restart caused an unwanted negative rate
rate = Number(latestTotal) / (millisDelta / 1000); // a restart caused an unwanted negative rate
isEstimate = true;
} else {
rate = totalDelta / (millisDelta / 1000);
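
To make the rate arithmetic concrete, a worked example of the happy path (no counter reset), with hypothetical numbers:

```ts
// Two hits 30 s apart whose event counter grew from 1000 to 1600.
const millisDelta = 30_000; // hitTimestamp - earliestHitTimestamp, in ms
const latestTotal = 1600;
const earliestTotal = 1000;

const totalDelta = latestTotal - earliestTotal; // 600
const rate = totalDelta / (millisDelta / 1000); // 600 / 30 = 20 events/s
console.log(rate); // 20
```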

View file

@@ -9,11 +9,14 @@ import moment from 'moment';
import { calculateAuto } from './calculate_auto';
export function calculateTimeseriesInterval(
lowerBoundInMsSinceEpoch,
upperBoundInMsSinceEpoch,
minIntervalSeconds
lowerBoundInMsSinceEpoch: number,
upperBoundInMsSinceEpoch: number,
minIntervalSeconds: number
) {
const duration = moment.duration(upperBoundInMsSinceEpoch - lowerBoundInMsSinceEpoch, 'ms');
return Math.max(minIntervalSeconds, calculateAuto(100, duration).asSeconds());
return Math.max(
!isNaN(minIntervalSeconds) ? minIntervalSeconds : 0,
calculateAuto(100, duration).asSeconds()
);
}

View file

@@ -6,8 +6,18 @@
*/
import { isFunction, get } from 'lodash';
import type { MonitoringConfig } from '../config';
export function appendMetricbeatIndex(config, indexPattern, ccs, bypass = false) {
type Config = Partial<MonitoringConfig> & {
get?: (key: string) => any;
};
export function appendMetricbeatIndex(
config: Config,
indexPattern: string,
ccs?: string,
bypass: boolean = false
) {
if (bypass) {
return indexPattern;
}
@@ -39,7 +49,7 @@ export function appendMetricbeatIndex(config, indexPattern, ccs, bypass = false)
* @param {String} ccs The optional cluster-prefix to prepend.
* @return {String} The index pattern with the {@code cluster} prefix appropriately prepended.
*/
export function prefixIndexPattern(config, indexPattern, ccs, monitoringIndicesOnly = false) {
export function prefixIndexPattern(
config: Config,
indexPattern: string,
ccs?: string,
monitoringIndicesOnly: boolean = false
) {
let ccsEnabled = false;
// TODO: NP
// This function is called with both NP config and LP config
@@ -102,7 +117,7 @@ export function prefixIndexPattern(config, indexPattern, ccs, monitoringIndicesO
* @param {String} indexName The index's name, possibly including the cross-cluster prefix
* @return {String} {@code null} if none. Otherwise the cluster prefix.
*/
export function parseCrossClusterPrefix(indexName) {
export function parseCrossClusterPrefix(indexName: string) {
const colonIndex = indexName.indexOf(':');
if (colonIndex === -1) {
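
A sketch of the `cluster:index` naming convention these helpers operate on; the parsing mirrors the `indexOf(':')` check above and the `null`-if-none return documented in the JSDoc:

```ts
// Cross-cluster index names take the form `<cluster>:<pattern>`.
const parseCrossClusterPrefix = (indexName: string) => {
  const colonIndex = indexName.indexOf(':');
  return colonIndex === -1 ? null : indexName.substring(0, colonIndex);
};

console.log(parseCrossClusterPrefix('.monitoring-es-*')); // null (local cluster)
console.log(parseCrossClusterPrefix('remote:.monitoring-es-*')); // 'remote'
```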

View file

@@ -6,8 +6,9 @@
*/
import { badRequest, notFound } from '@hapi/boom';
import { getClustersStats } from './get_clusters_stats';
import { i18n } from '@kbn/i18n';
import { LegacyRequest } from '../../types';
import { getClustersStats } from './get_clusters_stats';
/**
* This will fetch the cluster stats and cluster state as a single object for the cluster specified by the {@code req}.
@@ -17,7 +18,7 @@ import { i18n } from '@kbn/i18n';
* @param {String} clusterUuid The requested cluster's UUID
* @return {Promise} The object cluster response.
*/
export function getClusterStats(req, esIndexPattern, clusterUuid) {
export function getClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUuid: string) {
if (!clusterUuid) {
throw badRequest(
i18n.translate('xpack.monitoring.clusterStats.uuidNotSpecifiedErrorMessage', {

View file

@@ -6,8 +6,9 @@
*/
import { notFound } from '@hapi/boom';
import { set } from '@elastic/safer-lodash-set';
import { get } from 'lodash';
import { set } from '@elastic/safer-lodash-set';
import { i18n } from '@kbn/i18n';
import { getClustersStats } from './get_clusters_stats';
import { flagSupportedClusters } from './flag_supported_clusters';
import { getMlJobsForCluster } from '../elasticsearch';
@@ -15,7 +16,7 @@ import { getKibanasForClusters } from '../kibana';
import { getLogstashForClusters } from '../logstash';
import { getLogstashPipelineIds } from '../logstash/get_pipeline_ids';
import { getBeatsForClusters } from '../beats';
import { getClustersSummary } from './get_clusters_summary';
import { getClustersSummary, EnhancedClusters } from './get_clusters_summary';
import {
STANDALONE_CLUSTER_CLUSTER_UUID,
CODE_PATH_ML,
@@ -26,21 +27,27 @@ import {
CODE_PATH_BEATS,
CODE_PATH_APM,
} from '../../../common/constants';
import { getApmsForClusters } from '../apm/get_apms_for_clusters';
import { i18n } from '@kbn/i18n';
import { checkCcrEnabled } from '../elasticsearch/ccr';
import { fetchStatus } from '../alerts/fetch_status';
import { getStandaloneClusterDefinition, hasStandaloneClusters } from '../standalone_clusters';
import { getLogTypes } from '../logs';
import { isInCodePath } from './is_in_code_path';
import { LegacyRequest, Cluster } from '../../types';
/**
* Get all clusters or the cluster associated with {@code clusterUuid} when it is defined.
*/
export async function getClustersFromRequest(
req,
indexPatterns,
{ clusterUuid, start, end, codePaths } = {}
req: LegacyRequest,
indexPatterns: { [x: string]: string },
{
clusterUuid,
start,
end,
codePaths,
}: { clusterUuid: string; start: number; end: number; codePaths: string[] }
) {
const {
esIndexPattern,
@@ -54,7 +61,7 @@ export async function getClustersFromRequest(
const config = req.server.config();
const isStandaloneCluster = clusterUuid === STANDALONE_CLUSTER_CLUSTER_UUID;
let clusters = [];
let clusters: Cluster[] = [];
if (isStandaloneCluster) {
clusters.push(getStandaloneClusterDefinition());
@@ -120,7 +127,6 @@ export async function getClustersFromRequest(
const rulesClient = req.getRulesClient();
const alertStatus = await fetchStatus(
rulesClient,
req.server.plugins.monitoring.info,
undefined,
clusters.map((cluster) => get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid))
);
@@ -139,16 +145,16 @@ export async function getClustersFromRequest(
list: Object.keys(alertStatus).reduce((accum, alertName) => {
const value = alertStatus[alertName];
if (value.states && value.states.length) {
accum[alertName] = {
Reflect.set(accum, alertName, {
...value,
states: value.states.filter(
(state) =>
state.state.cluster.clusterUuid ===
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid)
),
};
});
} else {
accum[alertName] = value;
Reflect.set(accum, alertName, value);
}
return accum;
}, {}),
@@ -197,7 +203,7 @@ export async function getClustersFromRequest(
);
// withhold LS overview stats until there is at least 1 pipeline
if (logstash.clusterUuid === clusterUuid && !pipelines.length) {
logstash.stats = {};
Reflect.set(logstash, 'stats', {});
}
set(clusters[clusterIndex], 'logstash', logstash.stats);
});
@@ -225,18 +231,18 @@ export async function getClustersFromRequest(
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid) === apm.clusterUuid
);
if (clusterIndex >= 0) {
const { stats, config } = apm;
clusters[clusterIndex].apm = {
const { stats, config: apmConfig } = apm;
Reflect.set(clusters[clusterIndex], 'apm', {
...stats,
config,
};
config: apmConfig,
});
}
});
// check ccr configuration
const isCcrEnabled = await checkCcrEnabled(req, esIndexPattern);
const kibanaUuid = config.get('server.uuid');
const kibanaUuid = config.get('server.uuid')!;
return getClustersSummary(req.server, clusters, kibanaUuid, isCcrEnabled);
return getClustersSummary(req.server, clusters as EnhancedClusters[], kibanaUuid, isCcrEnabled);
}

View file

@@ -16,7 +16,7 @@ import { calculateOverallStatus } from '../calculate_overall_status';
// @ts-ignore
import { MonitoringLicenseError } from '../errors/custom_errors';
type EnhancedClusters = ElasticsearchModifiedSource & {
export type EnhancedClusters = ElasticsearchModifiedSource & {
license: ElasticsearchLegacySource['license'];
[key: string]: any;
};

View file

@@ -5,6 +5,7 @@
* 2.0.
*/
import { LegacyServer } from '../../types';
import { prefixIndexPattern } from '../ccs_utils';
import {
INDEX_PATTERN_ELASTICSEARCH,
@@ -14,7 +15,11 @@ import {
INDEX_ALERTS,
} from '../../../common/constants';
export function getIndexPatterns(server, additionalPatterns = {}, ccs = '*') {
export function getIndexPatterns(
server: LegacyServer,
additionalPatterns: Record<string, string> = {},
ccs: string = '*'
) {
const config = server.config();
const esIndexPattern = prefixIndexPattern(config, INDEX_PATTERN_ELASTICSEARCH, ccs);
const kbnIndexPattern = prefixIndexPattern(config, INDEX_PATTERN_KIBANA, ccs);

View file

@@ -7,7 +7,7 @@
import { CODE_PATH_ALL } from '../../../common/constants';
export function isInCodePath(codePaths, codePathsToTest) {
export function isInCodePath(codePaths: string[], codePathsToTest: string[]) {
if (codePaths.includes(CODE_PATH_ALL)) {
return true;
}

View file

@@ -8,7 +8,7 @@
import { set } from '@elastic/safer-lodash-set';
import { MissingRequiredError } from './error_missing_required';
import { ElasticsearchMetric } from './metrics';
import { createQuery } from './create_query.js';
import { createQuery } from './create_query';
let metric;

View file

@@ -5,23 +5,37 @@
* 2.0.
*/
import { defaults, get } from 'lodash';
import { MissingRequiredError } from './error_missing_required';
import { defaults } from 'lodash';
import moment from 'moment';
import { MissingRequiredError } from './error_missing_required';
import { standaloneClusterFilter } from './standalone_clusters';
import { STANDALONE_CLUSTER_CLUSTER_UUID } from '../../common/constants';
export function createTimeFilter(options) {
export interface TimerangeFilter {
range: {
[x: string]: {
format: 'epoch_millis';
gte?: number;
lte?: number;
};
};
}
export function createTimeFilter(options: {
start?: number;
end?: number;
metric?: { timestampField: string };
}) {
const { start, end } = options;
if (!start && !end) {
return null;
}
const timestampField = get(options, 'metric.timestampField');
const timestampField = options.metric?.timestampField;
if (!timestampField) {
throw new MissingRequiredError('metric.timestampField');
}
const timeRangeFilter = {
const timeRangeFilter: TimerangeFilter = {
range: {
[timestampField]: {
format: 'epoch_millis',
@@ -50,9 +64,17 @@ export function createTimeFilter(options) {
* @param {Date} options.end - numeric timestamp (optional)
* @param {Metric} options.metric - Metric instance or metric fields object @see ElasticsearchMetric.getMetricFields
*/
export function createQuery(options) {
options = defaults(options, { filters: [] });
const { type, types, clusterUuid, uuid, filters } = options;
export function createQuery(options: {
type?: string;
types?: string[];
filters?: any[];
clusterUuid: string;
uuid?: string;
start?: number;
end?: number;
metric?: { uuidField?: string; timestampField: string };
}) {
const { type, types, clusterUuid, uuid, filters } = defaults(options, { filters: [] });
const isFromStandaloneCluster = clusterUuid === STANDALONE_CLUSTER_CLUSTER_UUID;
@@ -63,8 +85,8 @@ export function createQuery(options) {
typeFilter = {
bool: {
should: [
...types.map((type) => ({ term: { type } })),
...types.map((type) => ({ term: { 'metricset.name': type } })),
...types.map((t) => ({ term: { type: t } })),
...types.map((t) => ({ term: { 'metricset.name': t } })),
],
},
};
@@ -78,23 +100,26 @@ export function createQuery(options) {
let uuidFilter;
// options.uuid can be null, for example getting all the clusters
if (uuid) {
const uuidField = get(options, 'metric.uuidField');
const uuidField = options.metric?.uuidField;
if (!uuidField) {
throw new MissingRequiredError('options.uuid given but options.metric.uuidField is false');
}
uuidFilter = { term: { [uuidField]: uuid } };
}
const timestampField = get(options, 'metric.timestampField');
const timestampField = options.metric?.timestampField;
if (!timestampField) {
throw new MissingRequiredError('metric.timestampField');
}
const timeRangeFilter = createTimeFilter(options);
const combinedFilters = [typeFilter, clusterUuidFilter, uuidFilter, ...filters];
if (timeRangeFilter) {
combinedFilters.push(timeRangeFilter);
}
const combinedFilters = [
typeFilter,
clusterUuidFilter,
uuidFilter,
timeRangeFilter ?? undefined,
...filters,
];
if (isFromStandaloneCluster) {
combinedFilters.push(standaloneClusterFilter);
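
For reference, the shape `createTimeFilter` produces per the `TimerangeFilter` interface above; the outer key is the metric's `timestampField` ('timestamp' here) and the bounds are arbitrary epoch-millis examples:

```ts
const timeRangeFilter = {
  range: {
    timestamp: {
      // epoch_millis format; gte/lte are only present when start/end are given
      format: 'epoch_millis' as const,
      gte: 1627776000000,
      lte: 1627862400000,
    },
  },
};
console.log(JSON.stringify(timeRangeFilter, null, 2));
```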

View file

@@ -6,21 +6,23 @@
*/
import moment from 'moment';
import { isPlainObject } from 'lodash';
import Bluebird from 'bluebird';
import { checkParam } from '../error_missing_required';
import { getSeries } from './get_series';
import { calculateTimeseriesInterval } from '../calculate_timeseries_interval';
import { getTimezone } from '../get_timezone';
import { LegacyRequest } from '../../types';
type Metric = string | { keys: string | string[]; name: string };
export async function getMetrics(
req,
indexPattern,
metricSet = [],
filters = [],
req: LegacyRequest,
indexPattern: string,
metricSet: Metric[] = [],
filters: Array<Record<string, any>> = [],
metricOptions = {},
numOfBuckets = 0,
groupBy = null
numOfBuckets: number = 0,
groupBy: string | Record<string, any> | null = null
) {
checkParam(indexPattern, 'indexPattern in details/getMetrics');
checkParam(metricSet, 'metricSet in details/getMetrics');
@@ -29,7 +31,7 @@ export async function getMetrics(
// TODO: Pass in req parameters as explicit function parameters
let min = moment.utc(req.payload.timeRange.min).valueOf();
const max = moment.utc(req.payload.timeRange.max).valueOf();
const minIntervalSeconds = config.get('monitoring.ui.min_interval_seconds');
const minIntervalSeconds = Number(config.get('monitoring.ui.min_interval_seconds'));
const bucketSize = calculateTimeseriesInterval(min, max, minIntervalSeconds);
const timezone = await getTimezone(req);
@@ -38,11 +40,11 @@ export async function getMetrics(
min = max - numOfBuckets * bucketSize * 1000;
}
return Bluebird.map(metricSet, (metric) => {
return Bluebird.map(metricSet, (metric: Metric) => {
// metric names match the literal metric name, but they can be supplied in groups or individually
let metricNames;
if (isPlainObject(metric)) {
if (typeof metric !== 'string') {
metricNames = metric.keys;
} else {
metricNames = [metric];
@@ -57,10 +59,10 @@ export async function getMetrics(
});
});
}).then((rows) => {
const data = {};
const data: Record<string, any> = {};
metricSet.forEach((key, index) => {
// keyName must match the value stored in the html template
const keyName = isPlainObject(key) ? key.name : key;
const keyName = typeof key === 'string' ? key : key.name;
data[keyName] = rows[index];
});

View file

@@ -7,13 +7,36 @@
import { get } from 'lodash';
import moment from 'moment';
import { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest, Bucket } from '../../types';
import { checkParam } from '../error_missing_required';
import { metrics } from '../metrics';
import { createQuery } from '../create_query.js';
import { createQuery } from '../create_query';
import { formatTimestampToDuration } from '../../../common';
import { NORMALIZED_DERIVATIVE_UNIT, CALCULATE_DURATION_UNTIL } from '../../../common/constants';
import { formatUTCTimestampForTimezone } from '../format_timezone';
type SeriesBucket = Bucket & { metric_mb_deriv?: { normalized_value: number } };
interface Metric {
app: string;
derivative: boolean;
mbField?: string;
aggs: any;
getDateHistogramSubAggs?: Function;
dateHistogramSubAggs?: any;
metricAgg: string;
field: string;
timestampField: string;
calculation: (
b: SeriesBucket,
key: string,
metric: Metric,
defaultSizeInSeconds: number
) => number | null;
serialize: () => string;
}
/**
* Derivative metrics for the first two agg buckets are unusable. For the first bucket, there
* simply is no derivative metric (as calculating a derivative requires two adjacent buckets). For
@@ -27,12 +50,12 @@ import { formatUTCTimestampForTimezone } from '../format_timezone';
* @param {int} minInMsSinceEpoch Lower bound of timepicker range, in ms-since-epoch
* @param {int} bucketSizeInSeconds Size of a single date_histogram bucket, in seconds
*/
function offsetMinForDerivativeMetric(minInMsSinceEpoch, bucketSizeInSeconds) {
function offsetMinForDerivativeMetric(minInMsSinceEpoch: number, bucketSizeInSeconds: number) {
return minInMsSinceEpoch - 2 * bucketSizeInSeconds * 1000;
}
// Use the metric object as the source of truth on where to find the UUID
function getUuid(req, metric) {
function getUuid(req: LegacyRequest, metric: Metric) {
if (metric.app === 'kibana') {
return req.params.kibanaUuid;
} else if (metric.app === 'logstash') {
@@ -42,12 +65,11 @@ function getUuid(req, metric) {
}
}
function defaultCalculation(bucket, key) {
const mbKey = `metric_mb_deriv.normalized_value`;
const legacyValue = get(bucket, key, null);
const mbValue = get(bucket, mbKey, null);
function defaultCalculation(bucket: SeriesBucket, key: string) {
const legacyValue: number = get(bucket, key, null);
const mbValue = bucket.metric_mb_deriv?.normalized_value ?? null;
let value;
if (!isNaN(mbValue) && mbValue > 0) {
if (mbValue !== null && !isNaN(mbValue) && mbValue > 0) {
value = mbValue;
} else {
value = legacyValue;
@ -60,7 +82,7 @@ function defaultCalculation(bucket, key) {
return value;
}
function createMetricAggs(metric) {
function createMetricAggs(metric: Metric) {
if (metric.derivative) {
const mbDerivative = metric.mbField
? {
@@ -90,18 +112,20 @@ function createMetricAggs(metric) {
}
async function fetchSeries(
req,
indexPattern,
metric,
metricOptions,
groupBy,
min,
max,
bucketSize,
filters
req: LegacyRequest,
indexPattern: string,
metric: Metric,
metricOptions: any,
groupBy: string | Record<string, any> | null,
min: string | number,
max: string | number,
bucketSize: number,
filters: Array<Record<string, any>>
) {
// if we're using a derivative metric, offset the min (also @see comment on offsetMinForDerivativeMetric function)
const adjustedMin = metric.derivative ? offsetMinForDerivativeMetric(min, bucketSize) : min;
const adjustedMin = metric.derivative
? offsetMinForDerivativeMetric(Number(min), bucketSize)
: Number(min);
let dateHistogramSubAggs = null;
if (metric.getDateHistogramSubAggs) {
@@ -118,15 +142,15 @@ async function fetchSeries(
...createMetricAggs(metric),
};
if (metric.mbField) {
dateHistogramSubAggs.metric_mb = {
Reflect.set(dateHistogramSubAggs, 'metric_mb', {
[metric.metricAgg]: {
field: metric.mbField,
},
};
});
}
}
let aggs = {
let aggs: any = {
check: {
date_histogram: {
field: metric.timestampField,
@@ -154,7 +178,7 @@ async function fetchSeries(
body: {
query: createQuery({
start: adjustedMin,
end: max,
end: Number(max),
metric,
clusterUuid: req.params.clusterUuid,
// TODO: Pass in the UUID as an explicit function parameter
@@ -165,10 +189,6 @@ async function fetchSeries(
},
};
if (metric.debug) {
console.log('metric.debug', JSON.stringify(params));
}
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return await callWithRequest(req, 'search', params);
}
@@ -180,11 +200,11 @@ async function fetchSeries(
* @param {String} min Min timestamp for results to exist within.
* @return {Number} Index position to use for the first bucket. {@code buckets.length} if none should be used.
*/
function findFirstUsableBucketIndex(buckets, min) {
function findFirstUsableBucketIndex(buckets: SeriesBucket[], min: string | number) {
const minInMillis = moment.utc(min).valueOf();
for (let i = 0; i < buckets.length; ++i) {
const bucketTime = get(buckets, [i, 'key']);
const bucketTime = buckets[i].key;
const bucketTimeInMillis = moment.utc(bucketTime).valueOf();
// if the bucket start time, without knowing the bucket size, is before the filter time, then it's inherently a partial bucket
@@ -208,11 +228,16 @@ function findFirstUsableBucketIndex(buckets, min) {
* @param {Number} bucketSizeInMillis Size of a bucket in milliseconds. Set to 0 to allow partial trailing buckets.
* @return {Number} Index position to use for the last bucket. {@code -1} if none should be used.
*/
function findLastUsableBucketIndex(buckets, max, firstUsableBucketIndex, bucketSizeInMillis = 0) {
function findLastUsableBucketIndex(
buckets: SeriesBucket[],
max: string | number,
firstUsableBucketIndex: number,
bucketSizeInMillis: number = 0
) {
const maxInMillis = moment.utc(max).valueOf();
for (let i = buckets.length - 1; i > firstUsableBucketIndex - 1; --i) {
const bucketTime = get(buckets, [i, 'key']);
const bucketTime = buckets[i].key;
const bucketTimeInMillis = moment.utc(bucketTime).valueOf() + bucketSizeInMillis;
if (bucketTimeInMillis <= maxInMillis) {
@@ -224,41 +249,25 @@ function findLastUsableBucketIndex(buckets, max, firstUsableBucketIndex, bucketS
return -1;
}
const formatBucketSize = (bucketSizeInSeconds) => {
const formatBucketSize = (bucketSizeInSeconds: number) => {
const now = moment();
const timestamp = moment(now).add(bucketSizeInSeconds, 'seconds'); // clone the `now` object
return formatTimestampToDuration(timestamp, CALCULATE_DURATION_UNTIL, now);
};
function isObject(value) {
return typeof value === 'object' && !!value && !Array.isArray(value);
}
function countBuckets(data, count = 0) {
if (data && data.buckets) {
count += data.buckets.length;
for (const bucket of data.buckets) {
for (const key of Object.keys(bucket)) {
if (isObject(bucket[key])) {
count = countBuckets(bucket[key], count);
}
}
}
} else if (data) {
for (const key of Object.keys(data)) {
if (isObject(data[key])) {
count = countBuckets(data[key], count);
}
}
}
return count;
}
function handleSeries(metric, groupBy, min, max, bucketSizeInSeconds, timezone, response) {
function handleSeries(
metric: Metric,
groupBy: string | Record<string, any> | null,
min: string | number,
max: string | number,
bucketSizeInSeconds: number,
timezone: string,
response: ElasticsearchResponse
) {
const { derivative, calculation: customCalculation } = metric;
function getAggregatedData(buckets) {
function getAggregatedData(buckets: SeriesBucket[]) {
const firstUsableBucketIndex = findFirstUsableBucketIndex(buckets, min);
const lastUsableBucketIndex = findLastUsableBucketIndex(
buckets,
@@ -266,20 +275,7 @@ function handleSeries(metric, groupBy, min, max, bucketSizeInSeconds, timezone,
firstUsableBucketIndex,
bucketSizeInSeconds * 1000
);
let data = [];
if (metric.debug) {
console.log(
`metric.debug field=${metric.field} bucketsCreated: ${countBuckets(
get(response, 'aggregations.check')
)}`
);
console.log(`metric.debug`, {
bucketsLength: buckets.length,
firstUsableBucketIndex,
lastUsableBucketIndex,
});
}
let data: Array<[string | number, number | null]> = [];
if (firstUsableBucketIndex <= lastUsableBucketIndex) {
// map buckets to values for charts
@@ -306,15 +302,17 @@ function handleSeries(metric, groupBy, min, max, bucketSizeInSeconds, timezone,
}
if (groupBy) {
return get(response, 'aggregations.groupBy.buckets', []).map((bucket) => {
return {
groupedBy: bucket.key,
...getAggregatedData(get(bucket, 'check.buckets', [])),
};
});
return (response?.aggregations?.groupBy?.buckets ?? []).map(
(bucket: Bucket & { check: { buckets: SeriesBucket[] } }) => {
return {
groupedBy: bucket.key,
...getAggregatedData(bucket.check.buckets ?? []),
};
}
);
}
return getAggregatedData(get(response, 'aggregations.check.buckets', []));
return getAggregatedData(response.aggregations?.check?.buckets ?? []);
}
/**
@@ -329,13 +327,18 @@ function handleSeries(metric, groupBy, min, max, bucketSizeInSeconds, timezone,
* @return {Promise} The object response containing the {@code timeRange}, {@code metric}, and {@code data}.
*/
export async function getSeries(
req,
indexPattern,
metricName,
metricOptions,
filters,
groupBy,
{ min, max, bucketSize, timezone }
req: LegacyRequest,
indexPattern: string,
metricName: string,
metricOptions: Record<string, any>,
filters: Array<Record<string, any>>,
groupBy: string | Record<string, any> | null,
{
min,
max,
bucketSize,
timezone,
}: { min: string | number; max: string | number; bucketSize: number; timezone: string }
) {
checkParam(indexPattern, 'indexPattern in details/getSeries');
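
A worked sketch of the derivative offset documented at the top of this file: the lower bound moves back by two bucket widths so the two unusable derivative buckets fall outside the requested range:

```ts
function offsetMinForDerivativeMetric(minInMsSinceEpoch: number, bucketSizeInSeconds: number) {
  return minInMsSinceEpoch - 2 * bucketSizeInSeconds * 1000;
}

const min = 1627776000000; // requested lower bound, ms since epoch
console.log(offsetMinForDerivativeMetric(min, 30)); // 1627775940000, i.e. min - 60 s
```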

View file

@@ -50,5 +50,5 @@ export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string
const mbCcr = response.hits?.hits[0]?._source?.elasticsearch?.cluster?.stats?.stack?.xpack?.ccr;
const isEnabled = legacyCcr?.enabled ?? mbCcr?.enabled;
const isAvailable = legacyCcr?.available ?? mbCcr?.available;
return isEnabled && isAvailable;
return Boolean(isEnabled && isAvailable);
}

View file

@@ -30,13 +30,13 @@ const CONVERTED_TOKEN = `odh_`;
* @param string prefix - This is the aggregation name prefix where the rest of the name will be the type of aggregation
* @param object metricObj The metric aggregation itself
*/
export function convertMetricNames(prefix, metricObj) {
export function convertMetricNames(prefix: string, metricObj: Record<string, any>) {
return Object.entries(metricObj).reduce((newObj, [key, value]) => {
const newValue = cloneDeep(value);
if (key.includes('_deriv') && newValue.derivative) {
newValue.derivative.buckets_path = `${CONVERTED_TOKEN}${prefix}__${newValue.derivative.buckets_path}`;
}
newObj[`${CONVERTED_TOKEN}${prefix}__${key}`] = newValue;
Reflect.set(newObj, `${CONVERTED_TOKEN}${prefix}__${key}`, newValue);
return newObj;
}, {});
}
@@ -50,32 +50,36 @@ export function convertMetricNames(prefix, metricObj) {
*
* @param object byDateBucketResponse - The response object from the single `date_histogram` bucket
*/
export function uncovertMetricNames(byDateBucketResponse) {
const unconverted = {};
type MetricNameBucket = { key: string; key_as_string: string; doc_count: number } & Record<
string,
any
>;
export function uncovertMetricNames(byDateBucketResponse: { buckets: MetricNameBucket[] }) {
const unconverted: Record<string, { buckets: MetricNameBucket[] }> = {};
for (const metricName of LISTING_METRICS_NAMES) {
unconverted[metricName] = {
buckets: byDateBucketResponse.buckets.map((bucket) => {
const {
// eslint-disable-next-line camelcase
// eslint-disable-next-line @typescript-eslint/naming-convention
key_as_string,
// eslint-disable-next-line camelcase
key,
// eslint-disable-next-line camelcase
// eslint-disable-next-line @typescript-eslint/naming-convention
doc_count,
...rest
} = bucket;
const metrics = Object.entries(rest).reduce((accum, [key, value]) => {
if (key.startsWith(`${CONVERTED_TOKEN}${metricName}`)) {
const name = key.split('__')[1];
const metrics = Object.entries(rest).reduce((accum, [k, value]) => {
if (k.startsWith(`${CONVERTED_TOKEN}${metricName}`)) {
const name = k.split('__')[1];
accum[name] = value;
}
return accum;
}, {});
}, {} as Record<string, any>);
return {
key_as_string /* eslint-disable-line camelcase */,
key_as_string,
key,
doc_count /* eslint-disable-line camelcase */,
doc_count,
...metrics,
};
}),
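
A minimal sketch of the renaming scheme (the derivative `buckets_path` rewrite is omitted here): each agg key gets the `odh_` token plus the metric name, so several metrics can share one `date_histogram` and `uncovertMetricNames` can split them back out:

```ts
const CONVERTED_TOKEN = 'odh_';

// Illustrative stand-in for convertMetricNames, minus the derivative handling.
function convertNames(prefix: string, metricObj: Record<string, any>) {
  return Object.entries(metricObj).reduce<Record<string, any>>((acc, [key, value]) => {
    acc[`${CONVERTED_TOKEN}${prefix}__${key}`] = value;
    return acc;
  }, {});
}

console.log(convertNames('index_mem', { metric: { max: { field: 'mem' } } }));
// { odh_index_mem__metric: { max: { field: 'mem' } } }
```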

View file

@@ -35,10 +35,11 @@ export function handleResponse(
hit.inner_hits?.earliest?.hits?.hits[0]?._source.elasticsearch?.index;
const rateOptions = {
hitTimestamp: hit._source.timestamp ?? hit._source['@timestamp'],
hitTimestamp: hit._source.timestamp ?? hit._source['@timestamp'] ?? null,
earliestHitTimestamp:
hit.inner_hits?.earliest?.hits?.hits[0]?._source.timestamp ??
hit.inner_hits?.earliest?.hits?.hits[0]?._source['@timestamp'],
hit.inner_hits?.earliest?.hits?.hits[0]?._source['@timestamp'] ??
null,
timeWindowMin: min,
timeWindowMax: max,
};

View file

@@ -6,7 +6,7 @@
*/
import { set } from '@elastic/safer-lodash-set';
import { calculateNodeType } from './calculate_node_type.js';
import { calculateNodeType } from './calculate_node_type';
const masterNodeId = 'def456';

View file

@@ -12,16 +12,22 @@
* - client only node: --node.data=false --node.master=false
* https://www.elastic.co/guide/en/elasticsearch/reference/2.x/modules-node.html
*/
import { includes, isUndefined } from 'lodash';
import { isUndefined } from 'lodash';
import { ElasticsearchLegacySource } from '../../../../common/types/es';
export function calculateNodeType(node, masterNodeId) {
export type Node = ElasticsearchLegacySource['source_node'] & {
attributes?: Record<string, any>;
node_ids?: Array<string | boolean | undefined>;
};
export function calculateNodeType(node: Node, masterNodeId?: string | boolean) {
const attrs = node.attributes || {};
function mightBe(attr) {
function mightBe(attr?: string) {
return attr === 'true' || isUndefined(attr);
}
function isNot(attr) {
function isNot(attr?: string) {
return attr === 'false';
}
@@ -30,7 +36,7 @@ export function calculateNodeType(node, masterNodeId) {
if (uuid !== undefined && uuid === masterNodeId) {
return 'master';
}
if (includes(node.node_ids, masterNodeId)) {
if (node.node_ids?.includes(masterNodeId)) {
return 'master';
}
if (isNot(attrs.data) && isNot(attrs.master)) {

View file

@@ -10,7 +10,7 @@
* If node information can't be retrieved, we call this function
* that provides some usable defaults
*/
export function getDefaultNodeFromId(nodeId) {
export function getDefaultNodeFromId(nodeId: string) {
return {
id: nodeId,
name: nodeId,
@@ -20,3 +20,7 @@ export function getDefaultNodeFromId(nodeId) {
attributes: {},
};
}
export function isDefaultNode(node: any): node is ReturnType<typeof getDefaultNodeFromId> {
return !node.uuid;
}
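
A short sketch of the type-guard pattern added above: the placeholder built by `getDefaultNodeFromId` never has a `uuid`, so its absence identifies a default node and narrows the type:

```ts
function getDefaultNodeFromId(nodeId: string) {
  return { id: nodeId, name: nodeId, transport_address: '', attributes: {} };
}

function isDefaultNode(node: any): node is ReturnType<typeof getDefaultNodeFromId> {
  return !node.uuid;
}

const node: any = getDefaultNodeFromId('node-1');
if (isDefaultNode(node)) {
  console.log(node.id); // narrowed to the default-node shape
}
```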

View file

@@ -5,7 +5,6 @@
* 2.0.
*/
import { get } from 'lodash';
import { i18n } from '@kbn/i18n';
// @ts-ignore
import { checkParam } from '../../error_missing_required';
@@ -14,7 +13,7 @@ import { createQuery } from '../../create_query';
// @ts-ignore
import { ElasticsearchMetric } from '../../metrics';
// @ts-ignore
import { getDefaultNodeFromId } from './get_default_node_from_id';
import { getDefaultNodeFromId, isDefaultNode } from './get_default_node_from_id';
// @ts-ignore
import { calculateNodeType } from './calculate_node_type';
// @ts-ignore
@@ -23,7 +22,6 @@ import {
ElasticsearchSource,
ElasticsearchResponse,
ElasticsearchLegacySource,
ElasticsearchMetricbeatNode,
} from '../../../../common/types/es';
import { LegacyRequest } from '../../../types';
@@ -35,9 +33,9 @@ export function handleResponse(
return (response: ElasticsearchResponse) => {
let nodeSummary = {};
const nodeStatsHits = response.hits?.hits ?? [];
const nodes: Array<
ElasticsearchLegacySource['source_node'] | ElasticsearchMetricbeatNode
> = nodeStatsHits.map((hit) => hit._source.elasticsearch?.node || hit._source.source_node); // using [0] value because query results are sorted desc per timestamp
const nodes: Array<ElasticsearchLegacySource['source_node']> = nodeStatsHits.map(
(hit) => hit._source.elasticsearch?.node || hit._source.source_node
); // using [0] value because query results are sorted desc per timestamp
const node = nodes[0] || getDefaultNodeFromId(nodeUuid);
const sourceStats =
response.hits?.hits[0]?._source.elasticsearch?.node?.stats ||
@@ -46,7 +44,7 @@ export function handleResponse(
clusterState && clusterState.nodes ? clusterState.nodes[nodeUuid] : undefined;
const stats = {
resolver: nodeUuid,
node_ids: nodes.map((_node) => node.id || node.uuid),
node_ids: nodes.map((_node) => (isDefaultNode(node) ? node.id : node.id || node.uuid)),
attributes: node.attributes,
transport_address: response.hits?.hits[0]?._source.service?.address || node.transport_address,
name: node.name,
@ -54,8 +52,8 @@ export function handleResponse(
};
if (clusterNode) {
const _shardStats = get(shardStats, ['nodes', nodeUuid], {});
const calculatedNodeType = calculateNodeType(stats, get(clusterState, 'master_node')); // set type for labeling / iconography
const _shardStats = shardStats.nodes[nodeUuid] ?? {};
const calculatedNodeType = calculateNodeType(stats, clusterState?.master_node); // set type for labeling / iconography
const { nodeType, nodeTypeLabel, nodeTypeClass } = getNodeTypeClassLabel(
node,
calculatedNodeType

View file

@ -6,6 +6,10 @@
*/
import { nodeTypeLabel, nodeTypeClass } from './lookups';
import {
ElasticsearchLegacySource,
ElasticsearchMetricbeatNode,
} from '../../../../common/types/es';
/*
* Note: currently only `node` and `master` are supported due to
@ -13,8 +17,11 @@ import { nodeTypeLabel, nodeTypeClass } from './lookups';
* @param {Object} node - a node object from getNodes / getNodeSummary
 * @param {String} type - the node type calculated from `calculateNodeType`
*/
export function getNodeTypeClassLabel(node, type) {
const nodeType = node.master ? 'master' : type;
export function getNodeTypeClassLabel(
node: ElasticsearchLegacySource['source_node'] | ElasticsearchMetricbeatNode,
type: keyof typeof nodeTypeLabel
) {
const nodeType = node && 'master' in node ? 'master' : type;
const returnObj = {
nodeType,
nodeTypeLabel: nodeTypeLabel[nodeType],
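
Typing `type` as keyof typeof nodeTypeLabel restricts callers to keys that actually exist in the lookups table, and `'master' in node` replaces the old truthiness test with a property-presence check the compiler can narrow on. The same pattern in isolation, with a hypothetical table:

const labels = { master: 'Master Node', node: 'Node', data: 'Data Node' };

function label(type: keyof typeof labels, node: object) {
  // Presence of a `master` property wins over the calculated type.
  const resolved = 'master' in node ? 'master' : type;
  return labels[resolved];
}

label('data', { master: true }); // 'Master Node'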

View file

@ -5,7 +5,9 @@
* 2.0.
*/
export async function getLivesNodes(req) {
import { LegacyRequest } from '../../../../types';
export async function getLivesNodes(req: LegacyRequest) {
const params = {
path: '/_nodes',
method: 'GET',

View file

@ -18,7 +18,7 @@ import { convertMetricNames } from '../../convert_metric_names';
* @param {Number} bucketSize: Bucket size in seconds for date histogram interval
* @return {Object} Aggregation DSL
*/
export function getMetricAggs(listingMetrics) {
export function getMetricAggs(listingMetrics: string[]) {
let aggItems = {};
listingMetrics.forEach((metricName) => {

View file

@ -9,8 +9,14 @@ import moment from 'moment';
import { get } from 'lodash';
import { ElasticsearchMetric } from '../../../metrics';
import { createQuery } from '../../../create_query';
import { LegacyRequest, Bucket } from '../../../../types';
export async function getNodeIds(req, indexPattern, { clusterUuid }, size) {
export async function getNodeIds(
req: LegacyRequest,
indexPattern: string,
{ clusterUuid }: { clusterUuid: string },
size: number
) {
const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
@ -55,5 +61,7 @@ export async function getNodeIds(req, indexPattern, { clusterUuid }, size) {
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
const response = await callWithRequest(req, 'search', params);
return get(response, 'aggregations.composite_data.buckets', []).map((bucket) => bucket.key);
return get(response, 'aggregations.composite_data.buckets', []).map(
(bucket: Bucket) => bucket.key
);
}
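
lodash get erases element types, so only the map callback needs an annotation; the Bucket shape comes from ../../../../types. A small self-contained sketch (the response shape is hypothetical):

import { get } from 'lodash';

interface Bucket { key: string }

const response: any = { aggregations: { composite_data: { buckets: [{ key: 'node-1' }] } } };

const ids = get(response, 'aggregations.composite_data.buckets', []).map(
  (bucket: Bucket) => bucket.key
); // ['node-1']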

View file

@ -6,18 +6,12 @@
*/
import moment from 'moment';
// @ts-ignore
import { checkParam } from '../../../error_missing_required';
// @ts-ignore
import { createQuery } from '../../../create_query';
// @ts-ignore
import { calculateAuto } from '../../../calculate_auto';
// @ts-ignore
import { ElasticsearchMetric } from '../../../metrics';
// @ts-ignore
import { getMetricAggs } from './get_metric_aggs';
import { handleResponse } from './handle_response';
// @ts-ignore
import { LISTING_METRICS_NAMES, LISTING_METRICS_PATHS } from './nodes_listing_metrics';
import { LegacyRequest } from '../../../../types';
import { ElasticsearchModifiedSource } from '../../../../../common/types/es';
@ -103,7 +97,7 @@ export async function getNodes(
min_doc_count: 0,
fixed_interval: bucketSize + 's',
},
aggs: getMetricAggs(LISTING_METRICS_NAMES, bucketSize),
aggs: getMetricAggs(LISTING_METRICS_NAMES),
},
},
},

View file

@ -5,12 +5,15 @@
* 2.0.
*/
import { get, isUndefined } from 'lodash';
import { isUndefined } from 'lodash';
import { getNodeIds } from './get_node_ids';
// @ts-ignore
import { filter } from '../../../pagination/filter';
import { sortNodes } from './sort_nodes';
// @ts-ignore
import { paginate } from '../../../pagination/paginate';
import { getMetrics } from '../../../details/get_metrics';
import { LegacyRequest } from '../../../../types';
/**
* This function performs an optimization around the node listing tables in the UI. To avoid
@ -28,25 +31,41 @@ import { getMetrics } from '../../../details/get_metrics';
* @param {*} sort - ({ field, direction })
 * @param {*} queryText - Text that will be used to filter out nodes
*/
interface Node {
name: string;
uuid: string;
isOnline: boolean;
shardCount: number;
}
export async function getPaginatedNodes(
req,
esIndexPattern,
{ clusterUuid },
metricSet,
pagination,
sort,
queryText,
{ clusterStats, nodesShardCount }
req: LegacyRequest,
esIndexPattern: string,
{ clusterUuid }: { clusterUuid: string },
metricSet: string[],
pagination: { index: number; size: number },
sort: { field: string; direction: 'asc' | 'desc' },
queryText: string,
{
clusterStats,
nodesShardCount,
}: {
clusterStats: {
cluster_state: { nodes: Record<string, Node> };
};
nodesShardCount: { nodes: Record<string, { shardCount: number }> };
}
) {
const config = req.server.config();
const size = config.get('monitoring.ui.max_bucket_size');
const nodes = await getNodeIds(req, esIndexPattern, { clusterUuid }, size);
const size = Number(config.get('monitoring.ui.max_bucket_size'));
const nodes: Node[] = await getNodeIds(req, esIndexPattern, { clusterUuid }, size);
// Add `isOnline` and shards from the cluster state and shard stats
const clusterState = get(clusterStats, 'cluster_state', { nodes: {} });
const clusterState = clusterStats?.cluster_state ?? { nodes: {} };
for (const node of nodes) {
node.isOnline = !isUndefined(get(clusterState, ['nodes', node.uuid]));
node.shardCount = get(nodesShardCount, `nodes[${node.uuid}].shardCount`, 0);
node.isOnline = !isUndefined(clusterState?.nodes[node.uuid]);
node.shardCount = nodesShardCount?.nodes[node.uuid]?.shardCount ?? 0;
}
// `metricSet` defines a list of metrics that are sortable in the UI
@ -82,7 +101,7 @@ export async function getPaginatedNodes(
const metricList = metricSeriesData[metricName];
for (const metricItem of metricList[0]) {
const node = nodes.find((node) => node.uuid === metricItem.groupedBy);
const node = nodes.find((n) => n.uuid === metricItem.groupedBy);
if (!node) {
continue;
}
@ -91,7 +110,7 @@ export async function getPaginatedNodes(
if (dataSeries && dataSeries.length) {
const lastItem = dataSeries[dataSeries.length - 1];
if (lastItem.length && lastItem.length === 2) {
node[metricName] = lastItem[1];
Reflect.set(node, metricName, lastItem[1]);
}
}
}
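
node[metricName] = ... no longer compiles once node is the narrow Node interface, because metricName is an arbitrary string with no index signature to match; Reflect.set performs the same assignment without an `as any` cast. The trade-off in isolation (the metric name is hypothetical):

interface Node {
  name: string;
  uuid: string;
  isOnline: boolean;
  shardCount: number;
}

const node: Node = { name: 'a', uuid: '1', isOnline: true, shardCount: 0 };
const metricName = 'node_cpu_utilization'; // dynamic key

// node[metricName] = 0.42;         // error: no index signature on Node
Reflect.set(node, metricName, 0.42); // compiles, but stays unchecked at runtime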

View file

@ -7,9 +7,7 @@
import { get } from 'lodash';
import { mapNodesInfo } from './map_nodes_info';
// @ts-ignore
import { mapNodesMetrics } from './map_nodes_metrics';
// @ts-ignore
import { uncovertMetricNames } from '../../convert_metric_names';
import { ElasticsearchResponse, ElasticsearchModifiedSource } from '../../../../../common/types/es';
@ -26,7 +24,7 @@ export function handleResponse(
clusterStats: ElasticsearchModifiedSource | undefined,
nodesShardCount: { nodes: { [nodeId: string]: { shardCount: number } } } | undefined,
pageOfNodes: Array<{ uuid: string }>,
timeOptions = {}
timeOptions: { min?: number; max?: number; bucketSize?: number } = {}
) {
if (!get(response, 'hits.hits')) {
return [];

View file

@ -6,9 +6,7 @@
*/
import { isUndefined } from 'lodash';
// @ts-ignore
import { calculateNodeType } from '../calculate_node_type';
// @ts-ignore
import { getNodeTypeClassLabel } from '../get_node_type_class_label';
import {
ElasticsearchResponseHit,
@ -31,6 +29,7 @@ export function mapNodesInfo(
return nodeHits.reduce((prev, node) => {
const sourceNode = node._source.source_node || node._source.elasticsearch?.node;
if (!sourceNode) return prev;
const calculatedNodeType = calculateNodeType(sourceNode, clusterState?.master_node);
const { nodeType, nodeTypeLabel, nodeTypeClass } = getNodeTypeClassLabel(

View file

@ -8,8 +8,16 @@
import { get, map, min, max, last } from 'lodash';
import { filterPartialBuckets } from '../../../filter_partial_buckets';
import { metrics } from '../../../metrics';
import { Bucket } from '../../../../types';
function calcSlope(data) {
type MetricBucket = Bucket & { metric_deriv?: { value: number; normalized_value: number } };
interface TimeOptions {
min?: number;
max?: number;
bucketSize?: number;
}
function calcSlope(data: Array<{ x: number; y: number }>) {
const length = data.length;
const xSum = data.reduce((prev, curr) => prev + curr.x, 0);
const ySum = data.reduce((prev, curr) => prev + curr.y, 0);
@ -27,12 +35,15 @@ function calcSlope(data) {
return null; // convert possible NaN to `null` for JSON-friendliness
}
const mapBuckets = (bucket, metric) => {
const mapBuckets = (
bucket: MetricBucket,
metric: { derivative: boolean; calculation: (b: Bucket) => number | null }
) => {
const x = bucket.key;
if (metric.calculation) {
return {
x: bucket.key,
x: Number(bucket.key),
y: metric.calculation(bucket),
};
}
@ -60,12 +71,16 @@ const mapBuckets = (bucket, metric) => {
return { x, y: null };
};
function reduceMetric(metricName, metricBuckets, { min: startTime, max: endTime, bucketSize }) {
function reduceMetric(
metricName: string,
metricBuckets: MetricBucket[],
{ min: startTime, max: endTime, bucketSize }: TimeOptions
) {
if (startTime === undefined || endTime === undefined || startTime >= endTime) {
return null;
}
const partialBucketFilter = filterPartialBuckets(startTime, endTime, bucketSize, {
const partialBucketFilter = filterPartialBuckets(startTime, endTime, bucketSize!, {
ignoreEarly: true,
});
const metric = metrics[metricName];
@ -85,7 +100,7 @@ function reduceMetric(metricName, metricBuckets, { min: startTime, max: endTime,
const minVal = min(map(mappedData, 'y'));
const maxVal = max(map(mappedData, 'y'));
const lastVal = last(map(mappedData, 'y'));
const slope = calcSlope(mappedData) > 0 ? 1 : -1; // no need for the entire precision, it's just an up/down arrow
const slope = Number(calcSlope(mappedData as Array<{ x: number; y: number }>)) > 0 ? 1 : -1; // no need for the entire precision, it's just an up/down arrow
return {
metric: metric.serialize(),
@ -93,14 +108,14 @@ function reduceMetric(metricName, metricBuckets, { min: startTime, max: endTime,
};
}
function reduceAllMetrics(metricSet, timeOptions) {
const metrics = {};
function reduceAllMetrics(metricSet: string[], timeOptions: TimeOptions) {
const reducedMetrics: Record<string, any> = {};
Object.keys(metricSet).forEach((metricName) => {
const metricBuckets = get(metricSet, [metricName, 'buckets']);
metrics[metricName] = reduceMetric(metricName, metricBuckets, timeOptions); // append summarized metric data
reducedMetrics[metricName] = reduceMetric(metricName, metricBuckets, timeOptions); // append summarized metric data
});
return metrics;
return reducedMetrics;
}
/*
@ -112,8 +127,12 @@ function reduceAllMetrics(metricSet, timeOptions) {
* @param {Object} timeOptions: min, max, and bucketSize needed for date histogram creation
* @return {Object} summarized metric data about each node keyed by nodeId
*/
export function mapNodesMetrics(metricsForNodes, nodesInfo, timeOptions) {
const metricRows = {};
export function mapNodesMetrics(
metricsForNodes: Record<string, string[]>,
nodesInfo: Record<string, { isOnline: boolean }>,
timeOptions: TimeOptions
) {
const metricRows: Record<string, any> = {};
Object.keys(metricsForNodes).forEach((nodeId) => {
if (nodesInfo[nodeId].isOnline) {
// only do the work of mapping metrics if the node is online
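
calcSlope, whose body is elided after the first sums in the hunk above, is an ordinary least-squares slope, slope = (n·Σxy − Σx·Σy) / (n·Σx² − (Σx)²), and only its sign survives into the UI arrow. A self-contained sketch of that computation under the new typed signature; the sample points are hypothetical:

function calcSlope(data: Array<{ x: number; y: number }>) {
  const n = data.length;
  const xSum = data.reduce((prev, curr) => prev + curr.x, 0);
  const ySum = data.reduce((prev, curr) => prev + curr.y, 0);
  const xySum = data.reduce((prev, curr) => prev + curr.x * curr.y, 0);
  const xSqSum = data.reduce((prev, curr) => prev + curr.x * curr.x, 0);
  const numerator = n * xySum - xSum * ySum;
  const denominator = n * xSqSum - xSum * xSum;
  return numerator / denominator || null; // NaN becomes null for JSON-friendliness
}

calcSlope([{ x: 1, y: 2 }, { x: 2, y: 4 }, { x: 3, y: 6 }]); // 2 -> rendered as an up arrow (1)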

View file

@ -7,7 +7,9 @@
import { orderBy } from 'lodash';
export function sortNodes(nodes, sort) {
type Node = Record<string, any>;
export function sortNodes(nodes: Node[], sort?: { field: string; direction: 'asc' | 'desc' }) {
if (!sort || !sort.field) {
return nodes;
}

View file

@ -7,6 +7,6 @@
export { getNodes } from './get_nodes';
export { getNodeSummary } from './get_node_summary';
export { calculateNodeType } from './calculate_node_type';
export { calculateNodeType, Node } from './calculate_node_type';
export { getNodeTypeClassLabel } from './get_node_type_class_label';
export { getDefaultNodeFromId } from './get_default_node_from_id';

View file

@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchLegacySource } from '../../../../common/types/es';
export type Node = ElasticsearchLegacySource['source_node'] & {
attributes?: Record<string, any>;
node_ids: Array<string | undefined>;
};

View file

@ -8,7 +8,16 @@
/*
* Calculate totals from mapped indices data
*/
export function calculateIndicesTotals(indices) {
export function calculateIndicesTotals(
indices: Record<
string,
{
primary: number;
replica: number;
unassigned: { primary: number; replica: number };
}
>
) {
// create datasets for each index
const metrics = Object.keys(indices).map((i) => {
const index = indices[i];

View file

@ -9,7 +9,14 @@
* @param {Object} config - Kibana config service
* @param {Boolean} includeNodes - whether to add the aggs for node shards
*/
export function getShardAggs(config, includeNodes, includeIndices) {
import { LegacyServer } from '../../../types';
export function getShardAggs(
config: ReturnType<LegacyServer['config']>,
includeNodes: boolean,
includeIndices: boolean
) {
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const aggSize = 10;
const indicesAgg = {
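
ReturnType<LegacyServer['config']> derives the config-service type from the member it already hangs off of, instead of re-declaring it. The indexed-access pattern in isolation (the interface shape is hypothetical):

interface Server {
  config: () => { get: (key: string) => string | undefined };
}

// Stays in sync automatically if Server['config'] ever changes.
function readMaxBucketSize(config: ReturnType<Server['config']>) {
  return Number(config.get('monitoring.ui.max_bucket_size'));
}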

View file

@ -5,18 +5,18 @@
* 2.0.
*/
import { get } from 'lodash';
// Methods for calculating metrics for
// - Number of Primary Shards
// - Number of Replica Shards
// - Unassigned Primary Shards
// - Unassigned Replica Shards
export function getUnassignedShards(indexShardStats) {
export function getUnassignedShards(indexShardStats: {
unassigned: { primary: number; replica: number };
}) {
let unassignedShards = 0;
unassignedShards += get(indexShardStats, 'unassigned.primary');
unassignedShards += get(indexShardStats, 'unassigned.replica');
unassignedShards += indexShardStats.unassigned.primary;
unassignedShards += indexShardStats.unassigned.replica;
return unassignedShards;
}

View file

@ -5,8 +5,8 @@
* 2.0.
*/
import { get, partition } from 'lodash';
import { calculateNodeType } from '../nodes';
import { partition } from 'lodash';
import { calculateNodeType, Node } from '../nodes';
/*
* Reducer function for a set of nodes to key the array by nodeId, summarize
@ -14,8 +14,28 @@ import { calculateNodeType } from '../nodes';
* @param masterNode = nodeId of master node
* @return reducer function for set of nodes
*/
export function normalizeNodeShards(masterNode) {
return (nodes, node) => {
type NodeShard = Node & {
key: string;
node_ids: { buckets: Array<{ key: string }> };
node_names: { buckets: Array<{ key: string }> };
index_count: { value: number };
doc_count: number;
};
interface ShardBucket {
key: string;
primary: {
buckets: Array<{
key: string;
key_as_string: string;
doc_count: number;
}>;
};
}
export function normalizeNodeShards(masterNode: string) {
return (nodes: NodeShard[], node: NodeShard) => {
if (node.key && node.node_ids) {
const nodeIds = node.node_ids.buckets.map((b) => b.key);
const _node = {
@ -27,8 +47,8 @@ export function normalizeNodeShards(masterNode) {
...nodes,
[node.key]: {
shardCount: node.doc_count,
indexCount: get(node, 'index_count.value'),
name: get(node, 'node_names.buckets[0].key'),
indexCount: node.index_count.value,
name: node.node_names.buckets[0].key,
node_ids: nodeIds,
type: calculateNodeType(_node, masterNode), // put the "star" icon on the node link in the shard allocator
},
@ -38,12 +58,12 @@ export function normalizeNodeShards(masterNode) {
};
}
const countShards = (shardBuckets) => {
const countShards = (shardBuckets: ShardBucket[]) => {
let primaryShards = 0;
let replicaShards = 0;
shardBuckets.forEach((shard) => {
const primaryMap = get(shard, 'primary.buckets', []);
const primaryMap = shard.primary.buckets ?? [];
const primaryBucket = primaryMap.find((b) => b.key_as_string === 'true');
if (primaryBucket !== undefined) {
@ -62,13 +82,18 @@ const countShards = (shardBuckets) => {
};
};
interface Index {
key: string;
states?: { buckets?: ShardBucket[] };
}
/*
* Reducer function for a set of indices to key the array by index name, and
* summarize the shard data.
* @return reducer function for set of indices
*/
export function normalizeIndexShards(indices, index) {
const stateBuckets = get(index, 'states.buckets', []);
export function normalizeIndexShards(indices: Index[], index: Index) {
const stateBuckets = index.states?.buckets ?? [];
const [assignedShardBuckets, unassignedShardBuckets] = partition(stateBuckets, (b) => {
return b.key === 'STARTED' || b.key === 'RELOCATING';
});
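
Both exports are meant for Array.prototype.reduce: normalizeNodeShards returns the reducer, normalizeIndexShards is one. A hedged usage sketch (the bucket data is hypothetical, and the seed is cast loosely because the committed accumulator type is an array while the value built is a keyed object):

import { normalizeNodeShards } from './normalize_shard_objects';

const nodeBuckets: any[] = [
  {
    key: 'node-1',
    doc_count: 12,
    node_ids: { buckets: [{ key: 'node-1' }] },
    node_names: { buckets: [{ key: 'es-data-0' }] },
    index_count: { value: 3 },
  },
];

const byNodeId = nodeBuckets.reduce(normalizeNodeShards('master-uuid') as any, {});
// byNodeId['node-1'] -> { shardCount: 12, indexCount: 3, name: 'es-data-0', ... }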

View file

@ -8,6 +8,7 @@
import { get } from 'lodash';
import Boom from '@hapi/boom';
import { INDEX_PATTERN } from '../../../common/constants';
import { LegacyRequest } from '../../types';
/*
* Check the currently logged-in user's privileges for "read" privileges on the
@ -16,7 +17,7 @@ import { INDEX_PATTERN } from '../../../common/constants';
*
* @param req {Object} the server route handler request object
*/
export async function verifyMonitoringAuth(req) {
export async function verifyMonitoringAuth(req: LegacyRequest) {
const xpackInfo = get(req.server.plugins.monitoring, 'info');
if (xpackInfo) {
@ -37,7 +38,7 @@ export async function verifyMonitoringAuth(req) {
* @param req {Object} the server route handler request object
* @return {Promise} That either resolves with no response (void) or an exception.
*/
async function verifyHasPrivileges(req) {
async function verifyHasPrivileges(req: LegacyRequest) {
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
let response;

View file

@ -10,7 +10,7 @@
* @param param - anything
* @param context {String} calling context used in the error message
*/
export function checkParam(param, context) {
export function checkParam(param: any, context: string) {
if (!param) {
throw new MissingRequiredError(context);
}
@ -21,10 +21,12 @@ export function checkParam(param, context) {
* - verification in unit tests
* see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error
*/
export function MissingRequiredError(param) {
this.name = 'MissingRequiredError';
this.message = `Missing required parameter or field: ${param}`;
this.stack = new Error().stack;
export class MissingRequiredError extends Error {
constructor(param: string) {
super();
this.name = 'MissingRequiredError';
this.message = `Missing required parameter or field: ${param}`;
this.stack = new Error().stack;
}
}
MissingRequiredError.prototype = Object.create(Error.prototype);
MissingRequiredError.prototype.constructor = MissingRequiredError;
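
The class form replaces the constructor-function-plus-prototype wiring (the two prototype lines above are the deleted legacy half), and instanceof plus the custom name keep working under a modern compile target. A minimal check, assuming the exports above:

import { checkParam, MissingRequiredError } from './error_missing_required';

try {
  checkParam(undefined, 'kbnIndexPattern in kibana/getKibanasForClusters');
} catch (err) {
  const isMissing = err instanceof MissingRequiredError; // true
  console.log(isMissing, (err as Error).message);
}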

View file

@ -7,22 +7,31 @@
import moment from 'moment';
interface Bucket {
key: string;
}
/* calling .subtract or .add on a moment object mutates the object
* so this function shortcuts creating a fresh object */
function getTime(bucket) {
function getTime(bucket: Bucket) {
return moment.utc(bucket.key);
}
/* find the milliseconds of difference between 2 moment objects */
function getDelta(t1, t2) {
function getDelta(t1: number, t2: number) {
return moment.duration(t1 - t2).asMilliseconds();
}
export function filterPartialBuckets(min, max, bucketSize, options = {}) {
return (bucket) => {
export function filterPartialBuckets(
min: number,
max: number,
bucketSize: number,
options: { ignoreEarly?: boolean } = {}
) {
return (bucket: Bucket) => {
const bucketTime = getTime(bucket);
// timestamp is too late to be complete
if (getDelta(max, bucketTime.add(bucketSize, 'seconds')) < 0) {
if (getDelta(max, bucketTime.add(bucketSize, 'seconds').valueOf()) < 0) {
return false;
}
@ -32,7 +41,7 @@ export function filterPartialBuckets(min, max, bucketSize, options = {}) {
* ignoreEarly */
if (options.ignoreEarly !== true) {
// timestamp is too early to be complete
if (getDelta(bucketTime.subtract(bucketSize, 'seconds'), min) < 0) {
if (getDelta(bucketTime.subtract(bucketSize, 'seconds').valueOf(), min) < 0) {
return false;
}
}
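
filterPartialBuckets is a predicate factory for Array.filter, and the .valueOf() calls are needed because moment's add/subtract return Moment objects rather than the numbers getDelta now requires. A sketch with hypothetical timestamps (buckets that pass both cutoffs are assumed kept):

import moment from 'moment';
import { filterPartialBuckets } from './filter_partial_buckets';

const min = moment.utc('2021-08-01T00:00:00Z').valueOf();
const max = moment.utc('2021-08-01T01:00:00Z').valueOf();

const buckets = [
  { key: '2021-08-01T00:00:00Z' }, // fits entirely inside the window
  { key: '2021-08-01T00:59:50Z' }, // a 30s bucket ending past `max` -> dropped
];

const complete = buckets.filter(filterPartialBuckets(min, max, 30, { ignoreEarly: true }));
// complete.length === 1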

View file

@ -16,11 +16,11 @@ import moment from 'moment';
* @param {*} utcTimestamp UTC timestamp
* @param {*} timezone The timezone to convert into
*/
export const formatUTCTimestampForTimezone = (utcTimestamp, timezone) => {
export const formatUTCTimestampForTimezone = (utcTimestamp: string | number, timezone: string) => {
if (timezone === 'Browser') {
return utcTimestamp;
}
const offsetInMinutes = moment.tz(timezone).utcOffset();
const offsetTimestamp = utcTimestamp + offsetInMinutes * 1 * 60 * 1000;
const offsetTimestamp = Number(utcTimestamp) + offsetInMinutes * 1 * 60 * 1000;
return offsetTimestamp;
};
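
The conversion shifts the UTC epoch millis by the zone's current offset, and Number() collapses the string | number union from the new signature. For example, assuming moment-timezone data is loaded:

import moment from 'moment-timezone';

const utcTimestamp = 1627948800000; // 2021-08-03T00:00:00Z (hypothetical)
const offsetInMinutes = moment.tz('America/Chicago').utcOffset(); // -300 during CDT
const offsetTimestamp = Number(utcTimestamp) + offsetInMinutes * 60 * 1000;
// 1627930800000, which formats as the Chicago wall-clock time 2021-08-02T19:00:00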

View file

@ -5,6 +5,8 @@
* 2.0.
*/
export async function getTimezone(req) {
import { LegacyRequest } from '../types';
export async function getTimezone(req: LegacyRequest) {
return await req.getUiSettingsService().get('dateFormat:tz');
}

View file

@ -5,6 +5,8 @@
* 2.0.
*/
import { ElasticsearchResponse, ElasticsearchResponseHit } from '../../common/types/es';
export const response = {
hits: {
hits: [
@ -83,20 +85,22 @@ export const response = {
},
};
export const defaultResponseSort = (handleResponse) => {
const responseMulti = { hits: { hits: [] } };
export const defaultResponseSort = (
handleResponse: (r: ElasticsearchResponse, n1: number, n2: number) => any
) => {
const responseMulti = { hits: { hits: [] as ElasticsearchResponseHit[] } };
const hit = response.hits.hits[0];
const version = ['6.6.2', '7.0.0-rc1', '6.7.1'];
for (let i = 0, l = version.length; i < l; ++i) {
// Deep clone the object to preserve the original
const newBeat = JSON.parse(JSON.stringify({ ...hit }));
const newBeat: ElasticsearchResponseHit = JSON.parse(JSON.stringify({ ...hit }));
const { beats_stats: beatsStats } = newBeat._source;
beatsStats.timestamp = `2019-01-0${i + 1}T05:00:00.000Z`;
beatsStats.beat.version = version[i];
beatsStats.beat.uuid = `${i}${beatsStats.beat.uuid}`;
beatsStats!.timestamp = `2019-01-0${i + 1}T05:00:00.000Z`;
beatsStats!.beat!.version = version[i];
beatsStats!.beat!.uuid = `${i}${beatsStats!.beat!.uuid}`;
responseMulti.hits.hits.push(newBeat);
}
return { beats: handleResponse(responseMulti, 0, 0), version };
return { beats: handleResponse(responseMulti as ElasticsearchResponse, 0, 0), version };
};
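
JSON.parse(JSON.stringify(...)) is the fixture's cheap deep clone, so each synthetic beat can be mutated without touching the shared hit; the non-null assertions are then required because the clone keeps the optional types. The pattern in isolation (the hit shape is hypothetical):

interface Hit {
  _source: { beats_stats?: { timestamp?: string; beat?: { uuid: string } } };
}

const hit: Hit = { _source: { beats_stats: { timestamp: 't0', beat: { uuid: 'u1' } } } };

const clone: Hit = JSON.parse(JSON.stringify(hit)); // drops functions/undefined; fine for JSON fixtures
clone._source.beats_stats!.timestamp = '2019-01-01T05:00:00.000Z';
// hit._source.beats_stats!.timestamp is still 't0'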

View file

@ -7,7 +7,7 @@
import { merge } from 'lodash';
// @ts-ignore
import { checkParam } from '../error_missing_required';
import { checkParam, MissingRequiredError } from '../error_missing_required';
// @ts-ignore
import { calculateAvailability } from '../calculate_availability';
import { LegacyRequest } from '../../types';
@ -17,10 +17,13 @@ export function handleResponse(resp: ElasticsearchResponse) {
const legacySource = resp.hits?.hits[0]?._source.kibana_stats;
const mbSource = resp.hits?.hits[0]?._source.kibana?.stats;
const kibana = resp.hits?.hits[0]?._source.kibana?.kibana ?? legacySource?.kibana;
const availabilityTimestamp =
resp.hits?.hits[0]?._source['@timestamp'] ?? legacySource?.timestamp;
if (!availabilityTimestamp) {
throw new MissingRequiredError('timestamp');
}
return merge(kibana, {
availability: calculateAvailability(
resp.hits?.hits[0]?._source['@timestamp'] ?? legacySource?.timestamp
),
availability: calculateAvailability(availabilityTimestamp),
os_memory_free: mbSource?.os?.memory?.free_in_bytes ?? legacySource?.os?.memory?.free_in_bytes,
uptime: mbSource?.process?.uptime?.ms ?? legacySource?.process?.uptime_in_millis,
});

View file

@ -6,9 +6,10 @@
*/
import Bluebird from 'bluebird';
import { chain, find, get } from 'lodash';
import { chain, find } from 'lodash';
import { LegacyRequest, Cluster, Bucket } from '../../types';
import { checkParam } from '../error_missing_required';
import { createQuery } from '../create_query.js';
import { createQuery } from '../create_query';
import { KibanaClusterMetric } from '../metrics';
/*
@ -24,7 +25,11 @@ import { KibanaClusterMetric } from '../metrics';
* - number of instances
* - combined health
*/
export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
export function getKibanasForClusters(
req: LegacyRequest,
kbnIndexPattern: string,
clusters: Cluster[]
) {
checkParam(kbnIndexPattern, 'kbnIndexPattern in kibana/getKibanasForClusters');
const config = req.server.config();
@ -32,7 +37,7 @@ export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
const end = req.payload.timeRange.max;
return Bluebird.map(clusters, (cluster) => {
const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
const clusterUuid = cluster.elasticsearch?.cluster?.id ?? cluster.cluster_uuid;
const metric = KibanaClusterMetric.getMetricFields();
const params = {
index: kbnIndexPattern,
@ -162,9 +167,9 @@ export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return callWithRequest(req, 'search', params).then((result) => {
const aggregations = get(result, 'aggregations', {});
const kibanaUuids = get(aggregations, 'kibana_uuids.buckets', []);
const statusBuckets = get(aggregations, 'status.buckets', []);
const aggregations = result.aggregations ?? {};
const kibanaUuids = aggregations.kibana_uuids?.buckets ?? [];
const statusBuckets = aggregations.status?.buckets ?? [];
// everything is initialized such that it won't impact any rollup
let status = null;
@ -185,19 +190,19 @@ export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
statusBuckets,
(bucket) => bucket.max_timestamp.value === latestTimestamp
);
status = get(latestBucket, 'key');
status = latestBucket.key;
requestsTotal = get(aggregations, 'requests_total.value');
connections = get(aggregations, 'concurrent_connections.value');
responseTime = get(aggregations, 'response_time_max.value');
memorySize = get(aggregations, 'memory_rss.value'); // resident set size
memoryLimit = get(aggregations, 'memory_heap_size_limit.value'); // max old space
requestsTotal = aggregations.requests_total?.value;
connections = aggregations.concurrent_connections?.value;
responseTime = aggregations.response_time_max?.value;
memorySize = aggregations.memory_rss?.value;
memoryLimit = aggregations.memory_heap_size_limit?.value;
}
return {
clusterUuid,
stats: {
uuids: get(aggregations, 'kibana_uuids.buckets', []).map(({ key }) => key),
uuids: kibanaUuids.map(({ key }: Bucket) => key),
status,
requests_total: requestsTotal,
concurrent_connections: connections,
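
The lodash-to-optional-chaining translation used throughout this file is mechanical: get(x, 'a.b', d) becomes x.a?.b ?? d (modulo null vs undefined defaults), which the typed ES response can actually check. The equivalence side by side, with a hypothetical aggregations object:

import { get } from 'lodash';

const result: any = { aggregations: { kibana_uuids: { buckets: [{ key: 'kb-1' }] } } };

const viaLodash = get(result, 'aggregations.kibana_uuids.buckets', []);
const viaChaining = result.aggregations?.kibana_uuids?.buckets ?? [];
// Both yield [{ key: 'kb-1' }]; only the second survives strict typing without string paths.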

View file

@ -5,13 +5,21 @@
* 2.0.
*/
import { LegacyRequest } from '../../types';
import { createTimeFilter } from '../create_query';
import { get } from 'lodash';
interface Opts {
start: number;
end: number;
clusterUuid?: string;
nodeUuid?: string;
indexUuid?: string;
}
async function doesFilebeatIndexExist(
req,
filebeatIndexPattern,
{ start, end, clusterUuid, nodeUuid, indexUuid }
req: LegacyRequest,
filebeatIndexPattern: string,
{ start, end, clusterUuid, nodeUuid, indexUuid }: Opts
) {
const metric = { timestampField: '@timestamp' };
const filter = [createTimeFilter({ start, end, metric })];
@ -122,18 +130,18 @@ async function doesFilebeatIndexExist(
} = await callWithRequest(req, 'msearch', { body });
return {
indexPatternExists: get(indexPatternExistsResponse, 'hits.total.value', 0) > 0,
indexPatternExists: (indexPatternExistsResponse?.hits?.total.value ?? 0) > 0,
indexPatternInTimeRangeExists:
get(indexPatternExistsInTimeRangeResponse, 'hits.total.value', 0) > 0,
typeExistsAtAnyTime: get(typeExistsAtAnyTimeResponse, 'hits.total.value', 0) > 0,
typeExists: get(typeExistsResponse, 'hits.total.value', 0) > 0,
usingStructuredLogs: get(usingStructuredLogsResponse, 'hits.total.value', 0) > 0,
clusterExists: clusterUuid ? get(clusterExistsResponse, 'hits.total.value', 0) > 0 : null,
nodeExists: nodeUuid ? get(nodeExistsResponse, 'hits.total.value', 0) > 0 : null,
indexExists: indexUuid ? get(indexExistsResponse, 'hits.total.value', 0) > 0 : null,
(indexPatternExistsInTimeRangeResponse?.hits?.total.value ?? 0) > 0,
typeExistsAtAnyTime: (typeExistsAtAnyTimeResponse?.hits?.total.value ?? 0) > 0,
typeExists: (typeExistsResponse?.hits?.total.value ?? 0) > 0,
usingStructuredLogs: (usingStructuredLogsResponse?.hits?.total.value ?? 0) > 0,
clusterExists: clusterUuid ? (clusterExistsResponse?.hits?.total.value ?? 0) > 0 : null,
nodeExists: nodeUuid ? (nodeExistsResponse?.hits?.total.value ?? 0) > 0 : null,
indexExists: indexUuid ? (indexExistsResponse?.hits?.total.value ?? 0) > 0 : null,
};
}
export async function detectReason(req, filebeatIndexPattern, opts) {
export async function detectReason(req: LegacyRequest, filebeatIndexPattern: string, opts: Opts) {
return await doesFilebeatIndexExist(req, filebeatIndexPattern, opts);
}

View file

@ -5,7 +5,7 @@
* 2.0.
*/
export function detectReasonFromException(exception) {
export function detectReasonFromException(exception: Error & { status: number }) {
const reason = { correctIndexName: true };
if (exception) {

View file

@ -5,13 +5,9 @@
* 2.0.
*/
// @ts-ignore
import { checkParam } from '../error_missing_required';
// @ts-ignore
import { createTimeFilter } from '../create_query';
// @ts-ignore
import { createTimeFilter, TimerangeFilter } from '../create_query';
import { detectReason } from './detect_reason';
// @ts-ignore
import { detectReasonFromException } from './detect_reason_from_exception';
import { LegacyRequest } from '../../types';
import { FilebeatResponse } from '../../../common/types/filebeat';
@ -25,7 +21,7 @@ async function handleResponse(
response: FilebeatResponse,
req: LegacyRequest,
filebeatIndexPattern: string,
opts: { clusterUuid: string; nodeUuid: string; indexUuid: string; start: number; end: number }
opts: { clusterUuid?: string; nodeUuid?: string; indexUuid?: string; start: number; end: number }
) {
const result: { enabled: boolean; types: LogType[]; reason?: any } = {
enabled: false,
@ -62,12 +58,12 @@ export async function getLogTypes(
indexUuid,
start,
end,
}: { clusterUuid: string; nodeUuid: string; indexUuid: string; start: number; end: number }
}: { clusterUuid?: string; nodeUuid?: string; indexUuid?: string; start: number; end: number }
) {
checkParam(filebeatIndexPattern, 'filebeatIndexPattern in logs/getLogTypes');
const metric = { timestampField: '@timestamp' };
const filter = [
const filter: Array<{ term: { [x: string]: string } } | TimerangeFilter | null> = [
{ term: { 'service.type': 'elasticsearch' } },
createTimeFilter({ start, end, metric }),
];

View file

@ -22,7 +22,7 @@ import { LegacyRequest } from '../../types';
import { FilebeatResponse } from '../../../common/types/filebeat';
interface Log {
timestamp?: string;
timestamp?: string | number;
component?: string;
node?: string;
index?: string;
@ -83,7 +83,7 @@ export async function getLogs(
checkParam(filebeatIndexPattern, 'filebeatIndexPattern in logs/getLogs');
const metric = { timestampField: '@timestamp' };
const filter = [
const filter: any[] = [
{ term: { 'service.type': 'elasticsearch' } },
createTimeFilter({ start, end, metric }),
];

View file

@ -7,14 +7,15 @@
import Bluebird from 'bluebird';
import { get } from 'lodash';
import { checkParam } from '../error_missing_required';
import { createQuery } from '../create_query.js';
import { LogstashClusterMetric } from '../metrics';
import { LegacyRequest, Cluster, Bucket } from '../../types';
import { LOGSTASH } from '../../../common/constants';
import { checkParam } from '../error_missing_required';
import { createQuery } from '../create_query';
import { LogstashClusterMetric } from '../metrics';
const { MEMORY, PERSISTED } = LOGSTASH.QUEUE_TYPES;
const getQueueTypes = (queueBuckets) => {
const getQueueTypes = (queueBuckets: Array<Bucket & { num_pipelines: { value: number } }>) => {
const memory = queueBuckets.find((bucket) => bucket.key === MEMORY);
const persisted = queueBuckets.find((bucket) => bucket.key === PERSISTED);
return {
@ -36,7 +37,11 @@ const getQueueTypes = (queueBuckets) => {
* - number of instances
* - combined health
*/
export function getLogstashForClusters(req, lsIndexPattern, clusters) {
export function getLogstashForClusters(
req: LegacyRequest,
lsIndexPattern: string,
clusters: Cluster[]
) {
checkParam(lsIndexPattern, 'lsIndexPattern in logstash/getLogstashForClusters');
const start = req.payload.timeRange.min;
@ -226,7 +231,7 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
let types = get(aggregations, 'pipelines_nested_mb.queue_types.buckets', []);
if (!types || types.length === 0) {
types = get(aggregations, 'pipelines_nested.queue_types.buckets', []);
types = aggregations.pipelines_nested?.queue_types.buckets ?? [];
}
return {
@ -242,7 +247,7 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
get(aggregations, 'pipelines_nested_mb.pipelines.value') ||
get(aggregations, 'pipelines_nested.pipelines.value', 0),
queue_types: getQueueTypes(types),
versions: logstashVersions.map((versionBucket) => versionBucket.key),
versions: logstashVersions.map((versionBucket: Bucket) => versionBucket.key),
},
};
});

View file

@ -7,7 +7,7 @@
import { merge } from 'lodash';
// @ts-ignore
import { checkParam } from '../error_missing_required';
import { checkParam, MissingRequiredError } from '../error_missing_required';
// @ts-ignore
import { calculateAvailability } from '../calculate_availability';
import { LegacyRequest } from '../../types';
@ -20,8 +20,12 @@ export function handleResponse(resp: ElasticsearchResponse) {
const legacyStats = resp.hits?.hits[0]?._source?.logstash_stats;
const mbStats = resp.hits?.hits[0]?._source?.logstash?.node?.stats;
const logstash = mbStats?.logstash ?? legacyStats?.logstash;
const availabilityTimestamp = mbStats?.timestamp ?? legacyStats?.timestamp;
if (!availabilityTimestamp) {
throw new MissingRequiredError('timestamp');
}
const info = merge(logstash, {
availability: calculateAvailability(mbStats?.timestamp ?? legacyStats?.timestamp),
availability: calculateAvailability(availabilityTimestamp),
events: mbStats?.events ?? legacyStats?.events,
reloads: mbStats?.reloads ?? legacyStats?.reloads,
queue_type: mbStats?.queue?.type ?? legacyStats?.queue?.type,

View file

@ -132,9 +132,9 @@ export async function getPipeline(
// Determine metrics' timeseries interval based on version's timespan
const minIntervalSeconds = config.get('monitoring.ui.min_interval_seconds');
const timeseriesInterval = calculateTimeseriesInterval(
version.firstSeen,
version.lastSeen,
minIntervalSeconds
Number(version.firstSeen),
Number(version.lastSeen),
Number(minIntervalSeconds)
);
const [stateDocument, statsAggregation] = await Promise.all([

View file

@ -7,14 +7,15 @@
import moment from 'moment';
import { get } from 'lodash';
import { LegacyRequest, Bucket } from '../../types';
import { createQuery } from '../create_query';
import { LogstashMetric } from '../metrics';
export async function getLogstashPipelineIds(
req,
logstashIndexPattern,
{ clusterUuid, logstashUuid },
size
req: LegacyRequest,
logstashIndexPattern: string,
{ clusterUuid, logstashUuid }: { clusterUuid: string; logstashUuid?: string },
size: number
) {
const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
@ -100,14 +101,14 @@ export async function getLogstashPipelineIds(
if (!buckets || buckets.length === 0) {
buckets = get(response, 'aggregations.nest.id.buckets', []);
}
return buckets.map((bucket) => {
return buckets.map((bucket: Bucket) => {
let nodeBuckets = get(bucket, 'unnest_mb.nodes.buckets', []);
if (!nodeBuckets || nodeBuckets.length === 0) {
nodeBuckets = get(bucket, 'unnest.nodes.buckets', []);
}
return {
id: bucket.key,
nodeIds: nodeBuckets.map((item) => item.key),
nodeIds: nodeBuckets.map((item: Bucket) => item.key),
};
});
}

View file

@ -150,9 +150,9 @@ export async function getPipelineVertex(
// Determine metrics' timeseries interval based on version's timespan
const minIntervalSeconds = config.get('monitoring.ui.min_interval_seconds');
const timeseriesInterval = calculateTimeseriesInterval(
version.firstSeen,
version.lastSeen,
minIntervalSeconds
Number(version.firstSeen),
Number(version.lastSeen),
Number(minIntervalSeconds)
);
const [stateDocument, statsAggregation] = await Promise.all([

View file

@ -5,12 +5,16 @@
* 2.0.
*/
/* eslint-disable max-classes-per-file */
import { i18n } from '@kbn/i18n';
// @ts-ignore
import { ClusterMetric, Metric } from '../classes';
import { SMALL_FLOAT, LARGE_FLOAT } from '../../../../common/formatting';
import { i18n } from '@kbn/i18n';
import { NORMALIZED_DERIVATIVE_UNIT } from '../../../../common/constants';
export class ApmClusterMetric extends ClusterMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -28,6 +32,7 @@ export class ApmClusterMetric extends ClusterMetric {
}
export class ApmMetric extends Metric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -44,7 +49,10 @@ export class ApmMetric extends Metric {
}
}
export type ApmMetricFields = ReturnType<typeof ApmMetric.getMetricFields>;
export class ApmCpuUtilizationMetric extends ApmMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -57,6 +65,7 @@ export class ApmCpuUtilizationMetric extends ApmMetric {
/*
* Convert a counter of milliseconds of utilization time into a percentage of the bucket size
*/
// @ts-ignore
this.calculation = ({ metric_deriv: metricDeriv } = {}, _key, _metric, bucketSizeInSeconds) => {
if (metricDeriv) {
const { value: metricDerivValue } = metricDeriv;
@ -72,6 +81,7 @@ export class ApmCpuUtilizationMetric extends ApmMetric {
}
export class ApmEventsRateClusterMetric extends ApmClusterMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -83,6 +93,7 @@ export class ApmEventsRateClusterMetric extends ApmClusterMetric {
}),
});
// @ts-ignore
this.aggs = {
beats_uuids: {
terms: {
@ -92,6 +103,7 @@ export class ApmEventsRateClusterMetric extends ApmClusterMetric {
aggs: {
event_rate_per_beat: {
max: {
// @ts-ignore
field: this.field,
},
},

View file

@ -5,16 +5,20 @@
* 2.0.
*/
/* eslint-disable max-classes-per-file */
import { i18n } from '@kbn/i18n';
// @ts-ignore
import { ClusterMetric, Metric } from '../classes';
import { SMALL_FLOAT, LARGE_FLOAT, LARGE_BYTES } from '../../../../common/formatting';
import { NORMALIZED_DERIVATIVE_UNIT } from '../../../../common/constants';
import { i18n } from '@kbn/i18n';
const perSecondUnitLabel = i18n.translate('xpack.monitoring.metrics.beats.perSecondUnitLabel', {
defaultMessage: '/s',
});
export class BeatsClusterMetric extends ClusterMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -32,6 +36,7 @@ export class BeatsClusterMetric extends ClusterMetric {
}
export class BeatsEventsRateClusterMetric extends BeatsClusterMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -40,6 +45,7 @@ export class BeatsEventsRateClusterMetric extends BeatsClusterMetric {
metricAgg: 'max',
units: perSecondUnitLabel,
});
// @ts-ignore
this.aggs = {
beats_uuids: {
@ -50,6 +56,7 @@ export class BeatsEventsRateClusterMetric extends BeatsClusterMetric {
aggs: {
event_rate_per_beat: {
max: {
// @ts-ignore
field: this.field,
},
},
@ -73,6 +80,7 @@ export class BeatsEventsRateClusterMetric extends BeatsClusterMetric {
}
export class BeatsMetric extends Metric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -89,7 +97,10 @@ export class BeatsMetric extends Metric {
}
}
export type BeatsMetricFields = ReturnType<typeof BeatsMetric.getMetricFields>;
export class BeatsByteRateClusterMetric extends BeatsEventsRateClusterMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -99,6 +110,7 @@ export class BeatsByteRateClusterMetric extends BeatsEventsRateClusterMetric {
}
export class BeatsEventsRateMetric extends BeatsMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -111,6 +123,7 @@ export class BeatsEventsRateMetric extends BeatsMetric {
}
export class BeatsByteRateMetric extends BeatsMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -123,6 +136,7 @@ export class BeatsByteRateMetric extends BeatsMetric {
}
export class BeatsCpuUtilizationMetric extends BeatsMetric {
// @ts-ignore
constructor(opts) {
super({
...opts,
@ -135,6 +149,7 @@ export class BeatsCpuUtilizationMetric extends BeatsMetric {
/*
* Convert a counter of milliseconds of utilization time into a percentage of the bucket size
*/
// @ts-ignore
this.calculation = ({ metric_deriv: metricDeriv } = {}, _key, _metric, bucketSizeInSeconds) => {
if (metricDeriv) {
const { value } = metricDeriv;

View file

@ -5,9 +5,13 @@
* 2.0.
*/
// @ts-ignore
export { ElasticsearchMetric } from './elasticsearch/classes';
// @ts-ignore
export { KibanaClusterMetric, KibanaMetric } from './kibana/classes';
export { ApmMetric, ApmClusterMetric } from './apm/classes';
export { ApmMetric, ApmClusterMetric, ApmMetricFields } from './apm/classes';
// @ts-ignore
export { LogstashClusterMetric, LogstashMetric } from './logstash/classes';
export { BeatsClusterMetric, BeatsMetric } from './beats/classes';
export { BeatsClusterMetric, BeatsMetric, BeatsMetricFields } from './beats/classes';
// @ts-ignore
export { metrics } from './metrics';

View file

@ -6,7 +6,7 @@
*/
import { escape } from 'lodash';
export function normalizeVersionString(string) {
export function normalizeVersionString(string: string) {
if (string) {
// get just the number.number.number portion (filter out '-snapshot')
const matches = string.match(/^\d+\.\d+.\d+/);

View file

@ -5,6 +5,9 @@
* 2.0.
*/
// @ts-ignore
export { hasStandaloneClusters } from './has_standalone_clusters';
// @ts-ignore
export { getStandaloneClusterDefinition } from './get_standalone_cluster_definition';
// @ts-ignore
export { standaloneClusterFilter } from './standalone_cluster_query_filter';

View file

@ -41,7 +41,6 @@ export function alertStatusRoute(server: any, npRoute: RouteDependencies) {
const status = await fetchStatus(
rulesClient,
npRoute.licenseService,
alertTypeIds,
[clusterUuid],
filters as CommonAlertFilter[]

View file

@ -7,10 +7,12 @@
import { schema } from '@kbn/config-schema';
import { getClustersFromRequest } from '../../../../lib/cluster/get_clusters_from_request';
// @ts-ignore
import { handleError } from '../../../../lib/errors';
import { getIndexPatterns } from '../../../../lib/cluster/get_index_patterns';
import { LegacyRequest, LegacyServer } from '../../../../types';
export function clusterRoute(server) {
export function clusterRoute(server: LegacyServer) {
/*
* Cluster Overview
*/
@ -32,11 +34,11 @@ export function clusterRoute(server) {
}),
},
},
handler: async (req) => {
handler: async (req: LegacyRequest) => {
const config = server.config();
const indexPatterns = getIndexPatterns(server, {
filebeatIndexPattern: config.get('monitoring.ui.logs.index'),
filebeatIndexPattern: config.get('monitoring.ui.logs.index')!,
});
const options = {
clusterUuid: req.params.clusterUuid,

View file

@ -17,7 +17,7 @@ import {
import { prefixIndexPattern } from '../../../../../lib/ccs_utils';
// @ts-ignore
import { handleError } from '../../../../../lib/errors';
import { RouteDependencies } from '../../../../../types';
import { RouteDependencies, LegacyServer } from '../../../../../types';
const queryBody = {
size: 0,
@ -70,10 +70,7 @@ const checkLatestMonitoringIsLegacy = async (context: RequestHandlerContext, ind
return counts;
};
export function internalMonitoringCheckRoute(
server: { config: () => unknown },
npRoute: RouteDependencies
) {
export function internalMonitoringCheckRoute(server: LegacyServer, npRoute: RouteDependencies) {
npRoute.router.post(
{
path: '/api/monitoring/v1/elasticsearch_settings/check/internal_monitoring',

View file

@ -30,6 +30,7 @@ import { LicensingPluginStart } from '../../licensing/server';
import { PluginSetupContract as FeaturesPluginSetupContract } from '../../features/server';
import { EncryptedSavedObjectsPluginSetup } from '../../encrypted_saved_objects/server';
import { CloudSetup } from '../../cloud/server';
import { ElasticsearchModifiedSource } from '../common/types/es';
export interface MonitoringLicenseService {
refresh: () => Promise<any>;
@ -146,3 +147,20 @@ export interface LegacyServer {
};
};
}
export type Cluster = ElasticsearchModifiedSource & {
ml?: { jobs: any };
logs?: any;
alerts?: any;
};
export interface Bucket {
key: string;
uuids: {
buckets: unknown[];
};
}
export interface Aggregation {
buckets: Bucket[];
}

View file

@ -1,61 +1,66 @@
[{
"cluster_uuid": "__standalone_cluster__",
"license": {},
"elasticsearch": {
"cluster_stats": {
"indices": {
"docs": {},
"shards": {},
"store": {}
},
"nodes": {
"count": {
"total": {}
[
{
"cluster_uuid": "__standalone_cluster__",
"license": {},
"elasticsearch": {
"cluster_stats": {
"indices": {
"docs": {},
"shards": {},
"store": {}
},
"fs": {},
"jvm": {
"mem": {}
"nodes": {
"count": {
"total": {}
},
"fs": {},
"jvm": {
"mem": {}
}
}
},
"logs": {
"enabled": false,
"reason": {
"clusterExists": false,
"indexPatternExists": false,
"indexPatternInTimeRangeExists": false,
"typeExistsAtAnyTime": false,
"usingStructuredLogs": false,
"nodeExists": null,
"indexExists": null,
"typeExists": false
},
"types": []
}
},
"logs": {
"enabled": false,
"reason": {
"clusterExists": false,
"indexPatternExists": false,
"indexPatternInTimeRangeExists": false,
"typeExistsAtAnyTime": false,
"usingStructuredLogs": false,
"nodeExists": null,
"indexExists": null,
"typeExists": false
},
"types": []
}
},
"logstash": {},
"kibana": {},
"beats": {
"totalEvents": 348,
"bytesSent": 319913,
"logstash": {},
"kibana": {},
"beats": {
"total": 1,
"types": [{
"type": "Packetbeat",
"count": 1
}]
}
},
"apm": {
"totalEvents": 0,
"memRss": 0,
"apms": {
"total": 0
"totalEvents": 348,
"bytesSent": 319913,
"beats": {
"total": 1,
"types": [
{
"type": "Packetbeat",
"count": 1
}
]
}
},
"config": {
"container": false
"apm": {
"totalEvents": 0,
"memRss": 0,
"apms": {
"total": 0
},
"config": {
"container": false
},
"versions": []
},
"versions": []
},
"isPrimary": false
}]
"isPrimary": false,
"isCcrEnabled": false
}
]