[Metrics UI] Anomaly Detection setup flow for Metrics (#76787)

* adds metrics ml integration

* Add ability to create ml jobs from inventory

* Fix i18n stuff

* Fix typecheck

* renames jobs, updates datafeeds

* adds allow_no_indices: true for datafeeds

* Revert "[Metrics UI] Replace Snapshot API with Metrics API (#76253)"

This reverts commit 0ca647286a.

* Add ability to fetch anomalies

* Fix typecheck

* Fix typecheck

* Fix i18n

* Fix lint, use the right partition field

* Delete log files

* Fix merge

* Fix merge issues

* Update name of jobs

* Remove CPU job

* [Metrics UI] Replace Snapshot API with Metrics API (#76253)

- Remove server/lib/snapshot
- Replace backend for /api/infra/snapshot with data from Metrics API
- Fixing tests with updates to the snapshot node

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>

* Add links back to ML for anomalies and manage jobs

* Fix typecheck

* Remove unnecessary validation

Co-authored-by: Michael Hirsch <michaelahirsch@gmail.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: Chris Cowan <chris@chriscowan.us>
This commit is contained in:
Phillip Burch 2020-09-23 13:27:28 -05:00 committed by GitHub
parent e0f93233bd
commit 14921a037e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
51 changed files with 5395 additions and 103 deletions

View file

@ -0,0 +1,7 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export * from './results';

View file

@ -0,0 +1,59 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
// Cursor for search_after-style pagination: [sort field value, tiebreaker value].
export const paginationCursorRT = rt.tuple([
  rt.union([rt.string, rt.number]),
  rt.union([rt.string, rt.number]),
]);
export type PaginationCursor = rt.TypeOf<typeof paginationCursorRT>;

// The two supported anomaly job categories (hosts and Kubernetes).
export const anomalyTypeRT = rt.keyof({
  metrics_hosts: null,
  metrics_k8s: null,
});
export type AnomalyType = rt.TypeOf<typeof anomalyTypeRT>;

// Fields the anomaly listing can be sorted by.
const sortOptionsRT = rt.keyof({
  anomalyScore: null,
  dataset: null,
  startTime: null,
});
const sortDirectionsRT = rt.keyof({
  asc: null,
  desc: null,
});

// A cursor request is either "search before" (previous page) or
// "search after" (next page).
const paginationPreviousPageCursorRT = rt.type({
  searchBefore: paginationCursorRT,
});
const paginationNextPageCursorRT = rt.type({
  searchAfter: paginationCursorRT,
});

// Page size is required; the cursor is omitted for the first page.
export const paginationRT = rt.intersection([
  rt.type({
    pageSize: rt.number,
  }),
  rt.partial({
    cursor: rt.union([paginationPreviousPageCursorRT, paginationNextPageCursorRT]),
  }),
]);
export type Pagination = rt.TypeOf<typeof paginationRT>;

// Sort specification: one of the fields above plus a direction.
export const sortRT = rt.type({
  field: sortOptionsRT,
  direction: sortDirectionsRT,
});
export type Sort = rt.TypeOf<typeof sortRT>;

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export * from './metrics_hosts_anomalies';
export * from './metrics_k8s_anomalies';
export * from './common';

View file

@ -0,0 +1,79 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { timeRangeRT, routeTimingMetadataRT } from '../../shared';
import { anomalyTypeRT, paginationCursorRT, sortRT, paginationRT } from './common';
// Route path for fetching host anomalies.
// NOTE(review): "INFA" looks like a typo for "INFRA", but the constant name is
// exported and the path is served by the backend — renaming either would break
// callers, so both are kept as-is.
export const INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH =
  '/api/infra/infra_ml/results/metrics_hosts_anomalies';

// Fields present on every host anomaly returned by the route.
const metricsHostAnomalyCommonFieldsRT = rt.type({
  id: rt.string,
  anomalyScore: rt.number,
  typical: rt.number,
  actual: rt.number,
  type: anomalyTypeRT,
  duration: rt.number,
  startTime: rt.number,
  jobId: rt.string,
});
const metricsHostsAnomalyRT = metricsHostAnomalyCommonFieldsRT;
export type MetricsHostsAnomaly = rt.TypeOf<typeof metricsHostsAnomalyRT>;

// NOTE(review): "Reponse" is a typo for "Response"; kept because the exported
// name is part of the module's public surface.
export const getMetricsHostsAnomaliesSuccessReponsePayloadRT = rt.intersection([
  rt.type({
    data: rt.intersection([
      rt.type({
        anomalies: rt.array(metricsHostsAnomalyRT),
        // Signifies there are more entries backwards or forwards. If this was a request
        // for a previous page, there are more previous pages, if this was a request for a next page,
        // there are more next pages.
        hasMoreEntries: rt.boolean,
      }),
      rt.partial({
        paginationCursors: rt.type({
          // The cursor to use to fetch the previous page
          previousPageCursor: paginationCursorRT,
          // The cursor to use to fetch the next page
          nextPageCursor: paginationCursorRT,
        }),
      }),
    ]),
  }),
  rt.partial({
    // Optional request timing metadata.
    timing: routeTimingMetadataRT,
  }),
]);
export type GetMetricsHostsAnomaliesSuccessResponsePayload = rt.TypeOf<
  typeof getMetricsHostsAnomaliesSuccessReponsePayloadRT
>;

export const getMetricsHostsAnomaliesRequestPayloadRT = rt.type({
  data: rt.intersection([
    rt.type({
      // the ID of the source configuration
      sourceId: rt.string,
      // the time range to fetch the metrics anomalies from
      timeRange: timeRangeRT,
    }),
    rt.partial({
      // Pagination properties
      pagination: paginationRT,
      // NOTE(review): dataset filtering is commented out here but active in the
      // k8s twin (getMetricsK8sAnomaliesRequestPayloadRT) — confirm whether the
      // asymmetry is intentional before removing the dead code.
      // Sort properties
      sort: sortRT,
      // // Dataset filters
      // datasets: rt.array(rt.string),
    }),
  ]),
});
export type GetMetricsHostsAnomaliesRequestPayload = rt.TypeOf<
  typeof getMetricsHostsAnomaliesRequestPayloadRT
>;

View file

@ -0,0 +1,79 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { timeRangeRT, routeTimingMetadataRT } from '../../shared';
import { paginationCursorRT, anomalyTypeRT, sortRT, paginationRT } from './common';
// Route path for fetching Kubernetes anomalies.
// NOTE(review): "INFA" looks like a typo for "INFRA"; kept because the name is
// exported and the path must match the server route.
export const INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH =
  '/api/infra/infra_ml/results/metrics_k8s_anomalies';

// Fields present on every K8s anomaly returned by the route.
const metricsK8sAnomalyCommonFieldsRT = rt.type({
  id: rt.string,
  anomalyScore: rt.number,
  typical: rt.number,
  actual: rt.number,
  type: anomalyTypeRT,
  duration: rt.number,
  startTime: rt.number,
  jobId: rt.string,
});
const metricsK8sAnomalyRT = metricsK8sAnomalyCommonFieldsRT;
export type MetricsK8sAnomaly = rt.TypeOf<typeof metricsK8sAnomalyRT>;

// NOTE(review): "Reponse" typo kept — the exported name is part of the API surface.
export const getMetricsK8sAnomaliesSuccessReponsePayloadRT = rt.intersection([
  rt.type({
    data: rt.intersection([
      rt.type({
        anomalies: rt.array(metricsK8sAnomalyRT),
        // Signifies there are more entries backwards or forwards. If this was a request
        // for a previous page, there are more previous pages, if this was a request for a next page,
        // there are more next pages.
        hasMoreEntries: rt.boolean,
      }),
      rt.partial({
        paginationCursors: rt.type({
          // The cursor to use to fetch the previous page
          previousPageCursor: paginationCursorRT,
          // The cursor to use to fetch the next page
          nextPageCursor: paginationCursorRT,
        }),
      }),
    ]),
  }),
  rt.partial({
    // Optional request timing metadata.
    timing: routeTimingMetadataRT,
  }),
]);
export type GetMetricsK8sAnomaliesSuccessResponsePayload = rt.TypeOf<
  typeof getMetricsK8sAnomaliesSuccessReponsePayloadRT
>;

export const getMetricsK8sAnomaliesRequestPayloadRT = rt.type({
  data: rt.intersection([
    rt.type({
      // the ID of the source configuration
      sourceId: rt.string,
      // the time range to fetch the metrics anomalies from
      timeRange: timeRangeRT,
    }),
    rt.partial({
      // Pagination properties
      pagination: paginationRT,
      // Sort properties
      sort: sortRT,
      // Dataset filters
      datasets: rt.array(rt.string),
    }),
  ]),
});
export type GetMetricsK8sAnomaliesRequestPayload = rt.TypeOf<
  typeof getMetricsK8sAnomaliesRequestPayloadRT
>;

View file

@ -0,0 +1,57 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
 * Severity thresholds and presentation helpers for ML anomaly scores.
 * Each category below names the minimum (inclusive) score that falls into it.
 */
export const ML_SEVERITY_SCORES = {
  warning: 3,
  minor: 25,
  major: 50,
  critical: 75,
};

export type MLSeverityScoreCategories = keyof typeof ML_SEVERITY_SCORES;

// Display colors per severity category.
export const ML_SEVERITY_COLORS = {
  critical: 'rgb(228, 72, 72)',
  major: 'rgb(229, 113, 0)',
  minor: 'rgb(255, 221, 0)',
  warning: 'rgb(125, 180, 226)',
};

/**
 * Map a raw anomaly score to its severity category, or `undefined` when the
 * score is below every threshold (too low to surface).
 */
export const getSeverityCategoryForScore = (
  score: number
): MLSeverityScoreCategories | undefined => {
  // Check thresholds from highest to lowest; the first match wins.
  const categoriesByDescendingThreshold: MLSeverityScoreCategories[] = [
    'critical',
    'major',
    'minor',
    'warning',
  ];
  return categoriesByDescendingThreshold.find(
    (category) => score >= ML_SEVERITY_SCORES[category]
  );
};

/** Round an anomaly score to the nearest integer for display. */
export const formatAnomalyScore = (score: number) => Math.round(score);

/** Round a number to one decimal place. */
export const formatOneDecimalPlace = (number: number) => Math.round(number * 10) / 10;

/** Substitute a readable placeholder for the empty partition id. */
export const getFriendlyNameForPartitionId = (partitionId: string) =>
  partitionId === '' ? 'unknown' : partitionId;

/** Ascending comparator on `maximumAnomalyScore`, for Array.prototype.sort. */
export const compareDatasetsByMaximumAnomalyScore = <
  Dataset extends { maximumAnomalyScore: number }
>(
  firstDataset: Dataset,
  secondDataset: Dataset
) => firstDataset.maximumAnomalyScore - secondDataset.maximumAnomalyScore;

View file

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export * from './infra_ml';
export * from './anomaly_results';
export * from './job_parameters';
export * from './metrics_hosts_ml';
export * from './metrics_k8s_ml';

View file

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// combines and abstracts job and datafeed status
export type JobStatus =
  | 'unknown'
  | 'missing'
  | 'initializing'
  | 'stopped'
  | 'started'
  | 'finished'
  | 'failed';

export type SetupStatus =
  | { type: 'initializing' } // acquiring job statuses to determine setup status
  | { type: 'unknown' } // job status could not be acquired (failed request etc)
  | { type: 'required' } // setup required
  | { type: 'pending' } // In the process of setting up the module for the first time or retrying, waiting for response
  | { type: 'succeeded' } // setup succeeded, notifying user
  | {
      type: 'failed';
      reasons: string[];
    } // setup failed, notifying user
  | {
      type: 'skipped';
      newlyCreated?: boolean;
    }; // setup is not necessary

// Job states in which the job may already have produced results.
const JOB_STATUSES_WITH_RESULTS: JobStatus[] = ['started', 'finished', 'stopped', 'failed'];

// Job states considered healthy (running or completed normally).
const HEALTHY_JOB_STATUSES: JobStatus[] = ['started', 'finished'];

/**
 * Maps a job status to the possibility that results have already been produced
 * before this state was reached.
 */
export const isJobStatusWithResults = (jobStatus: JobStatus) =>
  JOB_STATUSES_WITH_RESULTS.includes(jobStatus);

export const isHealthyJobStatus = (jobStatus: JobStatus) =>
  HEALTHY_JOB_STATUSES.includes(jobStatus);

/**
 * Maps a setup status to the possibility that results have already been
 * produced before this state was reached.
 */
export const isSetupStatusWithResults = (setupStatus: SetupStatus) =>
  setupStatus.type === 'skipped';

// NOTE(review): this is an exact string comparison against the pattern entry
// (including the trailing '*'), not a glob match — presumably callers pass the
// configured index pattern string rather than a concrete index name; confirm.
const KIBANA_SAMPLE_DATA_INDICES = ['kibana_sample_data_logs*'];

export const isExampleDataIndex = (indexName: string) =>
  KIBANA_SAMPLE_DATA_INDICES.includes(indexName);

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
// Bucket span for the ML jobs, in milliseconds (900000 ms = 15 minutes).
export const bucketSpan = 900000;

// Field used for message categorization.
export const categoriesMessageField = 'message';

// Field the analyses are partitioned by.
export const partitionField = 'event.dataset';

// All job and datafeed ids are namespaced by space and source configuration.
const JOB_ID_NAMESPACE = 'kibana-metrics-ui';

/** Prefix shared by all job ids for a given space/source combination. */
export const getJobIdPrefix = (spaceId: string, sourceId: string) =>
  `${JOB_ID_NAMESPACE}-${spaceId}-${sourceId}-`;

/** Fully-qualified ML job id for a job type within a space/source. */
export const getJobId = (spaceId: string, sourceId: string, jobType: string) =>
  `${getJobIdPrefix(spaceId, sourceId)}${jobType}`;

/** Datafeed id corresponding to the job id of the same job type. */
export const getDatafeedId = (spaceId: string, sourceId: string, jobType: string) =>
  `datafeed-${getJobId(spaceId, sourceId, jobType)}`;
// A dataset filter either includes everything or an explicit list of datasets.
// rt.strict rejects unexpected extra properties.
export const datasetFilterRT = rt.union([
  rt.strict({
    type: rt.literal('includeAll'),
  }),
  rt.strict({
    type: rt.literal('includeSome'),
    datasets: rt.array(rt.string),
  }),
]);
export type DatasetFilter = rt.TypeOf<typeof datasetFilterRT>;

// Source configuration persisted in the ML job's custom settings; all fields
// are optional so older jobs that lack them still decode.
export const jobSourceConfigurationRT = rt.partial({
  indexPattern: rt.string,
  timestampField: rt.string,
  bucketSpan: rt.number,
  datasetFilter: datasetFilterRT,
});
export type JobSourceConfiguration = rt.TypeOf<typeof jobSourceConfigurationRT>;

// Custom settings blob stored on the ML job (snake_case keys as stored by ML).
export const jobCustomSettingsRT = rt.partial({
  job_revision: rt.number,
  metrics_source_config: jobSourceConfigurationRT,
});
export type JobCustomSettings = rt.TypeOf<typeof jobCustomSettingsRT>;
/**
 * Merge two dataset filters. Two "includeAll" filters stay "includeAll";
 * otherwise the result is "includeSome" over the union of the explicitly
 * listed datasets.
 * NOTE(review): an "includeAll" combined with an "includeSome" narrows to
 * "includeSome" — confirm this precedence is intended.
 */
export const combineDatasetFilters = (
  firstFilter: DatasetFilter,
  secondFilter: DatasetFilter
): DatasetFilter => {
  if (firstFilter.type === 'includeAll' && secondFilter.type === 'includeAll') {
    return { type: 'includeAll' };
  }
  // An "includeAll" filter contributes no explicit datasets to the union.
  const datasetsOf = (filter: DatasetFilter): string[] =>
    filter.type === 'includeSome' ? filter.datasets : [];
  const mergedDatasets = new Set([...datasetsOf(firstFilter), ...datasetsOf(secondFilter)]);
  return {
    type: 'includeSome',
    datasets: Array.from(mergedDatasets),
  };
};
/**
 * Apply `predicate` to the datasets of an "includeSome" filter. If nothing
 * survives the predicate, the filter widens back to "includeAll"; an
 * "includeAll" filter is returned unchanged.
 */
export const filterDatasetFilter = (
  datasetFilter: DatasetFilter,
  predicate: (dataset: string) => boolean
): DatasetFilter => {
  if (datasetFilter.type === 'includeAll') {
    return datasetFilter;
  }
  const remainingDatasets = datasetFilter.datasets.filter(predicate);
  // An empty "includeSome" would match nothing, so fall back to "includeAll".
  return remainingDatasets.length > 0
    ? { type: 'includeSome', datasets: remainingDatasets }
    : { type: 'includeAll' };
};

View file

@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
// Job types of the hosts anomaly detection module.
export const metricsHostsJobTypeRT = rt.keyof({
  hosts_memory_usage: null,
  hosts_network_in: null,
  hosts_network_out: null,
});
export type MetricsHostsJobType = rt.TypeOf<typeof metricsHostsJobTypeRT>;

// Exhaustive runtime list of the job types above, for iteration.
export const metricsHostsJobTypes: MetricsHostsJobType[] = [
  'hosts_memory_usage',
  'hosts_network_in',
  'hosts_network_out',
];

View file

@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
// Job types of the Kubernetes anomaly detection module.
export const metricK8sJobTypeRT = rt.keyof({
  k8s_memory_usage: null,
  k8s_network_in: null,
  k8s_network_out: null,
});
export type MetricK8sJobType = rt.TypeOf<typeof metricK8sJobTypeRT>;

// Exhaustive runtime list of the job types above, for iteration.
// NOTE(review): casing is inconsistent with the hosts twin
// (`metricsK8SJobTypes` vs `metricsHostsJobTypes`); kept because the name is
// exported and referenced elsewhere.
export const metricsK8SJobTypes: MetricK8sJobType[] = [
  'k8s_memory_usage',
  'k8s_network_in',
  'k8s_network_out',
];

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
// Shape of the GET /api/ml/ml_capabilities response: fine-grained permission
// flags plus license- and space-level ML availability.
export const getMlCapabilitiesResponsePayloadRT = rt.type({
  capabilities: rt.type({
    canGetJobs: rt.boolean,
    canCreateJob: rt.boolean,
    canDeleteJob: rt.boolean,
    canOpenJob: rt.boolean,
    canCloseJob: rt.boolean,
    canForecastJob: rt.boolean,
    canGetDatafeeds: rt.boolean,
    canStartStopDatafeed: rt.boolean,
    canUpdateJob: rt.boolean,
    canUpdateDatafeed: rt.boolean,
    canPreviewDatafeed: rt.boolean,
  }),
  isPlatinumOrTrialLicense: rt.boolean,
  mlFeatureEnabledInSpace: rt.boolean,
  upgradeInProgress: rt.boolean,
});
export type GetMlCapabilitiesResponsePayload = rt.TypeOf<typeof getMlCapabilitiesResponsePayloadRT>;

View file

@ -0,0 +1,95 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../legacy_singletons';
import { getDatafeedId, getJobId } from '../../../../common/infra_ml';
import { throwErrors, createPlainError } from '../../../../common/runtime_types';
/**
 * Delete the ML jobs for the given job types within a space/source via
 * POST /api/ml/jobs/delete_jobs. Decodes the response and throws a plain
 * error when it does not match the expected payload shape.
 */
export const callDeleteJobs = async <JobType extends string>(
  spaceId: string,
  sourceId: string,
  jobTypes: JobType[]
) => {
  // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time
  const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', {
    method: 'POST',
    body: JSON.stringify(
      deleteJobsRequestPayloadRT.encode({
        jobIds: jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)),
      })
    ),
  });
  return pipe(
    deleteJobsResponsePayloadRT.decode(deleteJobsResponse),
    fold(throwErrors(createPlainError), identity)
  );
};
/**
 * Fetch the ids of jobs that currently have an in-progress deletion task,
 * so callers can wait for deletions to complete.
 */
export const callGetJobDeletionTasks = async () => {
  const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks');
  return pipe(
    getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse),
    fold(throwErrors(createPlainError), identity)
  );
};
/**
 * Stop the datafeeds for the given job types via
 * POST /api/ml/jobs/stop_datafeeds. Decodes the response and throws a plain
 * error when it does not match the expected payload shape.
 */
export const callStopDatafeeds = async <JobType extends string>(
  spaceId: string,
  sourceId: string,
  jobTypes: JobType[]
) => {
  // Stop datafeed due to https://github.com/elastic/kibana/issues/44652
  const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', {
    method: 'POST',
    body: JSON.stringify(
      stopDatafeedsRequestPayloadRT.encode({
        datafeedIds: jobTypes.map((jobType) => getDatafeedId(spaceId, sourceId, jobType)),
      })
    ),
  });
  return pipe(
    stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse),
    fold(throwErrors(createPlainError), identity)
  );
};
// Request/response codecs for the ML job/datafeed management endpoints above.
export const deleteJobsRequestPayloadRT = rt.type({
  jobIds: rt.array(rt.string),
});
export type DeleteJobsRequestPayload = rt.TypeOf<typeof deleteJobsRequestPayloadRT>;

// Map of job id -> deletion result.
export const deleteJobsResponsePayloadRT = rt.record(
  rt.string,
  rt.type({
    deleted: rt.boolean,
  })
);
export type DeleteJobsResponsePayload = rt.TypeOf<typeof deleteJobsResponsePayloadRT>;

// Ids of jobs with an in-progress deletion task.
export const getJobDeletionTasksResponsePayloadRT = rt.type({
  jobIds: rt.array(rt.string),
});

export const stopDatafeedsRequestPayloadRT = rt.type({
  datafeedIds: rt.array(rt.string),
});

// Map of datafeed id -> stop result.
export const stopDatafeedsResponsePayloadRT = rt.record(
  rt.string,
  rt.type({
    stopped: rt.boolean,
  })
);

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons';
import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
/**
 * Fetch summaries for this space/source's ML jobs of the given types via
 * POST /api/ml/jobs/jobs_summary; throws when the response fails to decode.
 */
export const callJobsSummaryAPI = async <JobType extends string>(
  spaceId: string,
  sourceId: string,
  jobTypes: JobType[]
) => {
  const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', {
    method: 'POST',
    body: JSON.stringify(
      fetchJobStatusRequestPayloadRT.encode({
        jobIds: jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)),
      })
    ),
  });
  return pipe(
    fetchJobStatusResponsePayloadRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
// Request payload: the explicit list of job ids to summarize.
export const fetchJobStatusRequestPayloadRT = rt.type({
  jobIds: rt.array(rt.string),
});
export type FetchJobStatusRequestPayload = rt.TypeOf<typeof fetchJobStatusRequestPayloadRT>;

// Datafeed state as reported by ML; '' is an allowed value — presumably when
// the datafeed has no state to report (confirm against the ML API).
const datafeedStateRT = rt.keyof({
  started: null,
  stopped: null,
  stopping: null,
  '': null,
});

const jobStateRT = rt.keyof({
  closed: null,
  closing: null,
  deleting: null,
  failed: null,
  opened: null,
  opening: null,
});

const jobCategorizationStatusRT = rt.keyof({
  ok: null,
  warn: null,
});

// Model size statistics related to categorization quality (snake_case keys as
// returned by ML).
const jobModelSizeStatsRT = rt.type({
  categorization_status: jobCategorizationStatusRT,
  categorized_doc_count: rt.number,
  dead_category_count: rt.number,
  frequent_category_count: rt.number,
  rare_category_count: rt.number,
  total_category_count: rt.number,
});
export type JobModelSizeStats = rt.TypeOf<typeof jobModelSizeStatsRT>;

// Summary of a single job; id and jobState are always present, the remaining
// fields are optional in the API response.
export const jobSummaryRT = rt.intersection([
  rt.type({
    id: rt.string,
    jobState: jobStateRT,
  }),
  rt.partial({
    datafeedIndices: rt.array(rt.string),
    datafeedState: datafeedStateRT,
    fullJob: rt.partial({
      custom_settings: jobCustomSettingsRT,
      finished_time: rt.number,
      model_size_stats: jobModelSizeStatsRT,
    }),
  }),
]);
export type JobSummary = rt.TypeOf<typeof jobSummaryRT>;

export const fetchJobStatusResponsePayloadRT = rt.array(jobSummaryRT);
export type FetchJobStatusResponsePayload = rt.TypeOf<typeof fetchJobStatusResponsePayloadRT>;

View file

@ -0,0 +1,41 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons';
import { jobCustomSettingsRT } from '../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
/**
 * Fetch an ML module definition (its job configurations) by module id via
 * GET /api/ml/modules/get_module/{moduleId}.
 */
export const callGetMlModuleAPI = async (moduleId: string) => {
  const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, {
    method: 'GET',
  });
  return pipe(
    getMlModuleResponsePayloadRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
// A job definition as returned by the get_module endpoint.
// NOTE(review): `jobCustomSettingsRT` is imported here from
// '../../../../common/log_analysis' while the sibling infra ML API files
// import it from '../../../../common/infra_ml' — confirm which module is
// intended.
const jobDefinitionRT = rt.type({
  id: rt.string,
  config: rt.type({
    custom_settings: jobCustomSettingsRT,
  }),
});
export type JobDefinition = rt.TypeOf<typeof jobDefinitionRT>;

const getMlModuleResponsePayloadRT = rt.type({
  id: rt.string,
  jobs: rt.array(jobDefinitionRT),
});
export type GetMlModuleResponsePayload = rt.TypeOf<typeof getMlModuleResponsePayloadRT>;

View file

@ -0,0 +1,115 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons';
import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
/**
 * Set up an ML module via POST /api/ml/modules/setup/{moduleId}: creates the
 * module's jobs and datafeeds (with the given overrides) and starts the
 * datafeeds. Decodes the per-job/per-datafeed setup results.
 */
export const callSetupMlModuleAPI = async (
  moduleId: string,
  start: number | undefined,
  end: number | undefined,
  spaceId: string,
  sourceId: string,
  indexPattern: string,
  jobOverrides: SetupMlModuleJobOverrides[] = [],
  datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [],
  query?: object
) => {
  const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, {
    method: 'POST',
    body: JSON.stringify(
      setupMlModuleRequestPayloadRT.encode({
        start,
        end,
        indexPatternName: indexPattern,
        // Job ids are namespaced per space/source via this prefix.
        prefix: getJobIdPrefix(spaceId, sourceId),
        startDatafeed: true,
        jobOverrides,
        datafeedOverrides,
        query,
      })
    ),
  });
  return pipe(
    setupMlModuleResponsePayloadRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
// Optional start/end bounds for the datafeed.
const setupMlModuleTimeParamsRT = rt.partial({
  start: rt.number,
  end: rt.number,
});

const setupMlModuleJobOverridesRT = rt.type({
  job_id: rt.string,
  custom_settings: jobCustomSettingsRT,
});
export type SetupMlModuleJobOverrides = rt.TypeOf<typeof setupMlModuleJobOverridesRT>;

// NOTE(review): rt.object presumably accepts any object — datafeed overrides
// are passed through without further validation; confirm that is intentional.
const setupMlModuleDatafeedOverridesRT = rt.object;
export type SetupMlModuleDatafeedOverrides = rt.TypeOf<typeof setupMlModuleDatafeedOverridesRT>;

// rt.strict / rt.exact strip or reject unexpected extra properties before the
// payload is sent to the ML endpoint.
const setupMlModuleRequestParamsRT = rt.intersection([
  rt.strict({
    indexPatternName: rt.string,
    prefix: rt.string,
    startDatafeed: rt.boolean,
    jobOverrides: rt.array(setupMlModuleJobOverridesRT),
    datafeedOverrides: rt.array(setupMlModuleDatafeedOverridesRT),
  }),
  rt.exact(
    rt.partial({
      query: rt.object,
    })
  ),
]);

const setupMlModuleRequestPayloadRT = rt.intersection([
  setupMlModuleTimeParamsRT,
  setupMlModuleRequestParamsRT,
]);

const setupErrorResponseRT = rt.type({
  msg: rt.string,
});

// Per-datafeed setup outcome; `error` is only present on failure.
const datafeedSetupResponseRT = rt.intersection([
  rt.type({
    id: rt.string,
    started: rt.boolean,
    success: rt.boolean,
  }),
  rt.partial({
    error: setupErrorResponseRT,
  }),
]);

// Per-job setup outcome; `error` is only present on failure.
const jobSetupResponseRT = rt.intersection([
  rt.type({
    id: rt.string,
    success: rt.boolean,
  }),
  rt.partial({
    error: setupErrorResponseRT,
  }),
]);

const setupMlModuleResponsePayloadRT = rt.type({
  datafeeds: rt.array(datafeedSetupResponseRT),
  jobs: rt.array(jobSetupResponseRT),
});
export type SetupMlModuleResponsePayload = rt.TypeOf<typeof setupMlModuleResponsePayloadRT>;

View file

@ -0,0 +1,97 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import createContainer from 'constate';
import { useMemo, useState, useEffect } from 'react';
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { npStart } from '../../legacy_singletons';
import {
getMlCapabilitiesResponsePayloadRT,
GetMlCapabilitiesResponsePayload,
} from './api/ml_api_types';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
/**
 * React hook exposing the current user's ML capabilities (fetched from
 * GET /api/ml/ml_capabilities) as convenience booleans for the Metrics UI.
 */
export const useInfraMLCapabilities = () => {
  // Starts from pessimistic defaults until the real capabilities arrive.
  const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>(
    initialMlCapabilities
  );

  const [fetchMlCapabilitiesRequest, fetchMlCapabilities] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities');
        // Decode the response; throws a plain error on an unexpected shape.
        return pipe(
          getMlCapabilitiesResponsePayloadRT.decode(rawResponse),
          fold(throwErrors(createPlainError), identity)
        );
      },
      onResolve: (response) => {
        setMlCapabilities(response);
      },
    },
    []
  );

  // Fetch once on mount (and again if the callback identity ever changes).
  useEffect(() => {
    fetchMlCapabilities();
  }, [fetchMlCapabilities]);

  const isLoading = useMemo(() => fetchMlCapabilitiesRequest.state === 'pending', [
    fetchMlCapabilitiesRequest.state,
  ]);

  // Setup requires job-creation rights; reading results only requires job reads.
  const hasInfraMLSetupCapabilities = mlCapabilities.capabilities.canCreateJob;
  const hasInfraMLReadCapabilities = mlCapabilities.capabilities.canGetJobs;
  // NOTE(review): "Capabilites" is a typo kept because consumers destructure
  // this exact property name from the hook's return value.
  const hasInfraMLCapabilites =
    mlCapabilities.isPlatinumOrTrialLicense && mlCapabilities.mlFeatureEnabledInSpace;

  return {
    hasInfraMLCapabilites,
    hasInfraMLReadCapabilities,
    hasInfraMLSetupCapabilities,
    isLoading,
  };
};
// Constate container so the capabilities are fetched once and shared via context.
export const [InfraMLCapabilitiesProvider, useInfraMLCapabilitiesContext] = createContainer(
  useInfraMLCapabilities
);

// Pessimistic defaults used until the real capabilities are fetched.
// NOTE(review): this object carries more flags than
// getMlCapabilitiesResponsePayloadRT declares (calendar/filter/data-frame
// flags); the extras are structurally harmless but may be stale — confirm
// against the current ML capabilities API.
const initialMlCapabilities = {
  capabilities: {
    canGetJobs: false,
    canCreateJob: false,
    canDeleteJob: false,
    canOpenJob: false,
    canCloseJob: false,
    canForecastJob: false,
    canGetDatafeeds: false,
    canStartStopDatafeed: false,
    canUpdateJob: false,
    canUpdateDatafeed: false,
    canPreviewDatafeed: false,
    canGetCalendars: false,
    canCreateCalendar: false,
    canDeleteCalendar: false,
    canGetFilters: false,
    canCreateFilter: false,
    canDeleteFilter: false,
    canFindFileStructure: false,
    canGetDataFrameJobs: false,
    canDeleteDataFrameJob: false,
    canPreviewDataFrameJob: false,
    canCreateDataFrameJob: false,
    canStartStopDataFrameJob: false,
  },
  isPlatinumOrTrialLicense: false,
  mlFeatureEnabledInSpace: false,
  upgradeInProgress: false,
};

View file

@ -0,0 +1,55 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { getJobId } from '../../../common/infra_ml';
import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup';
/**
 * Stop the datafeeds for the given job types, then delete the jobs and wait
 * for the deletion tasks to finish.
 */
export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
  spaceId: string,
  sourceId: string,
  jobTypes: JobType[]
) => {
  try {
    await callStopDatafeeds(spaceId, sourceId, jobTypes);
  } catch (err) {
    // Proceed only if datafeed has been deleted or didn't exist in the first place
    // NOTE(review): this reads `err?.res?.status`; Kibana http errors typically
    // carry the status under `response`/`body` — confirm the 404 tolerance
    // actually triggers, otherwise a missing datafeed would rethrow here.
    if (err?.res?.status !== 404) {
      throw err;
    }
  }
  return await deleteJobs(spaceId, sourceId, jobTypes);
};
/**
 * Delete the jobs for the given job types and block until ML reports their
 * deletion tasks as finished; resolves with the delete-jobs API response.
 */
const deleteJobs = async <JobType extends string>(
  spaceId: string,
  sourceId: string,
  jobTypes: JobType[]
) => {
  const response = await callDeleteJobs(spaceId, sourceId, jobTypes);
  await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes);
  return response;
};
/**
 * Poll the ML deleting-jobs-tasks endpoint until none of this module's job
 * ids are still being deleted; resolves with `true` once clear.
 * NOTE(review): polls forever with no max attempts or overall timeout — if a
 * deletion task hangs server-side this promise never settles; consider a bound.
 */
const waitUntilJobsAreDeleted = async <JobType extends string>(
  spaceId: string,
  sourceId: string,
  jobTypes: JobType[]
) => {
  const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType));
  while (true) {
    const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks();
    const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId));
    if (needToWait) {
      // Re-check once per second while any of our jobs are still being deleted.
      await timeout(1000);
    } else {
      return true;
    }
  }
};
/** Return a promise that resolves after `ms` milliseconds. */
const timeout = (ms: number) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });

View file

@ -0,0 +1,147 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../common/infra_ml';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { useModuleStatus } from './infra_ml_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types';
/**
 * React hook managing the lifecycle of one infra ML module (an anomaly
 * detection job bundle): fetching job statuses, setting the module up,
 * cleaning it up, and exposing the derived status to consumers.
 */
export const useInfraMLModule = <JobType extends string>({
  sourceConfiguration,
  moduleDescriptor,
}: {
  sourceConfiguration: ModuleSourceConfiguration;
  moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
  const { spaceId, sourceId, timestampField } = sourceConfiguration;
  // Reducer-style tracking of the module's jobs and setup progress.
  const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);

  // Fetch the current job summaries and fold the outcome into the status reducer.
  const [, fetchJobStatus] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        dispatchModuleStatus({ type: 'fetchingJobStatuses' });
        return await moduleDescriptor.getJobSummary(spaceId, sourceId);
      },
      onResolve: (jobResponse) => {
        dispatchModuleStatus({
          type: 'fetchedJobStatuses',
          payload: jobResponse,
          spaceId,
          sourceId,
        });
      },
      onReject: () => {
        dispatchModuleStatus({ type: 'failedFetchingJobStatuses' });
      },
    },
    [spaceId, sourceId]
  );

  // Set the module up, then immediately re-fetch job summaries so the UI
  // reflects the newly created jobs.
  const [, setUpModule] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async (
        selectedIndices: string[],
        start: number | undefined,
        end: number | undefined,
        datasetFilter: DatasetFilter,
        partitionField?: string
      ) => {
        dispatchModuleStatus({ type: 'startedSetup' });
        const setupResult = await moduleDescriptor.setUpModule(
          start,
          end,
          datasetFilter,
          {
            indices: selectedIndices,
            sourceId,
            spaceId,
            timestampField,
          },
          partitionField
        );
        const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId);
        return { setupResult, jobSummaries };
      },
      onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => {
        dispatchModuleStatus({
          type: 'finishedSetup',
          datafeedSetupResults: datafeeds,
          jobSetupResults: jobs,
          jobSummaries,
          spaceId,
          sourceId,
        });
      },
      onReject: () => {
        dispatchModuleStatus({ type: 'failedSetup' });
      },
    },
    [moduleDescriptor.setUpModule, spaceId, sourceId, timestampField]
  );

  const [cleanUpModuleRequest, cleanUpModule] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        return await moduleDescriptor.cleanUpModule(spaceId, sourceId);
      },
    },
    [spaceId, sourceId]
  );

  const isCleaningUp = useMemo(() => cleanUpModuleRequest.state === 'pending', [
    cleanUpModuleRequest.state,
  ]);

  // Tear down any existing jobs before re-creating them with new settings.
  const cleanUpAndSetUpModule = useCallback(
    (
      selectedIndices: string[],
      start: number | undefined,
      end: number | undefined,
      datasetFilter: DatasetFilter,
      partitionField?: string
    ) => {
      dispatchModuleStatus({ type: 'startedSetup' });
      cleanUpModule()
        .then(() => {
          setUpModule(selectedIndices, start, end, datasetFilter, partitionField);
        })
        .catch(() => {
          dispatchModuleStatus({ type: 'failedSetup' });
        });
    },
    [cleanUpModule, dispatchModuleStatus, setUpModule]
  );

  const viewResults = useCallback(() => {
    dispatchModuleStatus({ type: 'viewedResults' });
  }, [dispatchModuleStatus]);

  const jobIds = useMemo(() => moduleDescriptor.getJobIds(spaceId, sourceId), [
    moduleDescriptor,
    spaceId,
    sourceId,
  ]);

  return {
    cleanUpAndSetUpModule,
    cleanUpModule,
    fetchJobStatus,
    isCleaningUp,
    jobIds,
    jobStatus: moduleStatus.jobStatus,
    jobSummaries: moduleStatus.jobSummaries,
    lastSetupErrorMessages: moduleStatus.lastSetupErrorMessages,
    moduleDescriptor,
    setUpModule,
    setupStatus: moduleStatus.setupStatus,
    sourceConfiguration,
    viewResults,
  };
};

View file

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useMemo } from 'react';
import { JobSummary } from './api/ml_get_jobs_summary_api';
import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types';
/**
 * Hook exposing a memoized predicate that reports whether a job summary's
 * stored configuration has drifted from the current source configuration.
 */
export const useInfraMLModuleConfiguration = <JobType extends string>(props: {
  moduleDescriptor: ModuleDescriptor<JobType>;
  sourceConfiguration: ModuleSourceConfiguration;
}) => {
  const { moduleDescriptor, sourceConfiguration } = props;

  // Memoized so consumers get a stable function identity until either the
  // descriptor or the source configuration changes.
  const getIsJobConfigurationOutdated = useMemo(
    () => isJobConfigurationOutdated(moduleDescriptor, sourceConfiguration),
    [sourceConfiguration, moduleDescriptor]
  );

  return { getIsJobConfigurationOutdated };
};
/**
 * Curried predicate: given a module descriptor and the current source
 * configuration, returns a check that decides whether a job summary's stored
 * `metrics_source_config` no longer matches (bucket span, indices subset, or
 * timestamp field differ). Summaries without stored settings are treated as
 * up to date, since there is nothing to compare against.
 */
export const isJobConfigurationOutdated = <JobType extends string>(
  { bucketSpan }: ModuleDescriptor<JobType>,
  currentSourceConfiguration: ModuleSourceConfiguration
) => (jobSummary: JobSummary): boolean => {
  // Without the persisted custom settings we cannot tell — assume current.
  if (!jobSummary.fullJob || !jobSummary.fullJob.custom_settings) {
    return false;
  }

  const storedConfig = jobSummary.fullJob.custom_settings.metrics_source_config;
  if (!storedConfig || !storedConfig.indexPattern) {
    return true;
  }

  const bucketSpanMatches = storedConfig.bucketSpan === bucketSpan;
  const timestampMatches =
    storedConfig.timestampField === currentSourceConfiguration.timestampField;
  const indicesCovered = isSubset(
    new Set(storedConfig.indexPattern.split(',')),
    new Set(currentSourceConfiguration.indices)
  );

  return !(bucketSpanMatches && indicesCovered && timestampMatches);
};

// True when every element of `subset` is also contained in `superset`.
const isSubset = <T>(subset: Set<T>, superset: Set<T>) =>
  Array.from(subset).every((element) => superset.has(element));

View file

@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useCallback, useMemo, useState } from 'react';
import { getJobId } from '../../../common/log_analysis';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { JobSummary } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module';
import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types';
/**
 * Hook that loads the ML module definition and derives per-job helpers from
 * it: a lookup table keyed by fully qualified job id and a predicate that
 * detects jobs deployed from an older definition revision.
 */
export const useInfraMLModuleDefinition = <JobType extends string>({
  sourceConfiguration: { spaceId, sourceId },
  moduleDescriptor,
}: {
  sourceConfiguration: ModuleSourceConfiguration;
  moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
  const [moduleDefinition, setModuleDefinition] = useState<
    GetMlModuleResponsePayload | undefined
  >();

  // Index the definition's jobs by qualified job id so summaries (which carry
  // qualified ids) can be matched back to their definitions.
  const jobDefinitionByJobId = useMemo(
    () =>
      moduleDefinition
        ? (Object.fromEntries(
            moduleDefinition.jobs.map((jobDefinition) => [
              getJobId(spaceId, sourceId, jobDefinition.id),
              jobDefinition,
            ])
          ) as Record<string, JobDefinition>)
        : {},
    [moduleDefinition, sourceId, spaceId]
  );

  const [fetchModuleDefinitionRequest, fetchModuleDefinition] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        return await moduleDescriptor.getModuleDefinition();
      },
      onResolve: (response) => {
        setModuleDefinition(response);
      },
      onReject: () => {
        setModuleDefinition(undefined);
      },
    },
    [moduleDescriptor.getModuleDefinition, spaceId, sourceId]
  );

  // A job is outdated when the revision stored in its deployed custom
  // settings is behind the revision declared by the module definition.
  const getIsJobDefinitionOutdated = useCallback(
    (jobSummary: JobSummary): boolean => {
      const jobDefinition: JobDefinition | undefined = jobDefinitionByJobId[jobSummary.id];
      if (jobDefinition == null) {
        return false;
      }
      const latestRevision = jobDefinition?.config.custom_settings.job_revision ?? 0;
      const deployedRevision = jobSummary.fullJob?.custom_settings?.job_revision ?? 0;
      return deployedRevision < latestRevision;
    },
    [jobDefinitionByJobId]
  );

  return {
    fetchModuleDefinition,
    fetchModuleDefinitionRequestState: fetchModuleDefinitionRequest.state,
    getIsJobDefinitionOutdated,
    jobDefinitionByJobId,
    moduleDefinition,
  };
};

View file

@ -0,0 +1,268 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useReducer } from 'react';
import {
JobStatus,
getDatafeedId,
getJobId,
isJobStatusWithResults,
SetupStatus,
} from '../../../common/infra_ml';
import { FetchJobStatusResponsePayload, JobSummary } from './api/ml_get_jobs_summary_api';
import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
import { MandatoryProperty } from '../../../common/utility_types';
// State tracked by the module-status reducer: one status per job type, the
// raw job summaries, the error messages from the last setup attempt, and the
// overall setup status.
interface StatusReducerState<JobType extends string> {
  jobStatus: Record<JobType, JobStatus>;
  jobSummaries: JobSummary[];
  lastSetupErrorMessages: string[];
  setupStatus: SetupStatus;
}
// Actions dispatched by useInfraMLModule around fetch/setup requests.
type StatusReducerAction =
  | { type: 'startedSetup' }
  | {
      type: 'finishedSetup';
      sourceId: string;
      spaceId: string;
      jobSetupResults: SetupMlModuleResponsePayload['jobs'];
      jobSummaries: FetchJobStatusResponsePayload;
      datafeedSetupResults: SetupMlModuleResponsePayload['datafeeds'];
    }
  | { type: 'failedSetup' }
  | { type: 'fetchingJobStatuses' }
  | {
      type: 'fetchedJobStatuses';
      spaceId: string;
      sourceId: string;
      payload: FetchJobStatusResponsePayload;
    }
  | { type: 'failedFetchingJobStatuses' }
  | { type: 'viewedResults' };
/**
 * Builds the reducer's initial state: every job type starts out as 'unknown',
 * with no summaries, no setup errors, and an 'initializing' setup status.
 */
const createInitialState = <JobType extends string>({
  jobTypes,
}: {
  jobTypes: JobType[];
}): StatusReducerState<JobType> => {
  const initialJobStatus = Object.fromEntries(
    jobTypes.map((jobType) => [jobType, 'unknown'])
  ) as Record<JobType, JobStatus>;

  return {
    jobStatus: initialJobStatus,
    jobSummaries: [],
    lastSetupErrorMessages: [],
    setupStatus: { type: 'initializing' },
  };
};
/**
 * Builds the reducer driving StatusReducerState through the fetch/setup
 * action lifecycle. Depends on the sibling helpers below
 * (hasSuccessfullyCreatedJob, hasSuccessfullyStartedDatafeed, getJobStatus,
 * getSetupStatus, hasError) and the imported id builders.
 */
const createStatusReducer = <JobType extends string>(jobTypes: JobType[]) => (
  state: StatusReducerState<JobType>,
  action: StatusReducerAction
): StatusReducerState<JobType> => {
  switch (action.type) {
    // Setup began: every job goes to 'initializing', overall status 'pending'.
    case 'startedSetup': {
      return {
        ...state,
        jobStatus: jobTypes.reduce(
          (accumulatedJobStatus, jobType) => ({
            ...accumulatedJobStatus,
            [jobType]: 'initializing',
          }),
          {} as Record<JobType, JobStatus>
        ),
        setupStatus: { type: 'pending' },
      };
    }
    // Setup finished: a job counts as 'started' only when both its job and
    // its datafeed were created/started without error; otherwise 'failed'.
    case 'finishedSetup': {
      const { datafeedSetupResults, jobSetupResults, jobSummaries, spaceId, sourceId } = action;
      const nextJobStatus = jobTypes.reduce(
        (accumulatedJobStatus, jobType) => ({
          ...accumulatedJobStatus,
          [jobType]:
            hasSuccessfullyCreatedJob(getJobId(spaceId, sourceId, jobType))(jobSetupResults) &&
            hasSuccessfullyStartedDatafeed(getDatafeedId(spaceId, sourceId, jobType))(
              datafeedSetupResults
            )
              ? 'started'
              : 'failed',
        }),
        {} as Record<JobType, JobStatus>
      );
      // Overall setup succeeded only if every single job started; otherwise
      // collect the error messages from both job and datafeed responses.
      const nextSetupStatus: SetupStatus = Object.values<JobStatus>(nextJobStatus).every(
        (jobState) => jobState === 'started'
      )
        ? { type: 'succeeded' }
        : {
            type: 'failed',
            reasons: [
              ...Object.values(datafeedSetupResults)
                .filter(hasError)
                .map((datafeed) => datafeed.error.msg),
              ...Object.values(jobSetupResults)
                .filter(hasError)
                .map((job) => job.error.msg),
            ],
          };
      return {
        ...state,
        jobStatus: nextJobStatus,
        jobSummaries,
        setupStatus: nextSetupStatus,
      };
    }
    // Setup request itself rejected: mark everything failed.
    case 'failedSetup': {
      return {
        ...state,
        jobStatus: jobTypes.reduce(
          (accumulatedJobStatus, jobType) => ({
            ...accumulatedJobStatus,
            [jobType]: 'failed',
          }),
          {} as Record<JobType, JobStatus>
        ),
        setupStatus: { type: 'failed', reasons: ['unknown'] },
      };
    }
    // A status fetch is in flight; only promote 'unknown' to 'initializing'.
    case 'fetchingJobStatuses': {
      return {
        ...state,
        setupStatus:
          state.setupStatus.type === 'unknown' ? { type: 'initializing' } : state.setupStatus,
      };
    }
    // Fresh summaries arrived: derive each job's status, then fold the
    // per-job statuses into the next overall setup status.
    case 'fetchedJobStatuses': {
      const { payload: jobSummaries, spaceId, sourceId } = action;
      const { setupStatus } = state;
      const nextJobStatus = jobTypes.reduce(
        (accumulatedJobStatus, jobType) => ({
          ...accumulatedJobStatus,
          [jobType]: getJobStatus(getJobId(spaceId, sourceId, jobType))(jobSummaries),
        }),
        {} as Record<JobType, JobStatus>
      );
      const nextSetupStatus = getSetupStatus(nextJobStatus)(setupStatus);
      return {
        ...state,
        jobSummaries,
        jobStatus: nextJobStatus,
        setupStatus: nextSetupStatus,
      };
    }
    // Status fetch failed: everything reverts to 'unknown'.
    case 'failedFetchingJobStatuses': {
      return {
        ...state,
        setupStatus: { type: 'unknown' },
        jobStatus: jobTypes.reduce(
          (accumulatedJobStatus, jobType) => ({
            ...accumulatedJobStatus,
            [jobType]: 'unknown',
          }),
          {} as Record<JobType, JobStatus>
        ),
      };
    }
    // The user moved on to the results view; setup is considered skipped.
    case 'viewedResults': {
      return {
        ...state,
        setupStatus: { type: 'skipped', newlyCreated: true },
      };
    }
    default: {
      return state;
    }
  }
};
// A job was created successfully when a setup response for exactly this job
// id reports success and carries no error.
const hasSuccessfullyCreatedJob = (jobId: string) => (
  jobSetupResponses: SetupMlModuleResponsePayload['jobs']
) =>
  jobSetupResponses.some(
    (response) => response.id === jobId && response.success && !response.error
  );

// A datafeed is healthy when it was both created and started without error.
const hasSuccessfullyStartedDatafeed = (datafeedId: string) => (
  datafeedSetupResponses: SetupMlModuleResponsePayload['datafeeds']
) =>
  datafeedSetupResponses.some(
    (response) =>
      response.id === datafeedId && response.success && response.started && !response.error
  );
/**
 * Derives a single JobStatus for the job with the given id from a list of
 * job summaries. An id with no matching summary maps to 'missing'.
 */
const getJobStatus = (jobId: string) => (
  jobSummaries: FetchJobStatusResponsePayload
): JobStatus => {
  const summary = jobSummaries.find((candidate) => candidate.id === jobId);
  if (summary === undefined) {
    return 'missing';
  }
  // NOTE(review): the empty-string comparison presumably mirrors the payload's
  // encoding of "no datafeed state available" — confirm against
  // ml_get_jobs_summary_api before changing.
  if (summary.jobState === 'failed' || summary.datafeedState === '') {
    return 'failed';
  }
  // Closed + stopped with a recorded finish time means the job ran to completion.
  if (
    summary.jobState === 'closed' &&
    summary.datafeedState === 'stopped' &&
    summary.fullJob &&
    summary.fullJob.finished_time != null
  ) {
    return 'finished';
  }
  if (
    summary.jobState === 'closed' ||
    summary.jobState === 'closing' ||
    summary.datafeedState === 'stopped'
  ) {
    return 'stopped';
  }
  if (summary.jobState === 'opening') {
    return 'initializing';
  }
  if (summary.jobState === 'opened' && summary.datafeedState === 'started') {
    return 'started';
  }
  return 'unknown';
};
/**
 * Folds the per-job statuses into an overall SetupStatus, starting from the
 * previous overall status. Any 'missing' job forces 'required'; 'required'
 * and 'succeeded' are sticky; jobs with results (or an already skipped
 * status) yield 'skipped' while preserving the newlyCreated flag.
 */
const getSetupStatus = <JobType extends string>(everyJobStatus: Record<JobType, JobStatus>) => (
  previousSetupStatus: SetupStatus
): SetupStatus => {
  let setupStatus = previousSetupStatus;
  for (const jobStatus of Object.values<JobStatus>(everyJobStatus)) {
    if (jobStatus === 'missing') {
      setupStatus = { type: 'required' };
      continue;
    }
    if (setupStatus.type === 'required' || setupStatus.type === 'succeeded') {
      continue;
    }
    if (setupStatus.type === 'skipped' || isJobStatusWithResults(jobStatus)) {
      setupStatus = {
        type: 'skipped',
        // preserve newlyCreated status
        newlyCreated: setupStatus.type === 'skipped' && setupStatus.newlyCreated,
      };
    }
  }
  return setupStatus;
};
// Type guard narrowing a setup result to one whose `error` property is present.
const hasError = <Value extends { error?: any }>(
  value: Value
): value is MandatoryProperty<Value, 'error'> => value.error != null;

// Reducer-backed hook exposing the module's job/setup status and its dispatcher.
export const useModuleStatus = <JobType extends string>(jobTypes: JobType[]) => {
  return useReducer(createStatusReducer(jobTypes), { jobTypes }, createInitialState);
};

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload,
} from '../../../common/http_api/log_analysis';
import { DatasetFilter } from '../../../common/infra_ml';
import { DeleteJobsResponsePayload } from './api/ml_cleanup';
import { FetchJobStatusResponsePayload } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload } from './api/ml_get_module';
import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api';
// Contract each infra ML module (hosts, k8s, ...) implements so the shared
// hooks can set up, clean up, validate, and inspect its jobs.
export interface ModuleDescriptor<JobType extends string> {
  moduleId: string;
  moduleName: string;
  moduleDescription: string;
  jobTypes: JobType[];
  // Bucket span used by the module's jobs; also compared against the value
  // persisted in each job's custom settings to detect outdated configurations.
  bucketSpan: number;
  getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>;
  getJobSummary: (spaceId: string, sourceId: string) => Promise<FetchJobStatusResponsePayload>;
  getModuleDefinition: () => Promise<GetMlModuleResponsePayload>;
  setUpModule: (
    start: number | undefined,
    end: number | undefined,
    datasetFilter: DatasetFilter,
    sourceConfiguration: ModuleSourceConfiguration,
    partitionField?: string
  ) => Promise<SetupMlModuleResponsePayload>;
  cleanUpModule: (spaceId: string, sourceId: string) => Promise<DeleteJobsResponsePayload>;
  validateSetupIndices: (
    indices: string[],
    timestampField: string
  ) => Promise<ValidationIndicesResponsePayload>;
  validateSetupDatasets: (
    indices: string[],
    timestampField: string,
    startTime: number,
    endTime: number
  ) => Promise<ValidateLogEntryDatasetsResponsePayload>;
}
// Source configuration resolved down to the fields the ML hooks need.
export interface ModuleSourceConfiguration {
  indices: string[];
  sourceId: string;
  spaceId: string;
  timestampField: string;
}
// Reasons a categorization job's quality may be flagged, one variant per cause.
interface ManyCategoriesWarningReason {
  type: 'manyCategories';
  categoriesDocumentRatio: number;
}
interface ManyDeadCategoriesWarningReason {
  type: 'manyDeadCategories';
  deadCategoriesRatio: number;
}
interface ManyRareCategoriesWarningReason {
  type: 'manyRareCategories';
  rareCategoriesRatio: number;
}
interface NoFrequentCategoriesWarningReason {
  type: 'noFrequentCategories';
}
interface SingleCategoryWarningReason {
  type: 'singleCategory';
}
export type CategoryQualityWarningReason =
  | ManyCategoriesWarningReason
  | ManyDeadCategoriesWarningReason
  | ManyRareCategoriesWarningReason
  | NoFrequentCategoriesWarningReason
  | SingleCategoryWarningReason;
export type CategoryQualityWarningReasonType = CategoryQualityWarningReason['type'];
// A per-job warning bundling one or more quality reasons.
export interface CategoryQualityWarning {
  type: 'categoryQualityWarning';
  jobId: string;
  reasons: CategoryQualityWarningReason[];
}
export type QualityWarning = CategoryQualityWarning;

View file

@ -0,0 +1,289 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { isEqual } from 'lodash';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { usePrevious } from 'react-use';
import {
combineDatasetFilters,
DatasetFilter,
filterDatasetFilter,
isExampleDataIndex,
} from '../../../common/infra_ml';
import {
AvailableIndex,
ValidationIndicesError,
ValidationUIError,
} from '../../components/logging/log_analysis_setup/initial_configuration_step';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types';
// Callback signature shared by the setUp and cleanUpAndSetUp handlers.
type SetupHandler = (
  indices: string[],
  startTime: number | undefined,
  endTime: number | undefined,
  datasetFilter: DatasetFilter
) => void;
// Dependencies injected into useAnalysisSetupState.
interface AnalysisSetupStateArguments<JobType extends string> {
  cleanUpAndSetUpModule: SetupHandler;
  moduleDescriptor: ModuleDescriptor<JobType>;
  setUpModule: SetupHandler;
  sourceConfiguration: ModuleSourceConfiguration;
}
// Default analysis window: the start time defaults to four weeks in the past
// (86400000 ms per day).
const fourWeeksInMs = 86400000 * 7 * 4;
/**
 * Hook backing the analysis setup form: manages the selected time range,
 * validates the candidate indices (field types and available datasets),
 * aggregates validation errors for the UI, and exposes setUp /
 * cleanUpAndSetUp callbacks wired to the current selection.
 *
 * NOTE(review): the effect/dependency wiring at the bottom is order- and
 * identity-sensitive; do not reorder the hook calls.
 */
export const useAnalysisSetupState = <JobType extends string>({
  cleanUpAndSetUpModule,
  moduleDescriptor: { validateSetupDatasets, validateSetupIndices },
  setUpModule,
  sourceConfiguration,
}: AnalysisSetupStateArguments<JobType>) => {
  // Analysis window; an undefined end means "now".
  const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs);
  const [endTime, setEndTime] = useState<number | undefined>(undefined);
  // A half-open range (either bound undefined) is always considered valid.
  const isTimeRangeValid = useMemo(
    () => (startTime != null && endTime != null ? startTime < endTime : true),
    [endTime, startTime]
  );
  // All configured indices start as 'unknown' until validation responds.
  const [validatedIndices, setValidatedIndices] = useState<AvailableIndex[]>(
    sourceConfiguration.indices.map((indexName) => ({
      name: indexName,
      validity: 'unknown' as const,
    }))
  );
  // Applies the index-validation response: marks indices with errors as
  // invalid and promotes the rest to valid (preselecting non-example indices).
  const updateIndicesWithValidationErrors = useCallback(
    (validationErrors: ValidationIndicesError[]) =>
      setValidatedIndices((availableIndices) =>
        availableIndices.map((previousAvailableIndex) => {
          const indexValiationErrors = validationErrors.filter(
            ({ index }) => index === previousAvailableIndex.name
          );
          if (indexValiationErrors.length > 0) {
            return {
              validity: 'invalid',
              name: previousAvailableIndex.name,
              errors: indexValiationErrors,
            };
          } else if (previousAvailableIndex.validity === 'valid') {
            return {
              ...previousAvailableIndex,
              validity: 'valid',
              errors: [],
            };
          } else {
            return {
              validity: 'valid',
              name: previousAvailableIndex.name,
              isSelected: !isExampleDataIndex(previousAvailableIndex.name),
              availableDatasets: [],
              datasetFilter: {
                type: 'includeAll' as const,
              },
            };
          }
        })
      ),
    []
  );
  // Applies the dataset-validation response to every valid index and prunes
  // dataset filters that reference datasets which no longer exist.
  const updateIndicesWithAvailableDatasets = useCallback(
    (availableDatasets: Array<{ indexName: string; datasets: string[] }>) =>
      setValidatedIndices((availableIndices) =>
        availableIndices.map((previousAvailableIndex) => {
          if (previousAvailableIndex.validity !== 'valid') {
            return previousAvailableIndex;
          }
          const availableDatasetsForIndex = availableDatasets.filter(
            ({ indexName }) => indexName === previousAvailableIndex.name
          );
          const newAvailableDatasets = availableDatasetsForIndex.flatMap(
            ({ datasets }) => datasets
          );
          // filter out datasets that have disappeared if this index' datasets were updated
          const newDatasetFilter: DatasetFilter =
            availableDatasetsForIndex.length > 0
              ? filterDatasetFilter(previousAvailableIndex.datasetFilter, (dataset) =>
                  newAvailableDatasets.includes(dataset)
                )
              : previousAvailableIndex.datasetFilter;
          return {
            ...previousAvailableIndex,
            availableDatasets: newAvailableDatasets,
            datasetFilter: newDatasetFilter,
          };
        })
      ),
    []
  );
  const validIndexNames = useMemo(
    () => validatedIndices.filter((index) => index.validity === 'valid').map((index) => index.name),
    [validatedIndices]
  );
  const selectedIndexNames = useMemo(
    () =>
      validatedIndices
        .filter((index) => index.validity === 'valid' && index.isSelected)
        .map((i) => i.name),
    [validatedIndices]
  );
  // Combined filter across all valid indices; invalid ones contribute the
  // neutral 'includeAll' element.
  const datasetFilter = useMemo(
    () =>
      validatedIndices
        .flatMap((validatedIndex) =>
          validatedIndex.validity === 'valid'
            ? validatedIndex.datasetFilter
            : { type: 'includeAll' as const }
        )
        .reduce(combineDatasetFilters, { type: 'includeAll' as const }),
    [validatedIndices]
  );
  // Validates the configured indices' field mappings; rejection clears the list.
  const [validateIndicesRequest, validateIndices] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        return await validateSetupIndices(
          sourceConfiguration.indices,
          sourceConfiguration.timestampField
        );
      },
      onResolve: ({ data: { errors } }) => {
        updateIndicesWithValidationErrors(errors);
      },
      onReject: () => {
        setValidatedIndices([]);
      },
    },
    [sourceConfiguration.indices, sourceConfiguration.timestampField]
  );
  // Fetches the datasets present in the valid indices within the time range.
  const [validateDatasetsRequest, validateDatasets] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        if (validIndexNames.length === 0) {
          return { data: { datasets: [] } };
        }
        return await validateSetupDatasets(
          validIndexNames,
          sourceConfiguration.timestampField,
          startTime ?? 0,
          endTime ?? Date.now()
        );
      },
      onResolve: ({ data: { datasets } }) => {
        updateIndicesWithAvailableDatasets(datasets);
      },
    },
    [validIndexNames, sourceConfiguration.timestampField, startTime, endTime]
  );
  // Setup callbacks bound to the currently selected indices and time range.
  const setUp = useCallback(() => {
    return setUpModule(selectedIndexNames, startTime, endTime, datasetFilter);
  }, [setUpModule, selectedIndexNames, startTime, endTime, datasetFilter]);
  const cleanUpAndSetUp = useCallback(() => {
    return cleanUpAndSetUpModule(selectedIndexNames, startTime, endTime, datasetFilter);
  }, [cleanUpAndSetUpModule, selectedIndexNames, startTime, endTime, datasetFilter]);
  const isValidating = useMemo(
    () => validateIndicesRequest.state === 'pending' || validateDatasetsRequest.state === 'pending',
    [validateDatasetsRequest.state, validateIndicesRequest.state]
  );
  // Errors shown in the form; suppressed entirely while validation is running.
  const validationErrors = useMemo<ValidationUIError[]>(() => {
    if (isValidating) {
      return [];
    }
    return [
      // validate request status
      ...(validateIndicesRequest.state === 'rejected' ||
      validateDatasetsRequest.state === 'rejected'
        ? [{ error: 'NETWORK_ERROR' as const }]
        : []),
      // validation request results
      ...validatedIndices.reduce<ValidationUIError[]>((errors, index) => {
        return index.validity === 'invalid' && selectedIndexNames.includes(index.name)
          ? [...errors, ...index.errors]
          : errors;
      }, []),
      // index count
      ...(selectedIndexNames.length === 0 ? [{ error: 'TOO_FEW_SELECTED_INDICES' as const }] : []),
      // time range
      ...(!isTimeRangeValid ? [{ error: 'INVALID_TIME_RANGE' as const }] : []),
    ];
  }, [
    isValidating,
    validateIndicesRequest.state,
    validateDatasetsRequest.state,
    validatedIndices,
    selectedIndexNames,
    isTimeRangeValid,
  ]);
  const prevStartTime = usePrevious(startTime);
  const prevEndTime = usePrevious(endTime);
  const prevValidIndexNames = usePrevious(validIndexNames);
  // Re-validate indices whenever the time range becomes/stays valid.
  useEffect(() => {
    if (!isTimeRangeValid) {
      return;
    }
    validateIndices();
  }, [isTimeRangeValid, validateIndices]);
  // Re-validate datasets only when the time range or the set of valid
  // indices actually changed, to avoid redundant requests.
  useEffect(() => {
    if (!isTimeRangeValid) {
      return;
    }
    if (
      startTime !== prevStartTime ||
      endTime !== prevEndTime ||
      !isEqual(validIndexNames, prevValidIndexNames)
    ) {
      validateDatasets();
    }
  }, [
    endTime,
    isTimeRangeValid,
    prevEndTime,
    prevStartTime,
    prevValidIndexNames,
    startTime,
    validIndexNames,
    validateDatasets,
  ]);
  return {
    cleanUpAndSetUp,
    datasetFilter,
    endTime,
    isValidating,
    selectedIndexNames,
    setEndTime,
    setStartTime,
    setUp,
    startTime,
    validatedIndices,
    setValidatedIndices,
    validationErrors,
  };
};

View file

@ -0,0 +1,80 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import createContainer from 'constate';
import { useMemo } from 'react';
import { useInfraMLModule } from '../../infra_ml_module';
import { useInfraMLModuleConfiguration } from '../../infra_ml_module_configuration';
import { useInfraMLModuleDefinition } from '../../infra_ml_module_definition';
import { ModuleSourceConfiguration } from '../../infra_ml_module_types';
import { metricHostsModule } from './module_descriptor';
/**
 * Hook composing the shared infra ML hooks for the hosts anomaly detection
 * module, adding derived flags for outdated configurations/definitions and
 * stopped jobs.
 */
export const useMetricHostsModule = ({
  indexPattern,
  sourceId,
  spaceId,
  timestampField,
}: {
  indexPattern: string;
  sourceId: string;
  spaceId: string;
  timestampField: string;
}) => {
  // Memoized so the downstream hooks only re-run when an input changes.
  const sourceConfiguration = useMemo<ModuleSourceConfiguration>(
    () => ({
      indices: indexPattern.split(','),
      sourceId,
      spaceId,
      timestampField,
    }),
    [indexPattern, sourceId, spaceId, timestampField]
  );

  const infraMLModule = useInfraMLModule({
    moduleDescriptor: metricHostsModule,
    sourceConfiguration,
  });

  const { getIsJobConfigurationOutdated } = useInfraMLModuleConfiguration({
    sourceConfiguration,
    moduleDescriptor: metricHostsModule,
  });

  const { fetchModuleDefinition, getIsJobDefinitionOutdated } = useInfraMLModuleDefinition({
    sourceConfiguration,
    moduleDescriptor: metricHostsModule,
  });

  // One outdated summary is enough to flag the whole module.
  const hasOutdatedJobConfigurations = useMemo(
    () => infraMLModule.jobSummaries.some(getIsJobConfigurationOutdated),
    [getIsJobConfigurationOutdated, infraMLModule.jobSummaries]
  );

  const hasOutdatedJobDefinitions = useMemo(
    () => infraMLModule.jobSummaries.some(getIsJobDefinitionOutdated),
    [getIsJobDefinitionOutdated, infraMLModule.jobSummaries]
  );

  const hasStoppedJobs = useMemo(
    () =>
      Object.values(infraMLModule.jobStatus).some(
        (jobStatusValue) => jobStatusValue === 'stopped'
      ),
    [infraMLModule.jobStatus]
  );

  return {
    ...infraMLModule,
    fetchModuleDefinition,
    hasOutdatedJobConfigurations,
    hasOutdatedJobDefinitions,
    hasStoppedJobs,
  };
};

export const [MetricHostsModuleProvider, useMetricHostsModuleContext] = createContainer(
  useMetricHostsModule
);

View file

@ -0,0 +1,126 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { i18n } from '@kbn/i18n';
import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types';
import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../api/ml_get_module';
import { callSetupMlModuleAPI } from '../../api/ml_setup_module_api';
import { callValidateIndicesAPI } from '../../../logs/log_analysis/api/validate_indices';
import { callValidateDatasetsAPI } from '../../../logs/log_analysis/api/validate_datasets';
import {
metricsHostsJobTypes,
getJobId,
MetricsHostsJobType,
DatasetFilter,
bucketSpan,
partitionField,
} from '../../../../../common/infra_ml';
const moduleId = 'metrics_ui_hosts';
// NOTE(review): "anomanly" is a typo for "anomaly". This i18n id/default pair
// is also declared in the k8s module descriptor — fix both files together so
// the shared message id stays consistent.
const moduleName = i18n.translate('xpack.infra.ml.metricsModuleName', {
  defaultMessage: 'Metrics anomanly detection',
});
// NOTE(review): this description talks about "log entry rates" but this module
// analyses host metrics — presumably copied from the logs module; confirm and
// reword (the id is likewise shared with the k8s descriptor).
const moduleDescription = i18n.translate('xpack.infra.ml.metricsHostModuleDescription', {
  defaultMessage: 'Use Machine Learning to automatically detect anomalous log entry rates.',
});
// Maps each hosts job type to its fully qualified (space/source scoped) job id.
const getJobIds = (spaceId: string, sourceId: string) =>
  Object.fromEntries(
    metricsHostsJobTypes.map((jobType) => [jobType, getJobId(spaceId, sourceId, jobType)])
  ) as Record<MetricsHostsJobType, string>;

// Fetches summaries for all hosts job types and keeps only the jobs belonging
// to this space/source combination.
const getJobSummary = async (spaceId: string, sourceId: string) => {
  const summaries = await callJobsSummaryAPI(spaceId, sourceId, metricsHostsJobTypes);
  const relevantJobIds = Object.values(getJobIds(spaceId, sourceId));
  return summaries.filter((summary) => relevantJobIds.includes(summary.id));
};

// Loads the raw module definition for this module id from the ML plugin.
const getModuleDefinition = async () => {
  return await callGetMlModuleAPI(moduleId);
};

/**
 * Installs the hosts ML module, overriding each job's time field and stamping
 * the source configuration into its custom settings so configuration drift
 * can be detected later.
 *
 * NOTE(review): `datasetFilter` and `pField` are accepted for interface parity
 * but are not currently applied — confirm whether partitioning is intended.
 * NOTE(review): unlike the k8s descriptor, no analysis_config.bucket_span
 * override is sent here — confirm whether that asymmetry is intentional.
 */
const setUpModule = async (
  start: number | undefined,
  end: number | undefined,
  datasetFilter: DatasetFilter,
  { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
  pField?: string
) => {
  const indexNamePattern = indices.join(',');
  const jobOverrides = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out'].map(
    (overriddenJobId) => ({
      job_id: overriddenJobId,
      data_description: {
        time_field: timestampField,
      },
      custom_settings: {
        // Persisted so later sessions can detect outdated job configurations.
        metrics_source_config: {
          indexPattern: indexNamePattern,
          timestampField,
          bucketSpan,
        },
      },
    })
  );
  return callSetupMlModuleAPI(
    moduleId,
    start,
    end,
    spaceId,
    sourceId,
    indexNamePattern,
    jobOverrides,
    []
  );
};
// Removes this module's jobs and datafeeds for the given space/source.
const cleanUpModule = async (spaceId: string, sourceId: string) => {
  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes);
};

// Checks that the candidate indices carry a date-typed timestamp field and a
// keyword-typed partition field before setup is allowed.
const validateSetupIndices = async (indices: string[], timestampField: string) => {
  const requiredFields = [
    { name: timestampField, validTypes: ['date'] },
    { name: partitionField, validTypes: ['keyword'] },
  ];
  return await callValidateIndicesAPI(indices, requiredFields);
};

// Reports which datasets exist in the given indices within the time range.
const validateSetupDatasets = async (
  indices: string[],
  timestampField: string,
  startTime: number,
  endTime: number
) => {
  return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
};
// Descriptor wiring the hosts anomaly detection module into the shared infra
// ML hooks (setup, cleanup, validation, and job lookups).
export const metricHostsModule: ModuleDescriptor<MetricsHostsJobType> = {
  moduleId,
  moduleName,
  moduleDescription,
  jobTypes: metricsHostsJobTypes,
  bucketSpan,
  getJobIds,
  getJobSummary,
  getModuleDefinition,
  setUpModule,
  cleanUpModule,
  validateSetupDatasets,
  validateSetupIndices,
};

View file

@ -0,0 +1,80 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import createContainer from 'constate';
import { useMemo } from 'react';
import { useInfraMLModule } from '../../infra_ml_module';
import { useInfraMLModuleConfiguration } from '../../infra_ml_module_configuration';
import { useInfraMLModuleDefinition } from '../../infra_ml_module_definition';
import { ModuleSourceConfiguration } from '../../infra_ml_module_types';
import { metricHostsModule } from './module_descriptor';
/**
 * Hook composing the shared infra ML hooks for the k8s anomaly detection
 * module, adding derived flags for outdated configurations/definitions and
 * stopped jobs.
 *
 * NOTE(review): `metricHostsModule` here is the k8s descriptor's export (see
 * this directory's module_descriptor); the export name is misleading.
 */
export const useMetricK8sModule = ({
  indexPattern,
  sourceId,
  spaceId,
  timestampField,
}: {
  indexPattern: string;
  sourceId: string;
  spaceId: string;
  timestampField: string;
}) => {
  // Memoized so the downstream hooks only re-run when an input changes.
  const sourceConfiguration = useMemo<ModuleSourceConfiguration>(
    () => ({
      indices: indexPattern.split(','),
      sourceId,
      spaceId,
      timestampField,
    }),
    [indexPattern, sourceId, spaceId, timestampField]
  );

  const infraMLModule = useInfraMLModule({
    moduleDescriptor: metricHostsModule,
    sourceConfiguration,
  });

  const { getIsJobConfigurationOutdated } = useInfraMLModuleConfiguration({
    sourceConfiguration,
    moduleDescriptor: metricHostsModule,
  });

  const { fetchModuleDefinition, getIsJobDefinitionOutdated } = useInfraMLModuleDefinition({
    sourceConfiguration,
    moduleDescriptor: metricHostsModule,
  });

  // One outdated summary is enough to flag the whole module.
  const hasOutdatedJobConfigurations = useMemo(
    () => infraMLModule.jobSummaries.some(getIsJobConfigurationOutdated),
    [getIsJobConfigurationOutdated, infraMLModule.jobSummaries]
  );

  const hasOutdatedJobDefinitions = useMemo(
    () => infraMLModule.jobSummaries.some(getIsJobDefinitionOutdated),
    [getIsJobDefinitionOutdated, infraMLModule.jobSummaries]
  );

  const hasStoppedJobs = useMemo(
    () =>
      Object.values(infraMLModule.jobStatus).some(
        (jobStatusValue) => jobStatusValue === 'stopped'
      ),
    [infraMLModule.jobStatus]
  );

  return {
    ...infraMLModule,
    fetchModuleDefinition,
    hasOutdatedJobConfigurations,
    hasOutdatedJobDefinitions,
    hasStoppedJobs,
  };
};

export const [MetricK8sModuleProvider, useMetricK8sModuleContext] = createContainer(
  useMetricK8sModule
);

View file

@ -0,0 +1,129 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { i18n } from '@kbn/i18n';
import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types';
import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../api/ml_get_module';
import { callSetupMlModuleAPI } from '../../api/ml_setup_module_api';
import { callValidateIndicesAPI } from '../../../logs/log_analysis/api/validate_indices';
import { callValidateDatasetsAPI } from '../../../logs/log_analysis/api/validate_datasets';
import {
metricsK8SJobTypes,
getJobId,
MetricK8sJobType,
DatasetFilter,
bucketSpan,
partitionField,
} from '../../../../../common/infra_ml';
const moduleId = 'metrics_ui_k8s';
// NOTE(review): "anomanly" is a typo for "anomaly". This i18n id/default pair
// is also declared in the hosts module descriptor — fix both files together so
// the shared message id stays consistent.
const moduleName = i18n.translate('xpack.infra.ml.metricsModuleName', {
  defaultMessage: 'Metrics anomanly detection',
});
// NOTE(review): this description talks about "log entry rates" but this module
// analyses k8s metrics — presumably copied from the logs module; confirm and
// reword (the id is likewise shared with the hosts descriptor).
const moduleDescription = i18n.translate('xpack.infra.ml.metricsHostModuleDescription', {
  defaultMessage: 'Use Machine Learning to automatically detect anomalous log entry rates.',
});
const getJobIds = (spaceId: string, sourceId: string) =>
metricsK8SJobTypes.reduce(
(accumulatedJobIds, jobType) => ({
...accumulatedJobIds,
[jobType]: getJobId(spaceId, sourceId, jobType),
}),
{} as Record<MetricK8sJobType, string>
);
const getJobSummary = async (spaceId: string, sourceId: string) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, metricsK8SJobTypes);
const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
// Fetch the ML module definition (job/datafeed templates) for this module.
const getModuleDefinition = async () => callGetMlModuleAPI(moduleId);
/**
 * Create the K8s metrics ML jobs and datafeeds via the setup-module API.
 *
 * The same overrides (bucket span, time field, and a custom-settings record
 * of the source configuration) are applied to each of the three jobs.
 *
 * NOTE(review): `pField` is accepted but never applied to any job override —
 * confirm whether the K8s jobs intentionally ignore the user-selected
 * partition field.
 */
const setUpModule = async (
  start: number | undefined,
  end: number | undefined,
  datasetFilter: DatasetFilter,
  { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
  pField?: string
) => {
  const indexNamePattern = indices.join(',');
  const overriddenJobIds = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out'];
  const jobOverrides = overriddenJobIds.map((jobId) => ({
    job_id: jobId,
    analysis_config: {
      bucket_span: `${bucketSpan}ms`,
    },
    data_description: {
      time_field: timestampField,
    },
    custom_settings: {
      metrics_source_config: {
        indexPattern: indexNamePattern,
        timestampField,
        bucketSpan,
      },
    },
  }));
  return callSetupMlModuleAPI(
    moduleId,
    start,
    end,
    spaceId,
    sourceId,
    indexNamePattern,
    jobOverrides,
    []
  );
};
// Remove all jobs and datafeeds belonging to this module for the space/source.
const cleanUpModule = async (spaceId: string, sourceId: string) =>
  cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes);
/**
 * Validate that the candidate indices carry the fields this module needs:
 * a date-typed timestamp field and a keyword-typed partition field.
 */
const validateSetupIndices = async (indices: string[], timestampField: string) => {
  const requiredFields = [
    { name: timestampField, validTypes: ['date'] },
    { name: partitionField, validTypes: ['keyword'] },
  ];
  return callValidateIndicesAPI(indices, requiredFields);
};
// Validate the datasets present in the given indices over the time window.
const validateSetupDatasets = async (
  indices: string[],
  timestampField: string,
  startTime: number,
  endTime: number
) => callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
// Module descriptor consumed by the anomaly detection flyout / module
// provider for the Kubernetes metrics jobs.
//
// NOTE(review): the exported binding is named `metricHostsModule` even
// though this file configures the K8s jobs (metricsK8SJobTypes /
// MetricK8sJobType). Renaming would break existing importers, so flagging
// instead of changing — consider `metricK8sModule` in a follow-up.
export const metricHostsModule: ModuleDescriptor<MetricK8sJobType> = {
  moduleId,
  moduleName,
  moduleDescription,
  jobTypes: metricsK8SJobTypes,
  bucketSpan,
  getJobIds,
  getJobSummary,
  getModuleDefinition,
  setUpModule,
  cleanUpModule,
  validateSetupDatasets,
  validateSetupIndices,
};

View file

@ -38,6 +38,8 @@ import { MetricsAlertDropdown } from '../../alerting/metric_threshold/components
import { SavedView } from '../../containers/saved_view/saved_view';
import { SourceConfigurationFields } from '../../graphql/types';
import { AlertPrefillProvider } from '../../alerting/use_alert_prefill';
import { InfraMLCapabilitiesProvider } from '../../containers/ml/infra_ml_capabilities';
import { AnomalyDetectionFlyout } from './inventory_view/components/ml/anomaly_detection/anomoly_detection_flyout';
const ADD_DATA_LABEL = i18n.translate('xpack.infra.metricsHeaderAddDataButtonLabel', {
defaultMessage: 'Add data',
@ -55,110 +57,118 @@ export const InfrastructurePage = ({ match }: RouteComponentProps) => {
<WaffleOptionsProvider>
<WaffleTimeProvider>
<WaffleFiltersProvider>
<ColumnarPage>
<DocumentTitle
title={i18n.translate('xpack.infra.homePage.documentTitle', {
defaultMessage: 'Metrics',
})}
/>
<HelpCenterContent
feedbackLink="https://discuss.elastic.co/c/metrics"
appName={i18n.translate('xpack.infra.header.infrastructureHelpAppName', {
defaultMessage: 'Metrics',
})}
/>
<Header
breadcrumbs={[
{
text: i18n.translate('xpack.infra.header.infrastructureTitle', {
defaultMessage: 'Metrics',
}),
},
]}
readOnlyBadge={!uiCapabilities?.infrastructure?.save}
/>
<AppNavigation
aria-label={i18n.translate('xpack.infra.header.infrastructureNavigationTitle', {
defaultMessage: 'Metrics',
})}
>
<EuiFlexGroup gutterSize={'none'} alignItems={'center'}>
<EuiFlexItem>
<RoutedTabs
tabs={[
{
app: 'metrics',
title: i18n.translate('xpack.infra.homePage.inventoryTabTitle', {
defaultMessage: 'Inventory',
}),
pathname: '/inventory',
},
{
app: 'metrics',
title: i18n.translate(
'xpack.infra.homePage.metricsExplorerTabTitle',
{
defaultMessage: 'Metrics Explorer',
}
),
pathname: '/explorer',
},
{
app: 'metrics',
title: i18n.translate('xpack.infra.homePage.settingsTabTitle', {
defaultMessage: 'Settings',
}),
pathname: '/settings',
},
]}
/>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<Route path={'/explorer'} component={MetricsAlertDropdown} />
<Route path={'/inventory'} component={InventoryAlertDropdown} />
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiButtonEmpty
href={kibana.services?.application?.getUrlForApp(
'/home#/tutorial_directory/metrics'
)}
size="s"
color="primary"
iconType="plusInCircle"
>
{ADD_DATA_LABEL}
</EuiButtonEmpty>
</EuiFlexItem>
</EuiFlexGroup>
</AppNavigation>
<Switch>
<Route path={'/inventory'} component={SnapshotPage} />
<Route
path={'/explorer'}
render={(props) => (
<WithSource>
{({ configuration, createDerivedIndexPattern }) => (
<MetricsExplorerOptionsContainer.Provider>
<WithMetricsExplorerOptionsUrlState />
{configuration ? (
<PageContent
configuration={configuration}
createDerivedIndexPattern={createDerivedIndexPattern}
/>
) : (
<SourceLoadingPage />
)}
</MetricsExplorerOptionsContainer.Provider>
)}
</WithSource>
)}
<InfraMLCapabilitiesProvider>
<ColumnarPage>
<DocumentTitle
title={i18n.translate('xpack.infra.homePage.documentTitle', {
defaultMessage: 'Metrics',
})}
/>
<Route path={'/settings'} component={MetricsSettingsPage} />
</Switch>
</ColumnarPage>
<HelpCenterContent
feedbackLink="https://discuss.elastic.co/c/metrics"
appName={i18n.translate('xpack.infra.header.infrastructureHelpAppName', {
defaultMessage: 'Metrics',
})}
/>
<Header
breadcrumbs={[
{
text: i18n.translate('xpack.infra.header.infrastructureTitle', {
defaultMessage: 'Metrics',
}),
},
]}
readOnlyBadge={!uiCapabilities?.infrastructure?.save}
/>
<AppNavigation
aria-label={i18n.translate(
'xpack.infra.header.infrastructureNavigationTitle',
{
defaultMessage: 'Metrics',
}
)}
>
<EuiFlexGroup gutterSize={'none'} alignItems={'center'}>
<EuiFlexItem>
<RoutedTabs
tabs={[
{
app: 'metrics',
title: i18n.translate('xpack.infra.homePage.inventoryTabTitle', {
defaultMessage: 'Inventory',
}),
pathname: '/inventory',
},
{
app: 'metrics',
title: i18n.translate(
'xpack.infra.homePage.metricsExplorerTabTitle',
{
defaultMessage: 'Metrics Explorer',
}
),
pathname: '/explorer',
},
{
app: 'metrics',
title: i18n.translate('xpack.infra.homePage.settingsTabTitle', {
defaultMessage: 'Settings',
}),
pathname: '/settings',
},
]}
/>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<Route path={'/inventory'} component={AnomalyDetectionFlyout} />
</EuiFlexItem>
<EuiFlexItem grow={false}>
<Route path={'/explorer'} component={MetricsAlertDropdown} />
<Route path={'/inventory'} component={InventoryAlertDropdown} />
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiButtonEmpty
href={kibana.services?.application?.getUrlForApp(
'/home#/tutorial_directory/metrics'
)}
size="s"
color="primary"
iconType="plusInCircle"
>
{ADD_DATA_LABEL}
</EuiButtonEmpty>
</EuiFlexItem>
</EuiFlexGroup>
</AppNavigation>
<Switch>
<Route path={'/inventory'} component={SnapshotPage} />
<Route
path={'/explorer'}
render={(props) => (
<WithSource>
{({ configuration, createDerivedIndexPattern }) => (
<MetricsExplorerOptionsContainer.Provider>
<WithMetricsExplorerOptionsUrlState />
{configuration ? (
<PageContent
configuration={configuration}
createDerivedIndexPattern={createDerivedIndexPattern}
/>
) : (
<SourceLoadingPage />
)}
</MetricsExplorerOptionsContainer.Provider>
)}
</WithSource>
)}
/>
<Route path={'/settings'} component={MetricsSettingsPage} />
</Switch>
</ColumnarPage>
</InfraMLCapabilitiesProvider>
</WaffleFiltersProvider>
</WaffleTimeProvider>
</WaffleOptionsProvider>

View file

@ -0,0 +1,92 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { useState, useCallback } from 'react';
import { EuiButtonEmpty, EuiFlyout } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { FlyoutHome } from './flyout_home';
import { JobSetupScreen } from './job_setup_screen';
import { useInfraMLCapabilities } from '../../../../../../containers/ml/infra_ml_capabilities';
import { MetricHostsModuleProvider } from '../../../../../../containers/ml/modules/metrics_hosts/module';
import { MetricK8sModuleProvider } from '../../../../../../containers/ml/modules/metrics_k8s/module';
import { useSourceViaHttp } from '../../../../../../containers/source/use_source_via_http';
import { useActiveKibanaSpace } from '../../../../../../hooks/use_kibana_space';
/**
 * Entry point for Metrics anomaly detection: renders the "Anomaly Detection"
 * button in the inventory header and, once clicked, a flyout hosting either
 * the overview screen ('home') or the job creation screen ('setup').
 */
export const AnomalyDetectionFlyout = () => {
  const { hasInfraMLSetupCapabilities } = useInfraMLCapabilities();
  // Whether the flyout is currently visible.
  const [showFlyout, setShowFlyout] = useState(false);
  // Which screen the flyout shows.
  const [screenName, setScreenName] = useState<'home' | 'setup'>('home');
  // Parameters for the 'setup' screen; currently only { jobType }.
  const [screenParams, setScreenParams] = useState<any | null>(null);
  const { source } = useSourceViaHttp({
    sourceId: 'default',
    type: 'metrics',
  });
  const { space } = useActiveKibanaSpace();

  // Opening always resets back to the overview screen.
  const openFlyout = useCallback(() => {
    setScreenName('home');
    setShowFlyout(true);
  }, []);

  const openJobSetup = useCallback(
    (jobType: 'hosts' | 'kubernetes') => {
      setScreenName('setup');
      setScreenParams({ jobType });
    },
    [setScreenName]
  );

  const closeFlyout = useCallback(() => {
    setShowFlyout(false);
  }, []);

  // The metrics index alias and the active space are both required to
  // configure the module providers below; render nothing until they resolve.
  if (source?.configuration.metricAlias == null || space == null) {
    return null;
  }

  return (
    <>
      <EuiButtonEmpty iconSide={'right'} onClick={openFlyout}>
        <FormattedMessage
          id="xpack.infra.ml.anomalyDetectionButton"
          defaultMessage="Anomaly Detection"
        />
      </EuiButtonEmpty>
      {showFlyout && (
        <MetricHostsModuleProvider
          indexPattern={source?.configuration.metricAlias ?? ''}
          sourceId={'default'}
          spaceId={space.id}
          timestampField={source?.configuration.fields.timestamp ?? ''}
        >
          <MetricK8sModuleProvider
            indexPattern={source?.configuration.metricAlias ?? ''}
            sourceId={'default'}
            spaceId={space.id}
            timestampField={source?.configuration.fields.timestamp ?? ''}
          >
            <EuiFlyout onClose={closeFlyout} data-test-subj="loadMLFlyout">
              {screenName === 'home' && (
                <FlyoutHome
                  hasSetupCapabilities={hasInfraMLSetupCapabilities}
                  goToSetup={openJobSetup}
                />
              )}
              {screenName === 'setup' && (
                <JobSetupScreen
                  goHome={openFlyout}
                  closeFlyout={closeFlyout}
                  jobType={screenParams.jobType}
                />
              )}
            </EuiFlyout>
          </MetricK8sModuleProvider>
        </MetricHostsModuleProvider>
      )}
    </>
  );
};

View file

@ -0,0 +1,333 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { useState, useCallback, useEffect } from 'react';
import { EuiFlyoutHeader, EuiTitle, EuiFlyoutBody, EuiTabs, EuiTab, EuiSpacer } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiText, EuiFlexGroup, EuiFlexItem, EuiCard, EuiIcon } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { EuiCallOut } from '@elastic/eui';
import { EuiButton } from '@elastic/eui';
import { EuiButtonEmpty } from '@elastic/eui';
import moment from 'moment';
import { useInfraMLCapabilitiesContext } from '../../../../../../containers/ml/infra_ml_capabilities';
import { SubscriptionSplashContent } from './subscription_splash_content';
import {
MissingResultsPrivilegesPrompt,
MissingSetupPrivilegesPrompt,
} from '../../../../../../components/logging/log_analysis_setup';
import { useMetricHostsModuleContext } from '../../../../../../containers/ml/modules/metrics_hosts/module';
import { useMetricK8sModuleContext } from '../../../../../../containers/ml/modules/metrics_k8s/module';
import { LoadingPage } from '../../../../../../components/loading_page';
import { useLinkProps } from '../../../../../../hooks/use_link_props';
interface Props {
  hasSetupCapabilities: boolean;
  goToSetup(type: 'hosts' | 'kubernetes'): void;
}

/**
 * Overview ('home') screen of the anomaly detection flyout. Shows a success
 * callout when jobs already exist, a tab linking to the ML Anomaly Explorer,
 * and the job-creation cards; also gates on license/read/setup capabilities.
 */
export const FlyoutHome = (props: Props) => {
  const [tab, setTab] = useState<'jobs' | 'anomalies'>('jobs');
  const { goToSetup } = props;
  const {
    fetchJobStatus: fetchHostJobStatus,
    setupStatus: hostSetupStatus,
    jobSummaries: hostJobSummaries,
  } = useMetricHostsModuleContext();
  const {
    fetchJobStatus: fetchK8sJobStatus,
    setupStatus: k8sSetupStatus,
    jobSummaries: k8sJobSummaries,
  } = useMetricK8sModuleContext();
  const {
    hasInfraMLCapabilites,
    hasInfraMLReadCapabilities,
    hasInfraMLSetupCapabilities,
  } = useInfraMLCapabilitiesContext();

  const createHosts = useCallback(() => {
    goToSetup('hosts');
  }, [goToSetup]);

  const createK8s = useCallback(() => {
    goToSetup('kubernetes');
  }, [goToSetup]);

  const goToJobs = useCallback(() => {
    setTab('jobs');
  }, []);

  // Ids of every existing job (host + k8s); used to deep-link into ML.
  const jobIds = [
    ...(k8sJobSummaries || []).map((k) => k.id),
    ...(hostJobSummaries || []).map((h) => h.id),
  ];
  const anomaliesUrl = useLinkProps({
    app: 'ml',
    pathname: `/explorer?_g=${createResultsUrl(jobIds)}`,
  });

  // Refresh job status when the flyout mounts (requires read capability).
  useEffect(() => {
    if (hasInfraMLReadCapabilities) {
      fetchHostJobStatus();
      fetchK8sJobStatus();
    }
  }, [fetchK8sJobStatus, fetchHostJobStatus, hasInfraMLReadCapabilities]);

  if (!hasInfraMLCapabilites) {
    return <SubscriptionSplashContent />;
  } else if (!hasInfraMLReadCapabilities) {
    return <MissingResultsPrivilegesPrompt />;
  } else if (hostSetupStatus.type === 'initializing' || k8sSetupStatus.type === 'initializing') {
    return (
      <LoadingPage
        message={i18n.translate('xpack.infra.ml.anomalyFlyout.jobStatusLoadingMessage', {
          // Fixed typo: 'metris' -> 'metrics'.
          defaultMessage: 'Checking status of metrics jobs...',
        })}
      />
    );
  } else if (!hasInfraMLSetupCapabilities) {
    return <MissingSetupPrivilegesPrompt />;
  } else {
    return (
      <>
        <EuiFlyoutHeader>
          <EuiTitle size="m">
            <h2>
              <FormattedMessage
                defaultMessage="Machine Learning anomaly detection"
                id="xpack.infra.ml.anomalyFlyout.flyoutHeader"
              />
            </h2>
          </EuiTitle>
        </EuiFlyoutHeader>
        <EuiFlyoutBody>
          <EuiTabs>
            <EuiTab isSelected={tab === 'jobs'} onClick={goToJobs}>
              <FormattedMessage
                defaultMessage="Jobs"
                id="xpack.infra.ml.anomalyFlyout.jobsTabLabel"
              />
            </EuiTab>
            <EuiTab
              disabled={jobIds.length === 0}
              isSelected={tab === 'anomalies'}
              {...anomaliesUrl}
            >
              <FormattedMessage
                defaultMessage="Anomalies"
                id="xpack.infra.ml.anomalyFlyout.anomaliesTabLabel"
              />
            </EuiTab>
          </EuiTabs>
          <EuiSpacer size="l" />
          {/* Bug fix: the callout previously rendered only when host jobs
              existed, hiding it for users who had set up only K8s jobs even
              though it receives a `hasK8sJobs` prop. */}
          {(hostJobSummaries.length > 0 || k8sJobSummaries.length > 0) && (
            <>
              <JobsEnabledCallout
                hasHostJobs={hostJobSummaries.length > 0}
                hasK8sJobs={k8sJobSummaries.length > 0}
              />
              <EuiSpacer size="l" />
            </>
          )}
          {tab === 'jobs' && (
            <CreateJobTab
              hasHostJobs={hostJobSummaries.length > 0}
              hasK8sJobs={k8sJobSummaries.length > 0}
              hasSetupCapabilities={props.hasSetupCapabilities}
              createHosts={createHosts}
              createK8s={createK8s}
            />
          )}
        </EuiFlyoutBody>
      </>
    );
  }
};
interface CalloutProps {
  hasHostJobs: boolean;
  hasK8sJobs: boolean;
}

/**
 * Success callout shown when ML jobs already exist, naming which resource
 * types ("Hosts", "Kubernetes", or both) are covered, plus a button linking
 * to the ML jobs management page.
 */
const JobsEnabledCallout = (props: CalloutProps) => {
  let target = '';
  // Build the human-readable list of enabled targets.
  // NOTE(review): the first branch reuses ids 'create.hostTitle' and
  // 'create.k8sSuccessTitle' for its fragments while the single-target
  // branches use 'hostSuccessTitle'/'k8sSuccessTitle' — confirm the id set
  // is intentional and unique across the i18n bundle.
  if (props.hasHostJobs && props.hasK8sJobs) {
    target = `${i18n.translate('xpack.infra.ml.anomalyFlyout.create.hostTitle', {
      defaultMessage: 'Hosts',
    })} and ${i18n.translate('xpack.infra.ml.anomalyFlyout.create.k8sSuccessTitle', {
      defaultMessage: 'Kubernetes',
    })}`;
  } else if (props.hasHostJobs) {
    target = i18n.translate('xpack.infra.ml.anomalyFlyout.create.hostSuccessTitle', {
      defaultMessage: 'Hosts',
    });
  } else if (props.hasK8sJobs) {
    target = i18n.translate('xpack.infra.ml.anomalyFlyout.create.k8sSuccessTitle', {
      defaultMessage: 'Kubernetes',
    });
  }

  const manageJobsLinkProps = useLinkProps({
    app: 'ml',
    pathname: '/jobs',
  });

  return (
    <>
      <EuiCallOut
        size="m"
        color="success"
        title={
          <FormattedMessage
            defaultMessage="Anomaly detection enabled for {target}"
            id="xpack.infra.ml.anomalyFlyout.enabledCallout"
            values={{ target }}
          />
        }
        iconType="check"
      />
      <EuiSpacer size="l" />
      <EuiButton {...manageJobsLinkProps}>
        <FormattedMessage
          defaultMessage="Manage Jobs"
          id="xpack.infra.ml.anomalyFlyout.manageJobs"
        />
      </EuiButton>
    </>
  );
};
interface CreateJobTab {
  hasSetupCapabilities: boolean;
  hasHostJobs: boolean;
  hasK8sJobs: boolean;
  createHosts(): void;
  createK8s(): void;
}

/**
 * "Jobs" tab content: one card per resource type (Hosts, Kubernetes Pods)
 * with a Create/Recreate button depending on whether jobs already exist.
 */
const CreateJobTab = (props: CreateJobTab) => {
  return (
    <>
      <div>
        <EuiText>
          <h3>
            <FormattedMessage
              defaultMessage="Create ML Jobs"
              id="xpack.infra.ml.anomalyFlyout.create.jobsTitle"
            />
          </h3>
        </EuiText>
        <EuiText>
          <p>
            <FormattedMessage
              defaultMessage="Machine Learning jobs are available for the following resource types. Enable these jobs to begin detecting anomalies in your infrastructure metrics"
              id="xpack.infra.ml.anomalyFlyout.create.description"
            />
          </p>
        </EuiText>
      </div>

      <EuiSpacer size="l" />
      <EuiFlexGroup gutterSize={'m'}>
        <EuiFlexItem>
          <EuiCard
            // NOTE(review): disabled-state wiring is commented out, so the
            // card stays enabled even without setup capabilities — and the
            // commented expression lacks a negation; confirm intent.
            // isDisabled={props.hasSetupCapabilities}
            icon={<EuiIcon type={'storage'} />}
            // title="Hosts"
            title={
              <FormattedMessage
                defaultMessage="Hosts"
                id="xpack.infra.ml.anomalyFlyout.create.hostTitle"
              />
            }
            description={
              <FormattedMessage
                defaultMessage="Detect anomalies for CPU usage, memory usage, network traffic, and load."
                id="xpack.infra.ml.anomalyFlyout.create.hostDescription"
              />
            }
            footer={
              <>
                {props.hasHostJobs && (
                  <EuiButtonEmpty onClick={props.createHosts}>
                    <FormattedMessage
                      defaultMessage="Recreate Jobs"
                      id="xpack.infra.ml.anomalyFlyout.create.recreateButton"
                    />
                  </EuiButtonEmpty>
                )}
                {!props.hasHostJobs && (
                  <EuiButton onClick={props.createHosts}>
                    <FormattedMessage
                      defaultMessage="Create Jobs"
                      id="xpack.infra.ml.anomalyFlyout.create.createButton"
                    />
                  </EuiButton>
                )}
              </>
            }
          />
        </EuiFlexItem>
        <EuiFlexItem>
          <EuiCard
            // isDisabled={props.hasSetupCapabilities}
            icon={<EuiIcon type={'logoKubernetes'} />}
            title={
              <FormattedMessage
                defaultMessage="Kubernetes Pods"
                id="xpack.infra.ml.anomalyFlyout.create.k8sTitle"
              />
            }
            description={
              <FormattedMessage
                defaultMessage="Detect anomalies for CPU usage, memory usage, network traffic, and load."
                id="xpack.infra.ml.anomalyFlyout.create.k8sDescription"
              />
            }
            footer={
              <>
                {/* NOTE(review): these two buttons reuse the same i18n ids as
                    the Hosts card above — duplicated ids may trip i18n checks;
                    confirm against the translation pipeline. */}
                {props.hasK8sJobs && (
                  <EuiButtonEmpty onClick={props.createK8s}>
                    <FormattedMessage
                      defaultMessage="Recreate Jobs"
                      id="xpack.infra.ml.anomalyFlyout.create.recreateButton"
                    />
                  </EuiButtonEmpty>
                )}
                {!props.hasK8sJobs && (
                  <EuiButton onClick={props.createK8s}>
                    <FormattedMessage
                      defaultMessage="Create Jobs"
                      id="xpack.infra.ml.anomalyFlyout.create.createButton"
                    />
                  </EuiButton>
                )}
              </>
            }
          />
        </EuiFlexItem>
      </EuiFlexGroup>
    </>
  );
};
/**
 * Build the `_g` rison fragment (plus `_a` query) that deep-links into the
 * ML Anomaly Explorer for the given job ids, with a time range spanning the
 * last 4 weeks up to now.
 *
 * Improvement: replaced the moment dependency with the standard `Date` API —
 * `Date.prototype.toISOString()` emits the same ISO-8601 UTC format as
 * `moment().toISOString()`, and "4 weeks" is exactly 28 days in both.
 *
 * @param jobIds ML job ids to preselect in the explorer.
 * @param mode   Time-range mode; 'invalid' appends `mode:invalid`, any other
 *               value (default 'absolute') leaves the range untouched.
 */
function createResultsUrl(jobIds: string[], mode = 'absolute') {
  const idString = jobIds.map((jobId) => `'${jobId}'`).join(',');
  const FOUR_WEEKS_MS = 4 * 7 * 24 * 60 * 60 * 1000;
  const nowMs = Date.now();
  const from = new Date(nowMs - FOUR_WEEKS_MS).toISOString();
  const to = new Date(nowMs).toISOString();
  let path = '';
  path += `(ml:(jobIds:!(${idString}))`;
  path += `,refreshInterval:(display:Off,pause:!f,value:0),time:(from:'${from}'`;
  path += `,to:'${to}'`;
  if (mode === 'invalid') {
    path += `,mode:invalid`;
  }
  path += "))&_a=(query:(query_string:(analyze_wildcard:!t,query:'*')))";
  return path;
}

View file

@ -0,0 +1,277 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { useState, useCallback, useMemo, useEffect } from 'react';
import { EuiForm, EuiDescribedFormGroup, EuiFormRow } from '@elastic/eui';
import { EuiText, EuiSpacer } from '@elastic/eui';
import { EuiFlyoutHeader, EuiTitle, EuiFlyoutBody } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiFlyoutFooter } from '@elastic/eui';
import { EuiButton } from '@elastic/eui';
import { EuiFlexGroup, EuiFlexItem, EuiButtonEmpty } from '@elastic/eui';
import moment, { Moment } from 'moment';
import { EuiComboBox } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { EuiLoadingSpinner } from '@elastic/eui';
import { useSourceViaHttp } from '../../../../../../containers/source/use_source_via_http';
import { useMetricK8sModuleContext } from '../../../../../../containers/ml/modules/metrics_k8s/module';
import { useMetricHostsModuleContext } from '../../../../../../containers/ml/modules/metrics_hosts/module';
import { FixedDatePicker } from '../../../../../../components/fixed_datepicker';
interface Props {
  jobType: 'hosts' | 'kubernetes';
  closeFlyout(): void;
  goHome(): void;
}

/**
 * Job creation screen of the anomaly detection flyout. Lets the user pick a
 * start date and an optional partition field, then creates (or recreates)
 * the ML jobs for the selected resource type.
 */
export const JobSetupScreen = (props: Props) => {
  // Captured once so the date picker's maxDate stays stable across renders.
  const [now] = useState(() => moment());
  const { goHome } = props;
  const [startDate, setStartDate] = useState<Moment>(now.clone().subtract(4, 'weeks'));
  const [partitionField, setPartitionField] = useState<string[] | null>(null);
  const h = useMetricHostsModuleContext();
  const k = useMetricK8sModuleContext();
  const { createDerivedIndexPattern } = useSourceViaHttp({
    sourceId: 'default',
    type: 'metrics',
  });
  // Fixed misspelled local: 'indicies' -> 'indices'.
  // NOTE(review): both module providers share the same source configuration,
  // so reading indices from the hosts context for either job type appears
  // equivalent — confirm.
  const indices = h.sourceConfiguration.indices;

  // Select the module context matching the requested job type.
  const setupStatus = useMemo(() => {
    if (props.jobType === 'kubernetes') {
      return k.setupStatus;
    } else {
      return h.setupStatus;
    }
  }, [props.jobType, k.setupStatus, h.setupStatus]);

  const cleanUpAndSetUpModule = useMemo(() => {
    if (props.jobType === 'kubernetes') {
      return k.cleanUpAndSetUpModule;
    } else {
      return h.cleanUpAndSetUpModule;
    }
  }, [props.jobType, k.cleanUpAndSetUpModule, h.cleanUpAndSetUpModule]);

  const setUpModule = useMemo(() => {
    if (props.jobType === 'kubernetes') {
      return k.setUpModule;
    } else {
      return h.setUpModule;
    }
  }, [props.jobType, k.setUpModule, h.setUpModule]);

  // Existing summaries mean jobs already exist and must be cleaned up first.
  const hasSummaries = useMemo(() => {
    if (props.jobType === 'kubernetes') {
      return k.jobSummaries.length > 0;
    } else {
      return h.jobSummaries.length > 0;
    }
  }, [props.jobType, k.jobSummaries, h.jobSummaries]);

  const derivedIndexPattern = useMemo(() => createDerivedIndexPattern('metrics'), [
    createDerivedIndexPattern,
  ]);

  const updateStart = useCallback((date: Moment) => {
    setStartDate(date);
  }, []);

  const createJobs = useCallback(() => {
    if (hasSummaries) {
      cleanUpAndSetUpModule(
        indices,
        moment(startDate).toDate().getTime(),
        undefined,
        { type: 'includeAll' },
        partitionField ? partitionField[0] : undefined
      );
    } else {
      setUpModule(
        indices,
        moment(startDate).toDate().getTime(),
        undefined,
        { type: 'includeAll' },
        partitionField ? partitionField[0] : undefined
      );
    }
  }, [cleanUpAndSetUpModule, setUpModule, hasSummaries, indices, partitionField, startDate]);

  const onPartitionFieldChange = useCallback((value: Array<{ label: string }>) => {
    setPartitionField(value.map((v) => v.label));
  }, []);

  // K8s jobs default to partitioning by namespace.
  useEffect(() => {
    if (props.jobType === 'kubernetes') {
      setPartitionField(['kubernetes.namespace']);
    }
  }, [props.jobType]);

  // Return to the overview screen once setup succeeds.
  useEffect(() => {
    if (setupStatus.type === 'succeeded') {
      goHome();
    }
  }, [setupStatus, goHome]);

  return (
    <>
      <EuiFlyoutHeader>
        <EuiTitle size="m">
          <h2>
            <FormattedMessage
              defaultMessage="Enable Machine Learning for {nodeType}"
              id="xpack.infra.ml.aomalyFlyout.jobSetup.flyoutHeader"
              values={{ nodeType: props.jobType }}
            />
          </h2>
        </EuiTitle>
      </EuiFlyoutHeader>
      <EuiFlyoutBody>
        {setupStatus.type === 'pending' ? (
          <EuiFlexGroup alignItems="center">
            <EuiFlexItem grow={false}>
              <EuiLoadingSpinner size="xl" />
            </EuiFlexItem>
            <EuiFlexItem grow={false}>
              <FormattedMessage
                id="xpack.infra.ml.steps.setupProcess.loadingText"
                defaultMessage="Creating ML job..."
              />
            </EuiFlexItem>
          </EuiFlexGroup>
        ) : setupStatus.type === 'failed' ? (
          <>
            <FormattedMessage
              id="xpack.infra.ml.steps.setupProcess.failureText"
              defaultMessage="Something went wrong creating the necessary ML jobs."
            />
            <EuiSpacer />
            <EuiButton fill onClick={createJobs}>
              <FormattedMessage
                id="xpack.infra.ml.steps.setupProcess.tryAgainButton"
                defaultMessage="Try again"
              />
            </EuiButton>
          </>
        ) : (
          <>
            <EuiText>
              <p>
                <FormattedMessage
                  id="xpack.infra.ml.steps.setupProcess.description"
                  defaultMessage="Answer the following questions to configure Machine Learning jobs for Metrics. These settings can not be changed once the jobs are created. You can recreate these jobs later; however, any previously detected anomalies will be removed as a result."
                />
              </p>
            </EuiText>
            <EuiSpacer size="l" />
            <EuiForm>
              <EuiDescribedFormGroup
                title={
                  <h3>
                    <FormattedMessage
                      id="xpack.infra.ml.steps.setupProcess.when.title"
                      defaultMessage="When does your model begin?"
                    />
                  </h3>
                }
                description={
                  <FormattedMessage
                    id="xpack.infra.ml.steps.setupProcess.when.description"
                    defaultMessage="By default, Machine Learning jobs will analyze the past 4 weeks of data and continue to run indefinitely. You can specify a different start date, end date, or both. We recommend that you run jobs indefinitely."
                  />
                }
              >
                <EuiFormRow
                  label={
                    <FormattedMessage
                      id="xpack.infra.ml.steps.setupProcess.when.timePicker.label"
                      defaultMessage="Start date"
                    />
                  }
                >
                  <FixedDatePicker
                    showTimeSelect
                    selected={startDate}
                    onChange={updateStart}
                    maxDate={now}
                  />
                </EuiFormRow>
              </EuiDescribedFormGroup>
              <EuiDescribedFormGroup
                title={
                  <h3>
                    <FormattedMessage
                      id="xpack.infra.ml.steps.setupProcess.partition.title"
                      defaultMessage="How do you want to partition your data?"
                    />
                  </h3>
                }
                description={
                  <FormattedMessage
                    id="xpack.infra.ml.steps.setupProcess.partition.description"
                    defaultMessage="Partitions allow you to build independent models for different groups of data that share similar behavior. For example, you may want to build separate models for machine type or cloud availability zone so that anomalies are not weighted equally across groups."
                  />
                }
              >
                <EuiFormRow
                  label={
                    <FormattedMessage
                      id="xpack.infra.ml.steps.setupProcess.partition.label"
                      defaultMessage="Partition field"
                    />
                  }
                  compressed
                >
                  <EuiComboBox
                    placeholder={i18n.translate('xpack.infra.metricsExplorer.groupByLabel', {
                      defaultMessage: 'Everything',
                    })}
                    aria-label={i18n.translate('xpack.infra.metricsExplorer.groupByAriaLabel', {
                      defaultMessage: 'Graph per',
                    })}
                    fullWidth
                    singleSelection={true}
                    selectedOptions={
                      partitionField ? partitionField.map((p) => ({ label: p })) : undefined
                    }
                    options={derivedIndexPattern.fields
                      .filter((f) => f.aggregatable && f.type === 'string')
                      .map((f) => ({ label: f.name }))}
                    onChange={onPartitionFieldChange}
                    isClearable={true}
                  />
                </EuiFormRow>
              </EuiDescribedFormGroup>
            </EuiForm>
          </>
        )}
      </EuiFlyoutBody>
      <EuiFlyoutFooter>
        <EuiFlexGroup justifyContent="spaceBetween">
          <EuiFlexItem grow={false}>
            <EuiButtonEmpty onClick={props.closeFlyout}>
              <FormattedMessage
                id="xpack.infra.ml.steps.setupProcess.cancelButton"
                defaultMessage="Cancel"
              />
            </EuiButtonEmpty>
          </EuiFlexItem>
          <EuiFlexItem grow={false}>
            <EuiButton fill={true} fullWidth={false} onClick={createJobs}>
              <FormattedMessage
                id="xpack.infra.ml.steps.setupProcess.enableButton"
                defaultMessage="Enable Jobs"
              />
            </EuiButton>
          </EuiFlexItem>
        </EuiFlexGroup>
      </EuiFlyoutFooter>
    </>
  );
};

View file

@ -0,0 +1,172 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { useEffect } from 'react';
import { i18n } from '@kbn/i18n';
import {
EuiPage,
EuiPageBody,
EuiPageContent,
EuiFlexGroup,
EuiFlexItem,
EuiSpacer,
EuiTitle,
EuiText,
EuiButton,
EuiButtonEmpty,
EuiImage,
} from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { LoadingPage } from '../../../../../../components/loading_page';
import { useTrialStatus } from '../../../../../../hooks/use_trial_status';
import { useKibana } from '../../../../../../../../../../src/plugins/kibana_react/public';
import { euiStyled } from '../../../../../../../../observability/public';
import { HttpStart } from '../../../../../../../../../../src/core/public';
/**
 * Splash screen shown when the license does not include ML. Offers either a
 * free-trial CTA (when a trial is still available) or an upgrade CTA, plus a
 * link to documentation.
 */
export const SubscriptionSplashContent: React.FC = () => {
  const { services } = useKibana<{ http: HttpStart }>();
  const { loadState, isTrialAvailable, checkTrialAvailability } = useTrialStatus();

  // Kick off the trial-availability check on mount.
  useEffect(() => {
    checkTrialAvailability();
  }, [checkTrialAvailability]);

  if (loadState === 'pending') {
    return (
      <LoadingPage
        message={i18n.translate('xpack.infra.ml.splash.loadingMessage', {
          defaultMessage: 'Checking license...',
        })}
      />
    );
  }

  const canStartTrial = isTrialAvailable && loadState === 'resolved';

  // Title/description/CTA vary by whether a free trial can still be started.
  let title;
  let description;
  let cta;

  if (canStartTrial) {
    title = (
      <FormattedMessage
        id="xpack.infra.ml.splash.startTrialTitle"
        defaultMessage="To access anomaly detection, start a free trial"
      />
    );

    description = (
      <FormattedMessage
        id="xpack.infra.ml.splash.startTrialDescription"
        defaultMessage="Our free trial includes machine learning features, which enable you to detect anomalies in your logs."
      />
    );

    cta = (
      <EuiButton
        fullWidth={false}
        fill
        href={services.http.basePath.prepend('/app/management/stack/license_management')}
      >
        <FormattedMessage id="xpack.infra.ml.splash.startTrialCta" defaultMessage="Start trial" />
      </EuiButton>
    );
  } else {
    title = (
      <FormattedMessage
        id="xpack.infra.ml.splash.updateSubscriptionTitle"
        defaultMessage="To access anomaly detection, upgrade to a Platinum Subscription"
      />
    );

    description = (
      <FormattedMessage
        id="xpack.infra.ml.splash.updateSubscriptionDescription"
        defaultMessage="You must have a Platinum Subscription to use machine learning features."
      />
    );

    cta = (
      <EuiButton fullWidth={false} fill href="https://www.elastic.co/subscriptions">
        <FormattedMessage
          id="xpack.infra.ml.splash.updateSubscriptionCta"
          defaultMessage="Upgrade subscription"
        />
      </EuiButton>
    );
  }

  return (
    <SubscriptionPage>
      <EuiPageBody>
        <SubscriptionPageContent verticalPosition="center" horizontalPosition="center">
          <EuiFlexGroup>
            <EuiFlexItem>
              <EuiTitle size="m">
                <h2>{title}</h2>
              </EuiTitle>
              <EuiSpacer size="xl" />
              <EuiText>
                <p>{description}</p>
              </EuiText>
              <EuiSpacer />
              <div>{cta}</div>
            </EuiFlexItem>
            <EuiFlexItem>
              <EuiImage
                alt={i18n.translate('xpack.infra.ml.splash.splashImageAlt', {
                  defaultMessage: 'Placeholder image',
                })}
                url={services.http.basePath.prepend(
                  '/plugins/infra/assets/anomaly_chart_minified.svg'
                )}
                size="fullWidth"
              />
            </EuiFlexItem>
          </EuiFlexGroup>
          <SubscriptionPageFooter>
            <EuiTitle size="xs">
              <h3>
                <FormattedMessage
                  id="xpack.infra.ml.splash.learnMoreTitle"
                  defaultMessage="Want to learn more?"
                />
              </h3>
            </EuiTitle>
            <EuiButtonEmpty
              flush="left"
              iconType="training"
              target="_blank"
              color="text"
              href="https://www.elastic.co/guide/en/kibana/master/xpack-logs-analysis.html"
            >
              <FormattedMessage
                id="xpack.infra.ml.splash.learnMoreLink"
                defaultMessage="Read documentation"
              />
            </EuiButtonEmpty>
          </SubscriptionPageFooter>
        </SubscriptionPageContent>
      </EuiPageBody>
    </SubscriptionPage>
  );
};
// Full-height page wrapper for the splash screen.
// NOTE(review): the CSS declaration below is missing a trailing semicolon;
// styled-components tolerates a single trailing declaration, but confirm.
const SubscriptionPage = euiStyled(EuiPage)`
  height: 100%
`;

// Constrains the splash content width.
const SubscriptionPageContent = euiStyled(EuiPageContent)`
  max-width: 768px !important;
`;

// Shaded footer bar; negative margins bleed it to the content's edges,
// compensating for the page content's own padding.
const SubscriptionPageFooter = euiStyled.div`
  background: ${(props) => props.theme.eui.euiColorLightestShade};
  margin: 0 -${(props) => props.theme.eui.paddingSizes.l} -${(props) =>
  props.theme.eui.paddingSizes.l};
  padding: ${(props) => props.theme.eui.paddingSizes.l};
`;

View file

@ -0,0 +1,318 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import {
INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH,
Sort,
Pagination,
PaginationCursor,
getMetricsHostsAnomaliesRequestPayloadRT,
MetricsHostsAnomaly,
getMetricsHostsAnomaliesSuccessReponsePayloadRT,
} from '../../../../../common/http_api/infra_ml';
import { useTrackedPromise } from '../../../../utils/use_tracked_promise';
import { npStart } from '../../../../legacy_singletons';
import { decodeOrThrow } from '../../../../../common/runtime_types';
// Public aliases re-exported for consumers of the anomalies hook.
export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>;
export type Page = number;
export type FetchNextPage = () => void;
export type FetchPreviousPage = () => void;
export type ChangeSortOptions = (sortOptions: Sort) => void;
export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void;
export type MetricsHostsAnomalies = MetricsHostsAnomaly[];

// Cursor pair returned by the API for paging backward/forward.
interface PaginationCursors {
  previousPageCursor: PaginationCursor;
  nextPageCursor: PaginationCursor;
}

// State driving anomaly-table fetches: current page, cursors, sort, time
// range, and an optional dataset filter.
interface ReducerState {
  page: number;
  lastReceivedCursors: PaginationCursors | undefined;
  paginationCursor: Pagination['cursor'] | undefined;
  hasNextPage: boolean;
  paginationOptions: PaginationOptions;
  sortOptions: Sort;
  timeRange: {
    start: number;
    end: number;
  };
  filteredDatasets?: string[];
}

// Subset of ReducerState with hard-coded defaults (see STATE_DEFAULTS).
type ReducerStateDefaults = Pick<
  ReducerState,
  'page' | 'lastReceivedCursors' | 'paginationCursor' | 'hasNextPage'
>;

// Discriminated union of every action the reducer handles.
type ReducerAction =
  | { type: 'changePaginationOptions'; payload: { paginationOptions: PaginationOptions } }
  | { type: 'changeSortOptions'; payload: { sortOptions: Sort } }
  | { type: 'fetchNextPage' }
  | { type: 'fetchPreviousPage' }
  | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } }
  | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } }
  | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } }
  | { type: 'changeFilteredDatasets'; payload: { filteredDatasets?: string[] } };
// Reducer for the anomaly-table query state. Any action that changes the query
// parameters (pagination size, sort, time range, datasets) sends the user back
// to page 1 with no active cursor; cursor bookkeeping actions only patch state.
const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => {
  const firstPage = { page: 1, paginationCursor: undefined };
  switch (action.type) {
    case 'changePaginationOptions':
    case 'changeSortOptions':
    case 'changeTimeRange':
    case 'changeFilteredDatasets':
      return { ...state, ...firstPage, ...action.payload };
    case 'changeHasNextPage':
    case 'changeLastReceivedCursors':
      return { ...state, ...action.payload };
    case 'fetchNextPage': {
      const cursors = state.lastReceivedCursors;
      // Without cursors from a previous response there is nothing to page to.
      if (!cursors) {
        return state;
      }
      return {
        ...state,
        page: state.page + 1,
        paginationCursor: { searchAfter: cursors.nextPageCursor },
      };
    }
    case 'fetchPreviousPage': {
      const cursors = state.lastReceivedCursors;
      if (!cursors) {
        return state;
      }
      return {
        ...state,
        page: state.page - 1,
        paginationCursor: { searchBefore: cursors.previousPageCursor },
      };
    }
    default:
      return state;
  }
};
// Initial values for the client-side portion of the reducer state; the query
// parameters (sort, pagination, time range) are filled in by initStateReducer.
const STATE_DEFAULTS: ReducerStateDefaults = {
  // NOTE: This piece of state is purely for the client side, it could be extracted out of the hook.
  page: 1,
  // Cursor from the last request
  lastReceivedCursors: undefined,
  // Cursor to use for the next request. For the first request, and therefore not paging, this will be undefined.
  paginationCursor: undefined,
  hasNextPage: false,
};
/**
 * React hook that loads metrics-hosts anomalies for a source over a time
 * range, with cursor-based pagination and sortable results.
 *
 * NOTE(review): `onGetMetricsHostsAnomaliesDatasetsError` is accepted but never
 * used in this hook — confirm whether error reporting was meant to be wired up.
 * NOTE(review): `filteredDatasets` is kept in reducer state (and in the request
 * dependencies) but is NOT forwarded to `callGetMetricHostsAnomaliesAPI`,
 * unlike the K8s variant of this hook — confirm whether that is intentional.
 */
export const useMetricsHostsAnomaliesResults = ({
  endTime,
  startTime,
  sourceId,
  defaultSortOptions,
  defaultPaginationOptions,
  onGetMetricsHostsAnomaliesDatasetsError,
  filteredDatasets,
}: {
  endTime: number;
  startTime: number;
  sourceId: string;
  defaultSortOptions: Sort;
  defaultPaginationOptions: Pick<Pagination, 'pageSize'>;
  onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void;
  filteredDatasets?: string[];
}) => {
  // Lazy initializer: merge the static defaults with values derived from the
  // hook arguments.
  const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
    return {
      ...stateDefaults,
      paginationOptions: defaultPaginationOptions,
      sortOptions: defaultSortOptions,
      filteredDatasets,
      timeRange: {
        start: startTime,
        end: endTime,
      },
    };
  };
  const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer);
  const [metricsHostsAnomalies, setMetricsHostsAnomalies] = useState<MetricsHostsAnomalies>([]);
  const [getMetricsHostsAnomaliesRequest, getMetricsHostsAnomalies] = useTrackedPromise(
    {
      // A new request cancels any request still in flight.
      cancelPreviousOn: 'creation',
      createPromise: async () => {
        const {
          timeRange: { start: queryStartTime, end: queryEndTime },
          sortOptions,
          paginationOptions,
          paginationCursor,
        } = reducerState;
        return await callGetMetricHostsAnomaliesAPI(
          sourceId,
          queryStartTime,
          queryEndTime,
          sortOptions,
          {
            ...paginationOptions,
            cursor: paginationCursor,
          }
        );
      },
      onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
        const { paginationCursor } = reducerState;
        if (requestCursors) {
          dispatch({
            type: 'changeLastReceivedCursors',
            payload: { lastReceivedCursors: requestCursors },
          });
        }
        // Check if we have more "next" entries. "Page" covers the "previous" scenario,
        // since we need to know the page we're on anyway.
        if (!paginationCursor || (paginationCursor && 'searchAfter' in paginationCursor)) {
          dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: hasMoreEntries } });
        } else if (paginationCursor && 'searchBefore' in paginationCursor) {
          // We've requested a previous page, therefore there is a next page.
          dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: true } });
        }
        setMetricsHostsAnomalies(anomalies);
      },
    },
    [
      sourceId,
      dispatch,
      reducerState.timeRange,
      reducerState.sortOptions,
      reducerState.paginationOptions,
      reducerState.paginationCursor,
      reducerState.filteredDatasets,
    ]
  );
  const changeSortOptions = useCallback(
    (nextSortOptions: Sort) => {
      dispatch({ type: 'changeSortOptions', payload: { sortOptions: nextSortOptions } });
    },
    [dispatch]
  );
  const changePaginationOptions = useCallback(
    (nextPaginationOptions: PaginationOptions) => {
      dispatch({
        type: 'changePaginationOptions',
        payload: { paginationOptions: nextPaginationOptions },
      });
    },
    [dispatch]
  );
  // Time range has changed
  useEffect(() => {
    dispatch({
      type: 'changeTimeRange',
      payload: { timeRange: { start: startTime, end: endTime } },
    });
  }, [startTime, endTime]);
  // Selected datasets have changed
  useEffect(() => {
    dispatch({
      type: 'changeFilteredDatasets',
      payload: { filteredDatasets },
    });
  }, [filteredDatasets]);
  // Re-fetch whenever any request-relevant piece of reducer state changes
  // (getMetricsHostsAnomalies is recreated by useTrackedPromise on dep change).
  useEffect(() => {
    getMetricsHostsAnomalies();
  }, [getMetricsHostsAnomalies]); // TODO: Figure out the deps here.
  const handleFetchNextPage = useCallback(() => {
    if (reducerState.lastReceivedCursors) {
      dispatch({ type: 'fetchNextPage' });
    }
  }, [dispatch, reducerState]);
  const handleFetchPreviousPage = useCallback(() => {
    if (reducerState.lastReceivedCursors) {
      dispatch({ type: 'fetchPreviousPage' });
    }
  }, [dispatch, reducerState]);
  const isLoadingMetricsHostsAnomalies = useMemo(
    () => getMetricsHostsAnomaliesRequest.state === 'pending',
    [getMetricsHostsAnomaliesRequest.state]
  );
  const hasFailedLoadingMetricsHostsAnomalies = useMemo(
    () => getMetricsHostsAnomaliesRequest.state === 'rejected',
    [getMetricsHostsAnomaliesRequest.state]
  );
  return {
    metricsHostsAnomalies,
    getMetricsHostsAnomalies,
    isLoadingMetricsHostsAnomalies,
    hasFailedLoadingMetricsHostsAnomalies,
    changeSortOptions,
    sortOptions: reducerState.sortOptions,
    changePaginationOptions,
    paginationOptions: reducerState.paginationOptions,
    // Only expose the page handlers when the respective page exists.
    fetchPreviousPage: reducerState.page > 1 ? handleFetchPreviousPage : undefined,
    fetchNextPage: reducerState.hasNextPage ? handleFetchNextPage : undefined,
    page: reducerState.page,
  };
};
/**
 * POSTs an anomalies query to the infra ML hosts-anomalies endpoint and decodes
 * the response. Encoding through the io-ts codec keeps the payload shape in
 * sync with the server-side route validation; decodeOrThrow rejects responses
 * that do not match the expected shape.
 */
export const callGetMetricHostsAnomaliesAPI = async (
  sourceId: string,
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) => {
  const payload = getMetricsHostsAnomaliesRequestPayloadRT.encode({
    data: {
      sourceId,
      timeRange: { startTime, endTime },
      sort,
      pagination,
    },
  });
  const response = await npStart.http.fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  return decodeOrThrow(getMetricsHostsAnomaliesSuccessReponsePayloadRT)(response);
};

View file

@ -0,0 +1,322 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import {
Sort,
Pagination,
PaginationCursor,
INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH,
getMetricsK8sAnomaliesSuccessReponsePayloadRT,
getMetricsK8sAnomaliesRequestPayloadRT,
MetricsK8sAnomaly,
} from '../../../../../common/http_api/infra_ml';
import { useTrackedPromise } from '../../../../utils/use_tracked_promise';
import { npStart } from '../../../../legacy_singletons';
import { decodeOrThrow } from '../../../../../common/runtime_types';
// Convenience aliases re-exported for consumers of the hook below.
export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>;
export type Page = number;
export type FetchNextPage = () => void;
export type FetchPreviousPage = () => void;
export type ChangeSortOptions = (sortOptions: Sort) => void;
export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void;
export type MetricsK8sAnomalies = MetricsK8sAnomaly[];
// Cursors returned by the previous response, used to page forward / backward.
interface PaginationCursors {
  previousPageCursor: PaginationCursor;
  nextPageCursor: PaginationCursor;
}
interface ReducerState {
  // 1-based page number; tracked purely on the client.
  page: number;
  // Cursors from the most recent response; undefined until a response arrives.
  lastReceivedCursors: PaginationCursors | undefined;
  // Cursor to send with the next request; undefined for the first, unpaged request.
  paginationCursor: Pagination['cursor'] | undefined;
  hasNextPage: boolean;
  paginationOptions: PaginationOptions;
  sortOptions: Sort;
  timeRange: {
    start: number;
    end: number;
  };
  filteredDatasets?: string[];
}
// Subset of the state seeded from STATE_DEFAULTS; the rest is derived from hook arguments.
type ReducerStateDefaults = Pick<
  ReducerState,
  'page' | 'lastReceivedCursors' | 'paginationCursor' | 'hasNextPage'
>;
// Every action understood by stateReducer.
type ReducerAction =
  | { type: 'changePaginationOptions'; payload: { paginationOptions: PaginationOptions } }
  | { type: 'changeSortOptions'; payload: { sortOptions: Sort } }
  | { type: 'fetchNextPage' }
  | { type: 'fetchPreviousPage' }
  | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } }
  | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } }
  | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } }
  | { type: 'changeFilteredDatasets'; payload: { filteredDatasets?: string[] } };
// Reducer for the anomaly-table query state. Query-parameter changes restart
// pagination from the first page; cursor bookkeeping actions patch state only.
const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => {
  // Helper that applies a patch while resetting pagination to page 1.
  const restartPagination = (patch: object): ReducerState => ({
    ...state,
    page: 1,
    paginationCursor: undefined,
    ...patch,
  });
  if (
    action.type === 'changePaginationOptions' ||
    action.type === 'changeSortOptions' ||
    action.type === 'changeTimeRange' ||
    action.type === 'changeFilteredDatasets'
  ) {
    return restartPagination(action.payload);
  }
  if (action.type === 'changeHasNextPage' || action.type === 'changeLastReceivedCursors') {
    return { ...state, ...action.payload };
  }
  if (action.type === 'fetchNextPage') {
    // Paging requires cursors from a previous response.
    return state.lastReceivedCursors
      ? {
          ...state,
          page: state.page + 1,
          paginationCursor: { searchAfter: state.lastReceivedCursors.nextPageCursor },
        }
      : state;
  }
  if (action.type === 'fetchPreviousPage') {
    return state.lastReceivedCursors
      ? {
          ...state,
          page: state.page - 1,
          paginationCursor: { searchBefore: state.lastReceivedCursors.previousPageCursor },
        }
      : state;
  }
  return state;
};
// Initial values for the client-side portion of the reducer state; the query
// parameters (sort, pagination, time range) are filled in by initStateReducer.
const STATE_DEFAULTS: ReducerStateDefaults = {
  // NOTE: This piece of state is purely for the client side, it could be extracted out of the hook.
  page: 1,
  // Cursor from the last request
  lastReceivedCursors: undefined,
  // Cursor to use for the next request. For the first request, and therefore not paging, this will be undefined.
  paginationCursor: undefined,
  hasNextPage: false,
};
/**
 * React hook that loads metrics-K8s anomalies for a source over a time range,
 * with cursor-based pagination, sortable results, and optional dataset
 * filtering (forwarded to the API, unlike the hosts variant).
 *
 * NOTE(review): `onGetMetricsHostsAnomaliesDatasetsError` is accepted but never
 * used in this hook — confirm whether error reporting was meant to be wired up.
 */
export const useMetricsK8sAnomaliesResults = ({
  endTime,
  startTime,
  sourceId,
  defaultSortOptions,
  defaultPaginationOptions,
  onGetMetricsHostsAnomaliesDatasetsError,
  filteredDatasets,
}: {
  endTime: number;
  startTime: number;
  sourceId: string;
  defaultSortOptions: Sort;
  defaultPaginationOptions: Pick<Pagination, 'pageSize'>;
  onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void;
  filteredDatasets?: string[];
}) => {
  // Lazy initializer: merge the static defaults with values derived from the
  // hook arguments.
  const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
    return {
      ...stateDefaults,
      paginationOptions: defaultPaginationOptions,
      sortOptions: defaultSortOptions,
      filteredDatasets,
      timeRange: {
        start: startTime,
        end: endTime,
      },
    };
  };
  const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer);
  const [metricsK8sAnomalies, setMetricsK8sAnomalies] = useState<MetricsK8sAnomalies>([]);
  const [getMetricsK8sAnomaliesRequest, getMetricsK8sAnomalies] = useTrackedPromise(
    {
      // A new request cancels any request still in flight.
      cancelPreviousOn: 'creation',
      createPromise: async () => {
        const {
          timeRange: { start: queryStartTime, end: queryEndTime },
          sortOptions,
          paginationOptions,
          paginationCursor,
          filteredDatasets: queryFilteredDatasets,
        } = reducerState;
        return await callGetMetricsK8sAnomaliesAPI(
          sourceId,
          queryStartTime,
          queryEndTime,
          sortOptions,
          {
            ...paginationOptions,
            cursor: paginationCursor,
          },
          queryFilteredDatasets
        );
      },
      onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
        const { paginationCursor } = reducerState;
        if (requestCursors) {
          dispatch({
            type: 'changeLastReceivedCursors',
            payload: { lastReceivedCursors: requestCursors },
          });
        }
        // Check if we have more "next" entries. "Page" covers the "previous" scenario,
        // since we need to know the page we're on anyway.
        if (!paginationCursor || (paginationCursor && 'searchAfter' in paginationCursor)) {
          dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: hasMoreEntries } });
        } else if (paginationCursor && 'searchBefore' in paginationCursor) {
          // We've requested a previous page, therefore there is a next page.
          dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: true } });
        }
        setMetricsK8sAnomalies(anomalies);
      },
    },
    [
      sourceId,
      dispatch,
      reducerState.timeRange,
      reducerState.sortOptions,
      reducerState.paginationOptions,
      reducerState.paginationCursor,
      reducerState.filteredDatasets,
    ]
  );
  const changeSortOptions = useCallback(
    (nextSortOptions: Sort) => {
      dispatch({ type: 'changeSortOptions', payload: { sortOptions: nextSortOptions } });
    },
    [dispatch]
  );
  const changePaginationOptions = useCallback(
    (nextPaginationOptions: PaginationOptions) => {
      dispatch({
        type: 'changePaginationOptions',
        payload: { paginationOptions: nextPaginationOptions },
      });
    },
    [dispatch]
  );
  // Time range has changed
  useEffect(() => {
    dispatch({
      type: 'changeTimeRange',
      payload: { timeRange: { start: startTime, end: endTime } },
    });
  }, [startTime, endTime]);
  // Selected datasets have changed
  useEffect(() => {
    dispatch({
      type: 'changeFilteredDatasets',
      payload: { filteredDatasets },
    });
  }, [filteredDatasets]);
  // Re-fetch whenever any request-relevant piece of reducer state changes
  // (getMetricsK8sAnomalies is recreated by useTrackedPromise on dep change).
  useEffect(() => {
    getMetricsK8sAnomalies();
  }, [getMetricsK8sAnomalies]);
  const handleFetchNextPage = useCallback(() => {
    if (reducerState.lastReceivedCursors) {
      dispatch({ type: 'fetchNextPage' });
    }
  }, [dispatch, reducerState]);
  const handleFetchPreviousPage = useCallback(() => {
    if (reducerState.lastReceivedCursors) {
      dispatch({ type: 'fetchPreviousPage' });
    }
  }, [dispatch, reducerState]);
  const isLoadingMetricsK8sAnomalies = useMemo(
    () => getMetricsK8sAnomaliesRequest.state === 'pending',
    [getMetricsK8sAnomaliesRequest.state]
  );
  const hasFailedLoadingMetricsK8sAnomalies = useMemo(
    () => getMetricsK8sAnomaliesRequest.state === 'rejected',
    [getMetricsK8sAnomaliesRequest.state]
  );
  return {
    metricsK8sAnomalies,
    getMetricsK8sAnomalies,
    isLoadingMetricsK8sAnomalies,
    hasFailedLoadingMetricsK8sAnomalies,
    changeSortOptions,
    sortOptions: reducerState.sortOptions,
    changePaginationOptions,
    paginationOptions: reducerState.paginationOptions,
    // Only expose the page handlers when the respective page exists.
    fetchPreviousPage: reducerState.page > 1 ? handleFetchPreviousPage : undefined,
    fetchNextPage: reducerState.hasNextPage ? handleFetchNextPage : undefined,
    page: reducerState.page,
  };
};
/**
 * POSTs an anomalies query to the infra ML K8s-anomalies endpoint and decodes
 * the response. Encoding through the io-ts codec keeps the payload shape in
 * sync with the server-side route validation; decodeOrThrow rejects responses
 * that do not match the expected shape.
 */
export const callGetMetricsK8sAnomaliesAPI = async (
  sourceId: string,
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination,
  datasets?: string[]
) => {
  const payload = getMetricsK8sAnomaliesRequestPayloadRT.encode({
    data: {
      sourceId,
      timeRange: { startTime, endTime },
      sort,
      pagination,
      datasets,
    },
  });
  const response = await npStart.http.fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  return decodeOrThrow(getMetricsK8sAnomaliesSuccessReponsePayloadRT)(response);
};

View file

@ -21,6 +21,8 @@ import {
initGetLogEntryAnomaliesRoute,
initGetLogEntryAnomaliesDatasetsRoute,
} from './routes/log_analysis';
import { initGetK8sAnomaliesRoute } from './routes/infra_ml';
import { initGetHostsAnomaliesRoute } from './routes/infra_ml';
import { initMetricExplorerRoute } from './routes/metrics_explorer';
import { initMetadataRoute } from './routes/metadata';
import { initSnapshotRoute } from './routes/snapshot';
@ -56,6 +58,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
initGetLogEntryRateRoute(libs);
initGetLogEntryAnomaliesRoute(libs);
initGetLogEntryAnomaliesDatasetsRoute(libs);
initGetK8sAnomaliesRoute(libs);
initGetHostsAnomaliesRoute(libs);
initSnapshotRoute(libs);
initNodeDetailsRoute(libs);
initSourceRoute(libs);

View file

@ -0,0 +1,89 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import type { MlAnomalyDetectors, MlSystem } from '../../types';
import { NoLogAnalysisMlJobError } from './errors';
import {
CompositeDatasetKey,
createLogEntryDatasetsQuery,
LogEntryDatasetBucket,
logEntryDatasetsResponseRT,
} from './queries/log_entry_data_sets';
import { decodeOrThrow } from '../../../common/runtime_types';
import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing';
/**
 * Fetches a single ML job definition by id, timing the round trip.
 *
 * @throws NoLogAnalysisMlJobError when the job does not exist.
 * @returns the job plus a timing span for the ES request.
 */
export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) {
  const finishSpan = startTracingSpan('Fetch ml job from ES');
  const jobsResponse = await mlAnomalyDetectors.jobs(jobId);
  const span = finishSpan();
  const job = jobsResponse.jobs[0];
  if (job == null) {
    throw new NoLogAnalysisMlJobError(`Failed to find ml job ${jobId}.`);
  }
  return {
    mlJob: job,
    timing: {
      spans: [span],
    },
  };
}
// Page size for the composite aggregation used below.
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
// Finds datasets related to ML job ids
export async function getLogEntryDatasets(
  mlSystem: MlSystem,
  startTime: number,
  endTime: number,
  jobIds: string[]
) {
  const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets');
  let logEntryDatasetBuckets: LogEntryDatasetBucket[] = [];
  let afterLatestBatchKey: CompositeDatasetKey | undefined;
  let esSearchSpans: TracingSpan[] = [];
  // Page through the composite aggregation until a batch comes back smaller
  // than the batch size, which means there are no further buckets.
  while (true) {
    const finalizeEsSearchSpan = startTracingSpan('fetch log entry dataset batch from ES');
    const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)(
      await mlSystem.mlAnomalySearch(
        createLogEntryDatasetsQuery(
          jobIds,
          startTime,
          endTime,
          COMPOSITE_AGGREGATION_BATCH_SIZE,
          afterLatestBatchKey
        )
      )
    );
    // after_key is the cursor for the next composite-aggregation page.
    const { after_key: afterKey, buckets: latestBatchBuckets = [] } =
      logEntryDatasetsResponse.aggregations?.dataset_buckets ?? {};
    logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets];
    afterLatestBatchKey = afterKey;
    esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()];
    if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
      break;
    }
  }
  const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan();
  // Flatten the buckets into the dataset names plus timing spans.
  return {
    data: logEntryDatasetBuckets.map((logEntryDatasetBucket) => logEntryDatasetBucket.key.dataset),
    timing: {
      spans: [logEntryDatasetsSpan, ...esSearchSpans],
    },
  };
}

View file

@ -0,0 +1,49 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/* eslint-disable max-classes-per-file */
import {
UnknownMLCapabilitiesError,
InsufficientMLCapabilities,
MLPrivilegesUninitialized,
} from '../../../../ml/server';
/** Thrown when an expected ML job cannot be found (see fetchMlJob). */
export class NoLogAnalysisMlJobError extends Error {
  constructor(message?: string) {
    super(message);
    // Standard TypeScript workaround: restore the prototype chain so
    // `instanceof` works even when classes are downleveled.
    Object.setPrototypeOf(this, new.target.prototype);
  }
}
/** Thrown when an ML job exists but its configuration is unusable. */
export class InsufficientLogAnalysisMlJobConfigurationError extends Error {
  constructor(message?: string) {
    super(message);
    Object.setPrototypeOf(this, new.target.prototype);
  }
}
/** Thrown when a referenced ML category id has no matching category. */
export class UnknownCategoryError extends Error {
  constructor(categoryId: number) {
    super(`Unknown ml category ${categoryId}`);
    Object.setPrototypeOf(this, new.target.prototype);
  }
}
/** Thrown when no compatible anomaly ML jobs are configured for a query. */
export class InsufficientAnomalyMlJobsConfigured extends Error {
  constructor(message?: string) {
    super(message);
    Object.setPrototypeOf(this, new.target.prototype);
  }
}
/** True when the error is one of the ML plugin's capability/privilege errors. */
export const isMlPrivilegesError = (error: any) => {
  return (
    error instanceof UnknownMLCapabilitiesError ||
    error instanceof InsufficientMLCapabilities ||
    error instanceof MLPrivilegesUninitialized
  );
};

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export * from './errors';
export * from './metrics_hosts_anomalies';
export * from './metrics_k8s_anomalies';

View file

@ -0,0 +1,289 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { RequestHandlerContext } from 'src/core/server';
import { InfraRequestHandlerContext } from '../../types';
import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing';
import { fetchMlJob, getLogEntryDatasets } from './common';
import { getJobId, metricsHostsJobTypes } from '../../../common/infra_ml';
import { Sort, Pagination } from '../../../common/http_api/infra_ml';
import type { MlSystem, MlAnomalyDetectors } from '../../types';
import { InsufficientAnomalyMlJobsConfigured, isMlPrivilegesError } from './errors';
import { decodeOrThrow } from '../../../common/runtime_types';
import {
metricsHostsAnomaliesResponseRT,
createMetricsHostsAnomaliesQuery,
} from './queries/metrics_hosts_anomalies';
// Shape of an ML anomaly record after mapping from the raw ES hit
// (see fetchMetricsHostsAnomalies below).
interface MappedAnomalyHit {
  id: string;
  anomalyScore: number;
  // Always '' for hosts anomalies — there is no dataset partition; kept for
  // response-shape parity with other anomaly types.
  dataset: string;
  typical: number;
  actual: number;
  jobId: string;
  startTime: number;
  // Milliseconds (bucket_span * 1000 in the mapping).
  duration: number;
  hostName: string[];
  categoryId?: string;
}
async function getCompatibleAnomaliesJobIds(
spaceId: string,
sourceId: string,
mlAnomalyDetectors: MlAnomalyDetectors
) {
const metricsHostsJobIds = metricsHostsJobTypes.map((jt) => getJobId(spaceId, sourceId, jt));
const jobIds: string[] = [];
let jobSpans: TracingSpan[] = [];
try {
await Promise.all(
metricsHostsJobIds.map((id) => {
return (async () => {
const {
timing: { spans },
} = await fetchMlJob(mlAnomalyDetectors, id);
jobIds.push(id);
jobSpans = [...jobSpans, ...spans];
})();
})
);
} catch (e) {
if (isMlPrivilegesError(e)) {
throw e;
}
// An error is also thrown when no jobs are found
}
return {
jobIds,
timing: { spans: jobSpans },
};
}
/**
 * Fetches hosts anomalies for every compatible ML job in the request's
 * space/source, maps them into the API response shape, and returns them with
 * pagination cursors and timing spans.
 *
 * @throws InsufficientAnomalyMlJobsConfigured when no metrics-hosts jobs exist.
 */
export async function getMetricsHostsAnomalies(
  context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
  sourceId: string,
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) {
  const finalizeMetricsHostsAnomaliesSpan = startTracingSpan('get metrics hosts entry anomalies');
  const {
    jobIds,
    timing: { spans: jobSpans },
  } = await getCompatibleAnomaliesJobIds(
    context.infra.spaceId,
    sourceId,
    context.infra.mlAnomalyDetectors
  );
  if (jobIds.length === 0) {
    throw new InsufficientAnomalyMlJobsConfigured(
      'Metrics Hosts ML jobs need to be configured to search anomalies'
    );
  }
  // Fix: errors were previously re-wrapped via `throw new Error(e)`, which
  // stringifies the original error to '[object Object]' and discards its type
  // (breaking `instanceof` checks upstream). Let errors propagate unchanged.
  const {
    anomalies,
    paginationCursors,
    hasMoreEntries,
    timing: { spans: fetchLogEntryAnomaliesSpans },
  } = await fetchMetricsHostsAnomalies(
    context.infra.mlSystem,
    jobIds,
    startTime,
    endTime,
    sort,
    pagination
  );
  const data = anomalies.map((anomaly) => parseAnomalyResult(anomaly, anomaly.jobId));
  const metricsHostsAnomaliesSpan = finalizeMetricsHostsAnomaliesSpan();
  return {
    data,
    paginationCursors,
    hasMoreEntries,
    timing: {
      spans: [metricsHostsAnomaliesSpan, ...jobSpans, ...fetchLogEntryAnomaliesSpans],
    },
  };
}
// Rebuilds a mapped anomaly hit in the API response shape, tagging it with the
// anomaly type and the ML job that produced it.
const parseAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => {
  return {
    id: anomaly.id,
    anomalyScore: anomaly.anomalyScore,
    dataset: anomaly.dataset,
    typical: anomaly.typical,
    actual: anomaly.actual,
    duration: anomaly.duration,
    hostName: anomaly.hostName,
    startTime: anomaly.startTime,
    type: 'metrics_hosts' as const,
    jobId,
  };
};
/**
 * Queries the ML anomaly indices for hosts anomaly records and maps the raw
 * hits into MappedAnomalyHit objects, together with pagination cursors and a
 * flag for whether more entries exist.
 *
 * (Removed a commented-out debug `console.log` block that duplicated the
 * search call.)
 */
async function fetchMetricsHostsAnomalies(
  mlSystem: MlSystem,
  jobIds: string[],
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) {
  // We'll request 1 extra entry on top of our pageSize to determine if there are
  // more entries to be fetched. This avoids scenarios where the client side can't
  // determine if entries.length === pageSize actually means there are more entries / next page
  // or not.
  const expandedPagination = { ...pagination, pageSize: pagination.pageSize + 1 };
  const finalizeFetchLogEntryAnomaliesSpan = startTracingSpan('fetch metrics hosts anomalies');
  const results = decodeOrThrow(metricsHostsAnomaliesResponseRT)(
    await mlSystem.mlAnomalySearch(
      createMetricsHostsAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination)
    )
  );
  const {
    hits: { hits },
  } = results;
  const hasMoreEntries = hits.length > pagination.pageSize;
  // An extra entry was found and hasMoreEntries has been determined, the extra entry can be removed.
  if (hasMoreEntries) {
    hits.pop();
  }
  // To "search_before" the sort order will have been reversed for ES.
  // The results are now reversed back, to match the requested sort.
  if (pagination.cursor && 'searchBefore' in pagination.cursor) {
    hits.reverse();
  }
  const paginationCursors =
    hits.length > 0
      ? {
          previousPageCursor: hits[0].sort,
          nextPageCursor: hits[hits.length - 1].sort,
        }
      : undefined;
  const anomalies = hits.map((result) => {
    const {
      // eslint-disable-next-line @typescript-eslint/naming-convention
      job_id,
      record_score: anomalyScore,
      typical,
      actual,
      bucket_span: duration,
      timestamp: anomalyStartTime,
      by_field_value: categoryId,
    } = result._source;
    return {
      id: result._id,
      anomalyScore,
      // Hosts jobs have no dataset partition; keep the field for shape parity.
      dataset: '',
      typical: typical[0],
      actual: actual[0],
      jobId: job_id,
      hostName: result._source['host.name'],
      startTime: anomalyStartTime,
      // assumes bucket_span is in seconds; converted to milliseconds here.
      duration: duration * 1000,
      categoryId,
    };
  });
  const fetchLogEntryAnomaliesSpan = finalizeFetchLogEntryAnomaliesSpan();
  return {
    anomalies,
    paginationCursors,
    hasMoreEntries,
    timing: {
      spans: [fetchLogEntryAnomaliesSpan],
    },
  };
}
// TODO: Figure out why we need datasets
/**
 * Returns the datasets covered by the compatible metrics-hosts ML jobs within
 * the given time range, plus timing spans.
 *
 * @throws InsufficientAnomalyMlJobsConfigured when no metrics-hosts jobs exist.
 */
export async function getMetricsHostsAnomaliesDatasets(
  context: {
    infra: {
      mlSystem: MlSystem;
      mlAnomalyDetectors: MlAnomalyDetectors;
      spaceId: string;
    };
  },
  sourceId: string,
  startTime: number,
  endTime: number
) {
  const {
    jobIds,
    timing: { spans: jobSpans },
  } = await getCompatibleAnomaliesJobIds(
    context.infra.spaceId,
    sourceId,
    context.infra.mlAnomalyDetectors
  );
  if (jobIds.length === 0) {
    // Fix: the message previously referenced log analysis ("Log rate or
    // categorisation") — a copy-paste from the logs implementation.
    throw new InsufficientAnomalyMlJobsConfigured(
      'Metrics Hosts ML jobs need to be configured to search for anomaly datasets'
    );
  }
  const {
    data: datasets,
    timing: { spans: datasetsSpans },
  } = await getLogEntryDatasets(context.infra.mlSystem, startTime, endTime, jobIds);
  return {
    datasets,
    timing: {
      spans: [...jobSpans, ...datasetsSpans],
    },
  };
}

View file

@ -0,0 +1,272 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { RequestHandlerContext } from 'src/core/server';
import { InfraRequestHandlerContext } from '../../types';
import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing';
import { fetchMlJob, getLogEntryDatasets } from './common';
import { getJobId, metricsK8SJobTypes } from '../../../common/infra_ml';
import { Sort, Pagination } from '../../../common/http_api/infra_ml';
import type { MlSystem, MlAnomalyDetectors } from '../../types';
import { InsufficientAnomalyMlJobsConfigured, isMlPrivilegesError } from './errors';
import { decodeOrThrow } from '../../../common/runtime_types';
import {
metricsK8sAnomaliesResponseRT,
createMetricsK8sAnomaliesQuery,
} from './queries/metrics_k8s_anomalies';
// Shape of an ML anomaly record after mapping from the raw ES hit
// (see fetchMetricK8sAnomalies below). Unlike the hosts variant there is no
// dataset field (its mapping is commented out below).
interface MappedAnomalyHit {
  id: string;
  anomalyScore: number;
  // dataset: string;
  typical: number;
  actual: number;
  jobId: string;
  startTime: number;
  // Milliseconds (bucket_span * 1000 in the mapping).
  duration: number;
  categoryId?: string;
}
async function getCompatibleAnomaliesJobIds(
spaceId: string,
sourceId: string,
mlAnomalyDetectors: MlAnomalyDetectors
) {
const metricsK8sJobIds = metricsK8SJobTypes.map((jt) => getJobId(spaceId, sourceId, jt));
const jobIds: string[] = [];
let jobSpans: TracingSpan[] = [];
try {
await Promise.all(
metricsK8sJobIds.map((id) => {
return (async () => {
const {
timing: { spans },
} = await fetchMlJob(mlAnomalyDetectors, id);
jobIds.push(id);
jobSpans = [...jobSpans, ...spans];
})();
})
);
} catch (e) {
if (isMlPrivilegesError(e)) {
throw e;
}
// An error is also thrown when no jobs are found
}
return {
jobIds,
timing: { spans: jobSpans },
};
}
/**
 * Fetches K8s anomalies for every compatible ML job in the request's
 * space/source, maps them into the API response shape, and returns them with
 * pagination cursors and timing spans.
 *
 * @throws InsufficientAnomalyMlJobsConfigured when no metrics-K8s jobs exist.
 */
export async function getMetricK8sAnomalies(
  context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
  sourceId: string,
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) {
  const finalizeMetricsK8sAnomaliesSpan = startTracingSpan('get metrics k8s entry anomalies');
  const {
    jobIds,
    timing: { spans: jobSpans },
  } = await getCompatibleAnomaliesJobIds(
    context.infra.spaceId,
    sourceId,
    context.infra.mlAnomalyDetectors
  );
  if (jobIds.length === 0) {
    // Fix: the message previously referenced log analysis ("Log rate or
    // categorisation") — a copy-paste; this mirrors the hosts variant.
    throw new InsufficientAnomalyMlJobsConfigured(
      'Metrics K8s ML jobs need to be configured to search anomalies'
    );
  }
  const {
    anomalies,
    paginationCursors,
    hasMoreEntries,
    timing: { spans: fetchLogEntryAnomaliesSpans },
  } = await fetchMetricK8sAnomalies(
    context.infra.mlSystem,
    jobIds,
    startTime,
    endTime,
    sort,
    pagination
  );
  const data = anomalies.map((anomaly) => parseAnomalyResult(anomaly, anomaly.jobId));
  const metricsK8sAnomaliesSpan = finalizeMetricsK8sAnomaliesSpan();
  return {
    data,
    paginationCursors,
    hasMoreEntries,
    timing: {
      spans: [metricsK8sAnomaliesSpan, ...jobSpans, ...fetchLogEntryAnomaliesSpans],
    },
  };
}
// Rebuilds a mapped anomaly hit in the API response shape, tagging it with the
// anomaly type and the ML job that produced it.
const parseAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => {
  return {
    id: anomaly.id,
    anomalyScore: anomaly.anomalyScore,
    typical: anomaly.typical,
    actual: anomaly.actual,
    duration: anomaly.duration,
    startTime: anomaly.startTime,
    type: 'metrics_k8s' as const,
    jobId,
  };
};
/**
 * Queries the ML anomaly indices for K8s anomaly records and maps the raw hits
 * into MappedAnomalyHit objects, together with pagination cursors and a flag
 * for whether more entries exist.
 */
async function fetchMetricK8sAnomalies(
  mlSystem: MlSystem,
  jobIds: string[],
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) {
  // We'll request 1 extra entry on top of our pageSize to determine if there are
  // more entries to be fetched. This avoids scenarios where the client side can't
  // determine if entries.length === pageSize actually means there are more entries / next page
  // or not.
  const expandedPagination = { ...pagination, pageSize: pagination.pageSize + 1 };
  const finalizeFetchLogEntryAnomaliesSpan = startTracingSpan('fetch metrics k8s anomalies');
  const results = decodeOrThrow(metricsK8sAnomaliesResponseRT)(
    await mlSystem.mlAnomalySearch(
      createMetricsK8sAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination)
    )
  );
  const {
    hits: { hits },
  } = results;
  const hasMoreEntries = hits.length > pagination.pageSize;
  // An extra entry was found and hasMoreEntries has been determined, the extra entry can be removed.
  if (hasMoreEntries) {
    hits.pop();
  }
  // To "search_before" the sort order will have been reversed for ES.
  // The results are now reversed back, to match the requested sort.
  if (pagination.cursor && 'searchBefore' in pagination.cursor) {
    hits.reverse();
  }
  const paginationCursors =
    hits.length > 0
      ? {
          previousPageCursor: hits[0].sort,
          nextPageCursor: hits[hits.length - 1].sort,
        }
      : undefined;
  const anomalies = hits.map((result) => {
    const {
      // eslint-disable-next-line @typescript-eslint/naming-convention
      job_id,
      record_score: anomalyScore,
      typical,
      actual,
      // partition_field_value: dataset,
      bucket_span: duration,
      timestamp: anomalyStartTime,
      by_field_value: categoryId,
    } = result._source;
    return {
      id: result._id,
      anomalyScore,
      // dataset,
      // typical/actual are arrays in the record; only the first value is used.
      typical: typical[0],
      actual: actual[0],
      jobId: job_id,
      startTime: anomalyStartTime,
      // assumes bucket_span is in seconds; converted to milliseconds here.
      duration: duration * 1000,
      categoryId,
    };
  });
  const fetchLogEntryAnomaliesSpan = finalizeFetchLogEntryAnomaliesSpan();
  return {
    anomalies,
    paginationCursors,
    hasMoreEntries,
    timing: {
      spans: [fetchLogEntryAnomaliesSpan],
    },
  };
}
// TODO: Figure out why we need datasets
/**
 * Returns the datasets covered by the compatible metrics-K8s ML jobs within
 * the given time range, plus timing spans.
 *
 * @throws InsufficientAnomalyMlJobsConfigured when no metrics-K8s jobs exist.
 */
export async function getMetricK8sAnomaliesDatasets(
  context: {
    infra: {
      mlSystem: MlSystem;
      mlAnomalyDetectors: MlAnomalyDetectors;
      spaceId: string;
    };
  },
  sourceId: string,
  startTime: number,
  endTime: number
) {
  const {
    jobIds,
    timing: { spans: jobSpans },
  } = await getCompatibleAnomaliesJobIds(
    context.infra.spaceId,
    sourceId,
    context.infra.mlAnomalyDetectors
  );
  if (jobIds.length === 0) {
    // Fix: the message previously referenced log analysis ("Log rate or
    // categorisation") — a copy-paste from the logs implementation.
    throw new InsufficientAnomalyMlJobsConfigured(
      'Metrics K8s ML jobs need to be configured to search for anomaly datasets'
    );
  }
  const {
    data: datasets,
    timing: { spans: datasetsSpans },
  } = await getLogEntryDatasets(context.infra.mlSystem, startTime, endTime, jobIds);
  return {
    datasets,
    timing: {
      spans: [...jobSpans, ...datasetsSpans],
    },
  };
}

View file

@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// Parameters shared by every ML-results search built in this directory:
// tolerate missing/closed indices, and skip relevance scoring and total-hit
// counting since these queries only filter and sort.
export const defaultRequestParameters = {
  allowNoIndices: true,
  ignoreUnavailable: true,
  trackScores: false,
  trackTotalHits: false,
};
/**
 * Builds an ES `term` filter matching anomaly documents of a single ML job.
 * Returned as a one-element array so it can be spread into a bool filter list.
 */
export const createJobIdFilters = (jobId: string) => {
  const termClause = { term: { job_id: { value: jobId } } };
  return [termClause];
};
/**
 * Builds an ES `terms` filter matching anomaly documents of any of the
 * given ML jobs.
 */
export const createJobIdsFilters = (jobIds: string[]) => [{ terms: { job_id: jobIds } }];
/**
 * Builds an inclusive `range` filter on the anomaly record timestamp.
 * Both bounds are epoch milliseconds.
 */
export const createTimeRangeFilters = (startTime: number, endTime: number) => {
  const timestampRange = { gte: startTime, lte: endTime };
  return [{ range: { timestamp: timestampRange } }];
};
/**
 * Builds a `terms` filter restricting hits to the requested ML result types
 * (`record` for anomaly records, `model_plot` for model plot documents).
 */
export const createResultTypeFilters = (resultTypes: Array<'model_plot' | 'record'>) => {
  return [{ terms: { result_type: resultTypes } }];
};
/**
 * Builds a `terms` filter matching documents belonging to any of the given
 * ML category ids.
 */
export const createCategoryIdFilters = (categoryIds: number[]) => {
  return [{ terms: { category_id: categoryIds } }];
};
/**
 * Builds a `terms` filter on the ML partition field ("dataset") when any
 * datasets were requested; otherwise returns no filter at all so the query
 * matches every dataset.
 */
export const createDatasetsFilters = (datasets?: string[]) => {
  if (!datasets || datasets.length === 0) {
    return [];
  }
  return [{ terms: { partition_field_value: datasets } }];
};

View file

@ -0,0 +1,7 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// Aggregates the per-entity anomaly fetchers into a single module surface.
export * from './metrics_k8s_anomalies';
export * from './metrics_hosts_anomalies';

View file

@ -0,0 +1,84 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import {
createJobIdsFilters,
createResultTypeFilters,
createTimeRangeFilters,
defaultRequestParameters,
} from './common';
/**
 * Builds the ES request that lists the distinct `partition_field_value`s
 * ("datasets") present in model_plot results for the given jobs and time
 * range. A composite aggregation is used so callers can page through the
 * distinct values by passing the previous response's `after_key`.
 */
export const createLogEntryDatasetsQuery = (
  jobIds: string[],
  startTime: number,
  endTime: number,
  size: number,
  afterKey?: CompositeDatasetKey
) => {
  const filter = [
    ...createJobIdsFilters(jobIds),
    ...createTimeRangeFilters(startTime, endTime),
    ...createResultTypeFilters(['model_plot']),
  ];

  return {
    ...defaultRequestParameters,
    body: {
      query: { bool: { filter } },
      aggs: {
        dataset_buckets: {
          composite: {
            after: afterKey,
            size,
            sources: [
              {
                dataset: {
                  terms: { field: 'partition_field_value', order: 'asc' },
                },
              },
            ],
          },
        },
      },
    },
    // Only the aggregation buckets are needed, not the hits themselves.
    size: 0,
  };
};
// Codec for the composite aggregation's bucket key: the ML partition field
// value is surfaced under the `dataset` source name defined in the query.
const compositeDatasetKeyRT = rt.type({
  dataset: rt.string,
});

export type CompositeDatasetKey = rt.TypeOf<typeof compositeDatasetKeyRT>;
// Codec for one composite-aggregation bucket; only the key is consumed.
const logEntryDatasetBucketRT = rt.type({
  key: compositeDatasetKeyRT,
});

export type LogEntryDatasetBucket = rt.TypeOf<typeof logEntryDatasetBucketRT>;
// Codec for the dataset-listing search response. `aggregations` is partial
// because ES omits it when no indices matched; `after_key` is only present
// while there are further composite pages to fetch.
export const logEntryDatasetsResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.partial({
    aggregations: rt.type({
      dataset_buckets: rt.intersection([
        rt.type({
          buckets: rt.array(logEntryDatasetBucketRT),
        }),
        rt.partial({
          after_key: compositeDatasetKeyRT,
        }),
      ]),
    }),
  }),
]);

export type LogEntryDatasetsResponse = rt.TypeOf<typeof logEntryDatasetsResponseRT>;

View file

@ -0,0 +1,131 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import {
createJobIdsFilters,
createTimeRangeFilters,
createResultTypeFilters,
defaultRequestParameters,
} from './common';
import { Sort, Pagination } from '../../../../common/http_api/infra_ml';
// TODO: Reassess validity of this against ML docs
// `_doc` gives a cheap, stable tiebreaker for search_after pagination.
const TIEBREAKER_FIELD = '_doc';

// Maps API-level sort field names onto the underlying ML record fields.
const sortToMlFieldMap = {
  dataset: 'partition_field_value',
  anomalyScore: 'record_score',
  startTime: 'timestamp',
};
/**
 * Builds the ES search request that pages through host-scoped metrics
 * anomaly records for the given ML jobs within [startTime, endTime].
 * Results are sorted on the requested field with `_doc` as a tiebreaker and
 * paginated via `search_after` (see parsePaginationCursor for the
 * "search before" emulation).
 */
export const createMetricsHostsAnomaliesQuery = (
  jobIds: string[],
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) => {
  const { querySortDirection, queryCursor } = parsePaginationCursor(sort, pagination);

  // Only the fields consumed by the anomaly mapper are pulled from _source.
  const sourceFields = [
    'job_id',
    'record_score',
    'typical',
    'actual',
    'partition_field_value',
    'timestamp',
    'bucket_span',
    'by_field_value',
    'host.name',
    'influencers.influencer_field_name',
    'influencers.influencer_field_values',
  ];

  return {
    ...defaultRequestParameters,
    body: {
      query: {
        bool: {
          filter: [
            ...createJobIdsFilters(jobIds),
            ...createTimeRangeFilters(startTime, endTime),
            ...createResultTypeFilters(['record']),
          ],
        },
      },
      search_after: queryCursor,
      sort: [
        { [sortToMlFieldMap[sort.field]]: querySortDirection },
        { [TIEBREAKER_FIELD]: querySortDirection }, // Tiebreaker
      ],
      size: pagination.pageSize,
      _source: sourceFields,
    },
  };
};
// Codec for a single host-scoped anomaly record hit. `by_field_value` is
// optional because not every job configures a by-field.
// NOTE(review): `typical`/`actual` are arrays; downstream code reads only
// element 0 — confirm whether multi-element values can occur here.
export const metricsHostsAnomalyHitRT = rt.type({
  _id: rt.string,
  _source: rt.intersection([
    rt.type({
      job_id: rt.string,
      record_score: rt.number,
      typical: rt.array(rt.number),
      actual: rt.array(rt.number),
      'host.name': rt.array(rt.string),
      bucket_span: rt.number,
      timestamp: rt.number,
    }),
    rt.partial({
      by_field_value: rt.string,
    }),
  ]),
  // [sort field value, tiebreaker value] — mirrors the two-level query sort.
  sort: rt.tuple([rt.union([rt.string, rt.number]), rt.union([rt.string, rt.number])]),
});

export type MetricsHostsAnomalyHit = rt.TypeOf<typeof metricsHostsAnomalyHitRT>;
// Codec for the full anomaly search response: common success fields plus
// the typed hits array.
export const metricsHostsAnomaliesResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.type({
    hits: rt.type({
      hits: rt.array(metricsHostsAnomalyHitRT),
    }),
  }),
]);

// NOTE(review): this is the static type, so the `RT` suffix is a misnomer —
// renaming would touch external callers, so it is left as-is.
export type MetricsHostsAnomaliesResponseRT = rt.TypeOf<typeof metricsHostsAnomaliesResponseRT>;
/**
 * Translates the API-level pagination cursor into `search_after` inputs.
 * "searchBefore" is emulated by flipping the sort direction for the ES
 * query; the caller reverses the returned hits to restore the requested
 * order.
 */
const parsePaginationCursor = (sort: Sort, pagination: Pagination) => {
  const { cursor } = pagination;
  if (!cursor) {
    // First page: no cursor, query in the user's chosen direction.
    return { querySortDirection: sort.direction, queryCursor: undefined };
  }
  if ('searchBefore' in cursor) {
    const reversedDirection = sort.direction === 'desc' ? 'asc' : 'desc';
    return { querySortDirection: reversedDirection, queryCursor: cursor.searchBefore };
  }
  return { querySortDirection: sort.direction, queryCursor: cursor.searchAfter };
};

View file

@ -0,0 +1,128 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import {
createJobIdsFilters,
createTimeRangeFilters,
createResultTypeFilters,
defaultRequestParameters,
} from './common';
import { Sort, Pagination } from '../../../../common/http_api/infra_ml';
// TODO: Reassess validity of this against ML docs
// `_doc` provides a stable, index-order tiebreaker for search_after paging.
const TIEBREAKER_FIELD = '_doc';

// Maps the API's sort field names to the corresponding ML record fields.
const sortToMlFieldMap = {
  dataset: 'partition_field_value',
  anomalyScore: 'record_score',
  startTime: 'timestamp',
};
/**
 * Builds the ES search request that pages through k8s-scoped metrics
 * anomaly records for the given ML jobs within [startTime, endTime].
 * Sorting uses the requested field plus `_doc` as a tiebreaker; paging is
 * driven by `search_after` (see parsePaginationCursor).
 */
export const createMetricsK8sAnomaliesQuery = (
  jobIds: string[],
  startTime: number,
  endTime: number,
  sort: Sort,
  pagination: Pagination
) => {
  const { querySortDirection, queryCursor } = parsePaginationCursor(sort, pagination);

  // Restrict _source to the fields the anomaly mapper actually reads.
  const sourceFields = [
    'job_id',
    'record_score',
    'typical',
    'actual',
    'partition_field_value',
    'timestamp',
    'bucket_span',
    'by_field_value',
  ];

  return {
    ...defaultRequestParameters,
    body: {
      query: {
        bool: {
          filter: [
            ...createJobIdsFilters(jobIds),
            ...createTimeRangeFilters(startTime, endTime),
            ...createResultTypeFilters(['record']),
          ],
        },
      },
      search_after: queryCursor,
      sort: [
        { [sortToMlFieldMap[sort.field]]: querySortDirection },
        { [TIEBREAKER_FIELD]: querySortDirection }, // Tiebreaker
      ],
      size: pagination.pageSize,
      _source: sourceFields,
    },
  };
};
// Codec for a single k8s-scoped anomaly record hit. `by_field_value` is
// optional since not every job configures a by-field.
// NOTE(review): `typical`/`actual` are arrays; consumers read only element 0
// — confirm whether multi-element values can occur here.
export const metricsK8sAnomalyHitRT = rt.type({
  _id: rt.string,
  _source: rt.intersection([
    rt.type({
      job_id: rt.string,
      record_score: rt.number,
      typical: rt.array(rt.number),
      actual: rt.array(rt.number),
      // partition_field_value: rt.string,
      bucket_span: rt.number,
      timestamp: rt.number,
    }),
    rt.partial({
      by_field_value: rt.string,
    }),
  ]),
  // [sort field value, tiebreaker value] — mirrors the two-level query sort.
  sort: rt.tuple([rt.union([rt.string, rt.number]), rt.union([rt.string, rt.number])]),
});

export type MetricsK8sAnomalyHit = rt.TypeOf<typeof metricsK8sAnomalyHitRT>;
// Codec for the full anomaly search response: common success fields plus
// the typed hits array.
export const metricsK8sAnomaliesResponseRT = rt.intersection([
  commonSearchSuccessResponseFieldsRT,
  rt.type({
    hits: rt.type({
      hits: rt.array(metricsK8sAnomalyHitRT),
    }),
  }),
]);

// NOTE(review): this is the static type, so the `RT` suffix is a misnomer —
// renaming would touch external callers, so it is left as-is.
export type MetricsK8sAnomaliesResponseRT = rt.TypeOf<typeof metricsK8sAnomaliesResponseRT>;
/**
 * Converts the API pagination cursor into `search_after` parameters.
 * A "searchBefore" cursor is served by querying with the sort reversed;
 * the caller un-reverses the hits to restore the requested order.
 */
const parsePaginationCursor = (sort: Sort, pagination: Pagination) => {
  const { cursor } = pagination;
  const { direction } = sort;
  if (!cursor) {
    // No cursor means first page: keep the user's chosen direction.
    return { querySortDirection: direction, queryCursor: undefined };
  }
  if ('searchBefore' in cursor) {
    // Reverse the requested direction to emulate "search_before" in ES.
    return {
      querySortDirection: direction === 'desc' ? 'asc' : 'desc',
      queryCursor: cursor.searchBefore,
    };
  }
  return { querySortDirection: direction, queryCursor: cursor.searchAfter };
};

View file

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
/**
 * Builds the ML client request that fetches the named anomaly detector
 * jobs. `allow_no_jobs` makes the call return an empty list instead of an
 * error when none of the ids match.
 */
export const createMlJobsQuery = (jobIds: string[]) => {
  const jobIdPathSegment = jobIds.join(',');
  return {
    method: 'GET',
    path: `/_ml/anomaly_detectors/${jobIdPathSegment}`,
    query: {
      allow_no_jobs: true,
    },
  };
};
// Minimal job shape consumed here: the id plus the opaque `custom_settings`
// blob, kept as `unknown` because its schema is not validated at this layer.
export const mlJobRT = rt.type({
  job_id: rt.string,
  custom_settings: rt.unknown,
});

// Codec for the jobs-list response envelope.
export const mlJobsResponseRT = rt.type({
  jobs: rt.array(mlJobRT),
});

View file

@ -0,0 +1,7 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// Re-export the results route registrations.
export * from './results';

View file

@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// Route registrations for host- and k8s-scoped anomaly results.
export * from './metrics_hosts_anomalies';
export * from './metrics_k8s_anomalies';

View file

@ -0,0 +1,125 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Boom from 'boom';
import { InfraBackendLibs } from '../../../lib/infra_types';
import {
INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH,
getMetricsHostsAnomaliesSuccessReponsePayloadRT,
getMetricsHostsAnomaliesRequestPayloadRT,
GetMetricsHostsAnomaliesRequestPayload,
Sort,
Pagination,
} from '../../../../common/http_api/infra_ml';
import { createValidationFunction } from '../../../../common/runtime_types';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
import { isMlPrivilegesError } from '../../../lib/infra_ml/errors';
import { getMetricsHostsAnomalies } from '../../../lib/infra_ml';
/**
 * Registers the POST route that serves anomaly records produced by the
 * host-scoped metrics ML jobs for a given source and time range. Sort and
 * pagination defaults are applied server-side before querying.
 */
export const initGetHostsAnomaliesRoute = ({ framework }: InfraBackendLibs) => {
  framework.registerRoute(
    {
      method: 'post',
      path: INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH,
      validate: {
        // io-ts based validation of the request body.
        body: createValidationFunction(getMetricsHostsAnomaliesRequestPayloadRT),
      },
    },
    framework.router.handleLegacyErrors(async (requestContext, request, response) => {
      const {
        data: {
          sourceId,
          timeRange: { startTime, endTime },
          sort: sortParam,
          pagination: paginationParam,
        },
      } = request.body;

      // Fill in server-side defaults for any omitted sort/pagination options.
      const { sort, pagination } = getSortAndPagination(sortParam, paginationParam);

      try {
        // Throws when the ML plugin contracts are unavailable on this request.
        assertHasInfraMlPlugins(requestContext);

        const {
          data: anomalies,
          paginationCursors,
          hasMoreEntries,
          timing,
        } = await getMetricsHostsAnomalies(
          requestContext,
          sourceId,
          startTime,
          endTime,
          sort,
          pagination
        );

        return response.ok({
          body: getMetricsHostsAnomaliesSuccessReponsePayloadRT.encode({
            data: {
              anomalies,
              hasMoreEntries,
              paginationCursors,
            },
            timing,
          }),
        });
      } catch (error) {
        // Boom errors carry their own status; rethrow for the framework to map.
        if (Boom.isBoom(error)) {
          throw error;
        }

        // Missing ML privileges are reported as 403 rather than a generic 500.
        if (isMlPrivilegesError(error)) {
          return response.customError({
            statusCode: 403,
            body: {
              message: error.message,
            },
          });
        }

        return response.customError({
          statusCode: error.statusCode ?? 500,
          body: {
            message: error.message ?? 'An unexpected error occurred',
          },
        });
      }
    })
  );
};
/**
 * Applies server-side defaults to client-supplied sort and pagination
 * options: sort by anomaly score descending, 50 entries per page. Any
 * client-supplied values override the defaults.
 */
const getSortAndPagination = (
  sort: Partial<GetMetricsHostsAnomaliesRequestPayload['data']['sort']> = {},
  pagination: Partial<GetMetricsHostsAnomaliesRequestPayload['data']['pagination']> = {}
): {
  sort: Sort;
  pagination: Pagination;
} => ({
  sort: {
    field: 'anomalyScore' as const,
    direction: 'desc' as const,
    ...sort,
  },
  pagination: {
    pageSize: 50,
    ...pagination,
  },
});

View file

@ -0,0 +1,122 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Boom from 'boom';
import { InfraBackendLibs } from '../../../lib/infra_types';
import {
INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH,
getMetricsK8sAnomaliesSuccessReponsePayloadRT,
getMetricsK8sAnomaliesRequestPayloadRT,
GetMetricsK8sAnomaliesRequestPayload,
Sort,
Pagination,
} from '../../../../common/http_api/infra_ml';
import { createValidationFunction } from '../../../../common/runtime_types';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
import { getMetricK8sAnomalies } from '../../../lib/infra_ml';
import { isMlPrivilegesError } from '../../../lib/infra_ml/errors';
/**
 * Registers the POST route that serves anomaly records produced by the
 * k8s-scoped metrics ML jobs for a given source and time range. Sort and
 * pagination defaults are applied server-side before querying.
 */
export const initGetK8sAnomaliesRoute = ({ framework }: InfraBackendLibs) => {
  framework.registerRoute(
    {
      method: 'post',
      path: INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH,
      validate: {
        // io-ts based validation of the request body.
        body: createValidationFunction(getMetricsK8sAnomaliesRequestPayloadRT),
      },
    },
    framework.router.handleLegacyErrors(async (requestContext, request, response) => {
      const {
        data: {
          sourceId,
          timeRange: { startTime, endTime },
          sort: sortParam,
          pagination: paginationParam,
        },
      } = request.body;

      // Fill in server-side defaults for any omitted sort/pagination options.
      const { sort, pagination } = getSortAndPagination(sortParam, paginationParam);

      try {
        // Throws if the ML plugin contracts are not available on this request.
        assertHasInfraMlPlugins(requestContext);

        const {
          data: anomalies,
          paginationCursors,
          hasMoreEntries,
          timing,
        } = await getMetricK8sAnomalies(
          requestContext,
          sourceId,
          startTime,
          endTime,
          sort,
          pagination
        );

        return response.ok({
          body: getMetricsK8sAnomaliesSuccessReponsePayloadRT.encode({
            data: {
              anomalies,
              hasMoreEntries,
              paginationCursors,
            },
            timing,
          }),
        });
      } catch (error) {
        // Boom errors carry their own status; rethrow for the framework to map.
        if (Boom.isBoom(error)) {
          throw error;
        }

        // Missing ML privileges are reported as 403 rather than a generic 500.
        if (isMlPrivilegesError(error)) {
          return response.customError({
            statusCode: 403,
            body: {
              message: error.message,
            },
          });
        }

        return response.customError({
          statusCode: error.statusCode ?? 500,
          body: {
            message: error.message ?? 'An unexpected error occurred',
          },
        });
      }
    })
  );
};
/**
 * Merges server-side defaults (anomaly score descending, 50 rows per page)
 * with whatever sort/pagination options the client supplied.
 */
const getSortAndPagination = (
  sort: Partial<GetMetricsK8sAnomaliesRequestPayload['data']['sort']> = {},
  pagination: Partial<GetMetricsK8sAnomaliesRequestPayload['data']['pagination']> = {}
): {
  sort: Sort;
  pagination: Pagination;
} => {
  return {
    sort: { field: 'anomalyScore' as const, direction: 'desc' as const, ...sort },
    pagination: { pageSize: 50, ...pagination },
  };
};