[ML] Use indices options in anomaly detection job wizards (#91830)

* [ML] WIP Datafeed preview refactor

* adding indices options to AD (anomaly detection) job creation searches

* update datafeed preview schema

* updating types

* recalculating wizard time range on JSON edit save

* updating endpoint docs

* fixing types

* more type fixes

* fixing missing runtime fields

* using isPopulatedObject

* adding indices options schema

* fixing test

* fixing schema

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
James Gowdy 2021-03-16 19:54:56 +00:00 committed by GitHub
parent 2374af3011
commit 4914acc0d1
45 changed files with 490 additions and 289 deletions

View file

@ -45,7 +45,7 @@ export type Aggregation = Record<
}
>;
interface IndicesOptions {
export interface IndicesOptions {
expand_wildcards?: 'all' | 'open' | 'closed' | 'hidden' | 'none';
ignore_unavailable?: boolean;
allow_no_indices?: boolean;

View file

@ -5,7 +5,9 @@
* 2.0.
*/
import { Job, JobStats } from './anomaly_detection_jobs';
import { Job, JobStats, IndicesOptions } from './anomaly_detection_jobs';
import { RuntimeMappings } from './fields';
import { ES_AGGREGATION } from '../constants/aggregation_types';
export interface MlJobsResponse {
jobs: Job[];
@ -23,3 +25,18 @@ export interface JobsExistResponse {
isGroup: boolean;
};
}
export interface BucketSpanEstimatorData {
aggTypes: Array<ES_AGGREGATION | null>;
duration: {
start: number;
end: number;
};
fields: Array<string | null>;
index: string;
query: any;
splitField: string | undefined;
timeField: string | undefined;
runtimeMappings: RuntimeMappings | undefined;
indicesOptions: IndicesOptions | undefined;
}
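A sketch of a BucketSpanEstimatorData payload as the wizard now builds it, with indicesOptions carried through from the datafeed config; the index pattern, time field and time range values are hypothetical:

// shaped like the BucketSpanEstimatorData interface exported above
const estimatorPayload = {
  aggTypes: [null],          // one aggregation type per detector; null is allowed
  duration: { start: 1609459200000, end: 1612137600000 },   // epoch millis
  fields: [null],            // one field name per detector; null is allowed
  index: 'my-index-*',       // hypothetical index pattern
  query: { match_all: {} },
  splitField: undefined,
  timeField: '@timestamp',   // hypothetical time field
  runtimeMappings: undefined,
  indicesOptions: { ignore_unavailable: true },
};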

View file

@ -29,7 +29,8 @@ export function chartLoaderProvider(mlResultsService: MlResultsService) {
job.data_description.time_field,
job.data_counts.earliest_record_timestamp,
job.data_counts.latest_record_timestamp,
intervalMs
intervalMs,
job.datafeed_config.indices_options
);
if (resp.error !== undefined) {
throw resp.error;

View file

@ -8,6 +8,7 @@
import memoizeOne from 'memoize-one';
import { isEqual } from 'lodash';
import { IndexPatternTitle } from '../../../../../../common/types/kibana';
import { IndicesOptions } from '../../../../../../common/types/anomaly_detection_jobs';
import {
Field,
SplitField,
@ -56,7 +57,8 @@ export class ChartLoader {
splitField: SplitField,
splitFieldValue: SplitFieldValue,
intervalMs: number,
runtimeMappings: RuntimeMappings | null
runtimeMappings: RuntimeMappings | null,
indicesOptions?: IndicesOptions
): Promise<LineChartData> {
if (this._timeFieldName !== '') {
if (aggFieldPairsCanBeCharted(aggFieldPairs) === false) {
@ -77,7 +79,8 @@ export class ChartLoader {
aggFieldPairNames,
splitFieldName,
splitFieldValue,
runtimeMappings ?? undefined
runtimeMappings ?? undefined,
indicesOptions
);
return resp.results;
@ -91,7 +94,8 @@ export class ChartLoader {
aggFieldPairs: AggFieldPair[],
splitField: SplitField,
intervalMs: number,
runtimeMappings: RuntimeMappings | null
runtimeMappings: RuntimeMappings | null,
indicesOptions?: IndicesOptions
): Promise<LineChartData> {
if (this._timeFieldName !== '') {
if (aggFieldPairsCanBeCharted(aggFieldPairs) === false) {
@ -111,7 +115,8 @@ export class ChartLoader {
this._query,
aggFieldPairNames,
splitFieldName,
runtimeMappings ?? undefined
runtimeMappings ?? undefined,
indicesOptions
);
return resp.results;
@ -122,7 +127,8 @@ export class ChartLoader {
async loadEventRateChart(
start: number,
end: number,
intervalMs: number
intervalMs: number,
indicesOptions?: IndicesOptions
): Promise<LineChartPoint[]> {
if (this._timeFieldName !== '') {
const resp = await getEventRateData(
@ -131,7 +137,8 @@ export class ChartLoader {
this._timeFieldName,
start,
end,
intervalMs * 3
intervalMs * 3,
indicesOptions
);
if (resp.error !== undefined) {
throw resp.error;
@ -147,14 +154,16 @@ export class ChartLoader {
async loadFieldExampleValues(
field: Field,
runtimeMappings: RuntimeMappings | null
runtimeMappings: RuntimeMappings | null,
indicesOptions?: IndicesOptions
): Promise<string[]> {
const { results } = await getCategoryFields(
this._indexPatternTitle,
field.name,
10,
this._query,
runtimeMappings ?? undefined
runtimeMappings ?? undefined,
indicesOptions
);
return results;
}

View file

@ -9,6 +9,7 @@ import { get } from 'lodash';
import { ml } from '../../../../services/ml_api_service';
import { RuntimeMappings } from '../../../../../../common/types/fields';
import { IndicesOptions } from '../../../../../../common/types/anomaly_detection_jobs';
interface CategoryResults {
success: boolean;
@ -20,7 +21,8 @@ export function getCategoryFields(
fieldName: string,
size: number,
query: any,
runtimeMappings?: RuntimeMappings
runtimeMappings?: RuntimeMappings,
indicesOptions?: IndicesOptions
): Promise<CategoryResults> {
return new Promise((resolve, reject) => {
ml.esSearch({
@ -38,6 +40,7 @@ export function getCategoryFields(
},
...(runtimeMappings !== undefined ? { runtime_mappings: runtimeMappings } : {}),
},
...(indicesOptions ?? {}),
})
.then((resp: any) => {
const catFields = get(resp, ['aggregations', 'catFields', 'buckets'], []);
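The change above illustrates the pattern used throughout this commit: the caller's indices options are spread into the top level of the search request, next to index and body, and omitted entirely when undefined. A minimal self-contained sketch of that request-building step (the names here are placeholders, not the real ml.esSearch service):

interface IndicesOptions {
  ignore_unavailable?: boolean;
  allow_no_indices?: boolean;
  ignore_throttled?: boolean;
}

function buildSearchRequest(index: string, body: object, indicesOptions?: IndicesOptions) {
  return {
    index,
    size: 0,
    body,
    // spread only when supplied, so existing requests keep their exact shape
    ...(indicesOptions ?? {}),
  };
}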

View file

@ -177,16 +177,13 @@ export class AdvancedJobCreator extends JobCreator {
// load the start and end times for the selected index
// and apply them to the job creator
public async autoSetTimeRange() {
try {
const { start, end } = await ml.getTimeFieldRange({
index: this._indexPatternTitle,
timeFieldName: this.timeFieldName,
query: this.query,
});
this.setTimeRange(start.epoch, end.epoch);
} catch (error) {
throw Error(error);
}
const { start, end } = await ml.getTimeFieldRange({
index: this._indexPatternTitle,
timeFieldName: this.timeFieldName,
query: this.query,
indicesOptions: this.datafeedConfig.indices_options,
});
this.setTimeRange(start.epoch, end.epoch);
}
public cloneFromExistingJob(job: Job, datafeed: Datafeed) {
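autoSetTimeRange no longer swallows its own errors, so callers are now expected to catch and surface them. A sketch of the calling pattern, assuming jobCreator is an AdvancedJobCreator and displayErrorToast comes from the toast notification service:

try {
  await jobCreator.autoSetTimeRange();
} catch (error) {
  // title text mirrors the i18n message used in the wizard
  displayErrorToast(error, 'Error retrieving beginning and end times of index');
}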

View file

@ -51,7 +51,8 @@ export class CategorizationExamplesLoader {
this._jobCreator.start,
this._jobCreator.end,
analyzer,
this._jobCreator.runtimeMappings ?? undefined
this._jobCreator.runtimeMappings ?? undefined,
this._jobCreator.datafeedConfig.indices_options
);
return resp;
}

View file

@ -256,7 +256,8 @@ export class ResultsLoader {
if (this._jobCreator.splitField !== null) {
const fieldValues = await this._chartLoader.loadFieldExampleValues(
this._jobCreator.splitField,
this._jobCreator.runtimeMappings
this._jobCreator.runtimeMappings,
this._jobCreator.datafeedConfig.indices_options
);
if (fieldValues.length > 0) {
this._detectorSplitFieldFilters = {

View file

@ -25,7 +25,9 @@ import { CombinedJob, Datafeed } from '../../../../../../../../common/types/anom
import { ML_EDITOR_MODE, MLJobEditor } from '../../../../../jobs_list/components/ml_job_editor';
import { isValidJson } from '../../../../../../../../common/util/validation_utils';
import { JobCreatorContext } from '../../job_creator_context';
import { isAdvancedJobCreator } from '../../../../common/job_creator';
import { DatafeedPreview } from '../datafeed_preview_flyout';
import { useToastNotificationService } from '../../../../../../services/toast_notification_service';
export enum EDITOR_MODE {
HIDDEN,
@ -40,6 +42,7 @@ interface Props {
}
export const JsonEditorFlyout: FC<Props> = ({ isDisabled, jobEditorMode, datafeedEditorMode }) => {
const { jobCreator, jobCreatorUpdate, jobCreatorUpdated } = useContext(JobCreatorContext);
const { displayErrorToast } = useToastNotificationService();
const [showJsonFlyout, setShowJsonFlyout] = useState(false);
const [showChangedIndicesWarning, setShowChangedIndicesWarning] = useState(false);
@ -120,10 +123,23 @@ export const JsonEditorFlyout: FC<Props> = ({ isDisabled, jobEditorMode, datafee
setSaveable(valid);
}
function onSave() {
async function onSave() {
const jobConfig = JSON.parse(jobConfigString);
const datafeedConfig = JSON.parse(collapseLiteralStrings(datafeedConfigString));
jobCreator.cloneFromExistingJob(jobConfig, datafeedConfig);
if (isAdvancedJobCreator(jobCreator)) {
try {
await jobCreator.autoSetTimeRange();
} catch (error) {
const title = i18n.translate(
'xpack.ml.newJob.wizard.jsonFlyout.autoSetJobCreatorTimeRange.error',
{
defaultMessage: `Error retrieving beginning and end times of index`,
}
);
displayErrorToast(error, title);
}
}
jobCreatorUpdate();
setShowJsonFlyout(false);
}

View file

@ -9,12 +9,13 @@ import { useContext, useState } from 'react';
import { JobCreatorContext } from '../../../job_creator_context';
import { EVENT_RATE_FIELD_ID } from '../../../../../../../../../common/types/fields';
import { BucketSpanEstimatorData } from '../../../../../../../../../common/types/job_service';
import {
isMultiMetricJobCreator,
isPopulationJobCreator,
isAdvancedJobCreator,
} from '../../../../../common/job_creator';
import { ml, BucketSpanEstimatorData } from '../../../../../../../services/ml_api_service';
import { ml } from '../../../../../../../services/ml_api_service';
import { useMlContext } from '../../../../../../../contexts/ml';
import { getToastNotificationService } from '../../../../../../../services/toast_notification_service';
@ -41,6 +42,7 @@ export function useEstimateBucketSpan() {
splitField: undefined,
timeField: mlContext.currentIndexPattern.timeFieldName,
runtimeMappings: jobCreator.runtimeMappings ?? undefined,
indicesOptions: jobCreator.datafeedConfig.indices_options,
};
if (

View file

@ -54,7 +54,8 @@ export const CategorizationDetectorsSummary: FC = () => {
const resp = await chartLoader.loadEventRateChart(
jobCreator.start,
jobCreator.end,
chartInterval.getInterval().asMilliseconds()
chartInterval.getInterval().asMilliseconds(),
jobCreator.datafeedConfig.indices_options
);
setEventRateChartData(resp);
} catch (error) {

View file

@ -111,7 +111,11 @@ export const MultiMetricDetectors: FC<Props> = ({ setIsValid }) => {
useEffect(() => {
if (splitField !== null) {
chartLoader
.loadFieldExampleValues(splitField, jobCreator.runtimeMappings)
.loadFieldExampleValues(
splitField,
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
)
.then(setFieldValues)
.catch((error) => {
getToastNotificationService().displayErrorToast(error);
@ -140,7 +144,8 @@ export const MultiMetricDetectors: FC<Props> = ({ setIsValid }) => {
jobCreator.splitField,
fieldValues.length > 0 ? fieldValues[0] : null,
cs.intervalMs,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
setLineChartsData(resp);
} catch (error) {

View file

@ -43,7 +43,8 @@ export const MultiMetricDetectorsSummary: FC = () => {
try {
const tempFieldValues = await chartLoader.loadFieldExampleValues(
jobCreator.splitField,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
setFieldValues(tempFieldValues);
} catch (error) {
@ -76,7 +77,8 @@ export const MultiMetricDetectorsSummary: FC = () => {
jobCreator.splitField,
fieldValues.length > 0 ? fieldValues[0] : null,
cs.intervalMs,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
setLineChartsData(resp);
} catch (error) {

View file

@ -160,7 +160,8 @@ export const PopulationDetectors: FC<Props> = ({ setIsValid }) => {
aggFieldPairList,
jobCreator.splitField,
cs.intervalMs,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
setLineChartsData(resp);
@ -180,7 +181,11 @@ export const PopulationDetectors: FC<Props> = ({ setIsValid }) => {
(async (index: number, field: Field) => {
return {
index,
fields: await chartLoader.loadFieldExampleValues(field, jobCreator.runtimeMappings),
fields: await chartLoader.loadFieldExampleValues(
field,
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
),
};
})(i, af.by.field)
);

View file

@ -78,7 +78,8 @@ export const PopulationDetectorsSummary: FC = () => {
aggFieldPairList,
jobCreator.splitField,
cs.intervalMs,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
setLineChartsData(resp);
@ -98,7 +99,11 @@ export const PopulationDetectorsSummary: FC = () => {
(async (index: number, field: Field) => {
return {
index,
fields: await chartLoader.loadFieldExampleValues(field, jobCreator.runtimeMappings),
fields: await chartLoader.loadFieldExampleValues(
field,
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
),
};
})(i, af.by.field)
);

View file

@ -93,7 +93,8 @@ export const SingleMetricDetectors: FC<Props> = ({ setIsValid }) => {
null,
null,
cs.intervalMs,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
if (resp[DTR_IDX] !== undefined) {
setLineChartData(resp);

View file

@ -59,7 +59,8 @@ export const SingleMetricDetectorsSummary: FC = () => {
null,
null,
cs.intervalMs,
jobCreator.runtimeMappings
jobCreator.runtimeMappings,
jobCreator.datafeedConfig.indices_options
);
if (resp[DTR_IDX] !== undefined) {
setLineChartData(resp);

View file

@ -47,7 +47,8 @@ export const TimeRangeStep: FC<StepProps> = ({ setCurrentStep, isCurrentStep })
const resp = await chartLoader.loadEventRateChart(
jobCreator.start,
jobCreator.end,
chartInterval.getInterval().asMilliseconds()
chartInterval.getInterval().asMilliseconds(),
jobCreator.datafeedConfig.indices_options
);
setEventRateChartData(resp);
} catch (error) {

View file

@ -20,7 +20,6 @@ import { FormattedMessage } from '@kbn/i18n/react';
import { Wizard } from './wizard';
import { WIZARD_STEPS } from '../components/step_types';
import { getJobCreatorTitle } from '../../common/job_creator/util/general';
import { useMlKibana } from '../../../../contexts/kibana';
import {
jobCreatorFactory,
isAdvancedJobCreator,
@ -41,6 +40,7 @@ import { ExistingJobsAndGroups, mlJobService } from '../../../../services/job_se
import { newJobCapsService } from '../../../../services/new_job_capabilities_service';
import { EVENT_RATE_FIELD_ID } from '../../../../../../common/types/fields';
import { getNewJobDefaults } from '../../../../services/ml_server_info';
import { useToastNotificationService } from '../../../../services/toast_notification_service';
const PAGE_WIDTH = 1200; // document.querySelector('.single-metric-job-container').width();
const BAR_TARGET = PAGE_WIDTH > 2000 ? 1000 : PAGE_WIDTH / 2;
@ -52,15 +52,13 @@ export interface PageProps {
}
export const Page: FC<PageProps> = ({ existingJobsAndGroups, jobType }) => {
const {
services: { notifications },
} = useMlKibana();
const mlContext = useMlContext();
const jobCreator = jobCreatorFactory(jobType)(
mlContext.currentIndexPattern,
mlContext.currentSavedSearch,
mlContext.combinedQuery
);
const { displayErrorToast } = useToastNotificationService();
const { from, to } = getTimeFilterRange();
jobCreator.setTimeRange(from, to);
@ -154,17 +152,12 @@ export const Page: FC<PageProps> = ({ existingJobsAndGroups, jobType }) => {
if (autoSetTimeRange && isAdvancedJobCreator(jobCreator)) {
// for advanced jobs, load the full time range start and end times
// so they can be used for job validation and bucket span estimation
try {
jobCreator.autoSetTimeRange();
} catch (error) {
const { toasts } = notifications;
toasts.addDanger({
title: i18n.translate('xpack.ml.newJob.wizard.autoSetJobCreatorTimeRange.error', {
defaultMessage: `Error retrieving beginning and end times of index`,
}),
text: error,
jobCreator.autoSetTimeRange().catch((error) => {
const title = i18n.translate('xpack.ml.newJob.wizard.autoSetJobCreatorTimeRange.error', {
defaultMessage: `Error retrieving beginning and end times of index`,
});
}
displayErrorToast(error, title);
});
}
function initCategorizationSettings() {

View file

@ -13,7 +13,6 @@ import { ml } from './ml_api_service';
import { getToastNotificationService } from '../services/toast_notification_service';
import { isWebUrl } from '../util/url_utils';
import { ML_DATA_PREVIEW_COUNT } from '../../../common/util/job_utils';
import { TIME_FORMAT } from '../../../common/constants/time_format';
import { parseInterval } from '../../../common/util/parse_interval';
import { validateTimeRange } from '../../../common/util/date_utils';
@ -348,163 +347,9 @@ class JobService {
return job;
}
searchPreview(job) {
return new Promise((resolve, reject) => {
if (job.datafeed_config) {
// if query is set, add it to the search, otherwise use match_all
let query = { match_all: {} };
if (job.datafeed_config.query) {
query = job.datafeed_config.query;
}
// Get bucket span
// Get first doc time for datafeed
// Create a new query - must user query and must range query.
// Time range 'to' first doc time plus < 10 buckets
// Do a preliminary search to get the date of the earliest doc matching the
// query in the datafeed. This will be used to apply a time range criteria
// on the datafeed search preview.
// This time filter is required for datafeed searches using aggregations to ensure
// the search does not create too many buckets (default 10000 max_bucket limit),
// but apply it to searches without aggregations too for consistency.
ml.getTimeFieldRange({
index: job.datafeed_config.indices,
timeFieldName: job.data_description.time_field,
query,
})
.then((timeRange) => {
const bucketSpan = parseInterval(job.analysis_config.bucket_span);
const earliestMs = timeRange.start.epoch;
const latestMs = +timeRange.start.epoch + 10 * bucketSpan.asMilliseconds();
const body = {
query: {
bool: {
must: [
{
range: {
[job.data_description.time_field]: {
gte: earliestMs,
lt: latestMs,
format: 'epoch_millis',
},
},
},
query,
],
},
},
};
// if aggs or aggregations is set, add it to the search
const aggregations = job.datafeed_config.aggs || job.datafeed_config.aggregations;
if (aggregations && Object.keys(aggregations).length) {
body.size = 0;
body.aggregations = aggregations;
// add script_fields if present
const scriptFields = job.datafeed_config.script_fields;
if (scriptFields && Object.keys(scriptFields).length) {
body.script_fields = scriptFields;
}
// add runtime_mappings if present
const runtimeMappings = job.datafeed_config.runtime_mappings;
if (runtimeMappings && Object.keys(runtimeMappings).length) {
body.runtime_mappings = runtimeMappings;
}
} else {
// if aggregations is not set and retrieveWholeSource is not set, add all of the fields from the job
body.size = ML_DATA_PREVIEW_COUNT;
// add script_fields if present
const scriptFields = job.datafeed_config.script_fields;
if (scriptFields && Object.keys(scriptFields).length) {
body.script_fields = scriptFields;
}
// add runtime_mappings if present
const runtimeMappings = job.datafeed_config.runtime_mappings;
if (runtimeMappings && Object.keys(runtimeMappings).length) {
body.runtime_mappings = runtimeMappings;
}
const fields = {};
// get fields from detectors
if (job.analysis_config.detectors) {
each(job.analysis_config.detectors, (dtr) => {
if (dtr.by_field_name) {
fields[dtr.by_field_name] = {};
}
if (dtr.field_name) {
fields[dtr.field_name] = {};
}
if (dtr.over_field_name) {
fields[dtr.over_field_name] = {};
}
if (dtr.partition_field_name) {
fields[dtr.partition_field_name] = {};
}
});
}
// get fields from influencers
if (job.analysis_config.influencers) {
each(job.analysis_config.influencers, (inf) => {
fields[inf] = {};
});
}
// get fields from categorizationFieldName
if (job.analysis_config.categorization_field_name) {
fields[job.analysis_config.categorization_field_name] = {};
}
// get fields from summary_count_field_name
if (job.analysis_config.summary_count_field_name) {
fields[job.analysis_config.summary_count_field_name] = {};
}
// get fields from time_field
if (job.data_description.time_field) {
fields[job.data_description.time_field] = {};
}
// add runtime fields
if (runtimeMappings) {
Object.keys(runtimeMappings).forEach((fieldName) => {
fields[fieldName] = {};
});
}
const fieldsList = Object.keys(fields);
if (fieldsList.length) {
body.fields = fieldsList;
body._source = false;
}
}
const data = {
index: job.datafeed_config.indices,
body,
...(job.datafeed_config.indices_options || {}),
};
ml.esSearch(data)
.then((resp) => {
resolve(resp);
})
.catch((resp) => {
reject(resp);
});
})
.catch((resp) => {
reject(resp);
});
}
});
searchPreview(combinedJob) {
const { datafeed_config: datafeed, ...job } = combinedJob;
return ml.jobs.datafeedPreview(job, datafeed);
}
openJob(jobId) {
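With the preview logic moved server side, the client-side searchPreview reduces to splitting the combined config and delegating. A usage sketch, assuming combinedJob is a job config with an embedded datafeed_config:

const { datafeed_config: datafeedConfig, ...jobConfig } = combinedJob;
const previewResponse = await ml.jobs.datafeedPreview(jobConfig, datafeedConfig);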

View file

@ -24,7 +24,7 @@ import {
import { MlCapabilitiesResponse } from '../../../../common/types/capabilities';
import { Calendar, CalendarId, UpdateCalendar } from '../../../../common/types/calendars';
import { RuntimeMappings } from '../../../../common/types/fields';
import { BucketSpanEstimatorData } from '../../../../common/types/job_service';
import {
Job,
JobStats,
@ -33,8 +33,8 @@ import {
Detector,
AnalysisConfig,
ModelSnapshot,
IndicesOptions,
} from '../../../../common/types/anomaly_detection_jobs';
import { ES_AGGREGATION } from '../../../../common/constants/aggregation_types';
import {
FieldHistogramRequestConfig,
FieldRequestConfig,
@ -53,20 +53,6 @@ export interface MlInfoResponse {
cloudId?: string;
}
export interface BucketSpanEstimatorData {
aggTypes: Array<ES_AGGREGATION | null>;
duration: {
start: number;
end: number;
};
fields: Array<string | null>;
index: string;
query: any;
splitField: string | undefined;
timeField: string | undefined;
runtimeMappings: RuntimeMappings | undefined;
}
export interface BucketSpanEstimatorResponse {
name: string;
ms: number;
@ -704,12 +690,14 @@ export function mlApiServicesProvider(httpService: HttpService) {
index,
timeFieldName,
query,
indicesOptions,
}: {
index: string;
timeFieldName?: string;
query: any;
indicesOptions?: IndicesOptions;
}) {
const body = JSON.stringify({ index, timeFieldName, query });
const body = JSON.stringify({ index, timeFieldName, query, indicesOptions });
return httpService.http<GetTimeFieldRangeResponse>({
path: `${basePath()}/fields_service/time_field_range`,

View file

@ -15,6 +15,7 @@ import type {
CombinedJobWithStats,
Job,
Datafeed,
IndicesOptions,
} from '../../../../common/types/anomaly_detection_jobs';
import type { JobMessage } from '../../../../common/types/audit_message';
import type { AggFieldNamePair, RuntimeMappings } from '../../../../common/types/fields';
@ -189,7 +190,8 @@ export const jobsApiProvider = (httpService: HttpService) => ({
aggFieldNamePairs: AggFieldNamePair[],
splitFieldName: string | null,
splitFieldValue: string | null,
runtimeMappings?: RuntimeMappings
runtimeMappings?: RuntimeMappings,
indicesOptions?: IndicesOptions
) {
const body = JSON.stringify({
indexPatternTitle,
@ -202,6 +204,7 @@ export const jobsApiProvider = (httpService: HttpService) => ({
splitFieldName,
splitFieldValue,
runtimeMappings,
indicesOptions,
});
return httpService.http<any>({
path: `${ML_BASE_PATH}/jobs/new_job_line_chart`,
@ -219,7 +222,8 @@ export const jobsApiProvider = (httpService: HttpService) => ({
query: any,
aggFieldNamePairs: AggFieldNamePair[],
splitFieldName: string,
runtimeMappings?: RuntimeMappings
runtimeMappings?: RuntimeMappings,
indicesOptions?: IndicesOptions
) {
const body = JSON.stringify({
indexPatternTitle,
@ -231,6 +235,7 @@ export const jobsApiProvider = (httpService: HttpService) => ({
aggFieldNamePairs,
splitFieldName,
runtimeMappings,
indicesOptions,
});
return httpService.http<any>({
path: `${ML_BASE_PATH}/jobs/new_job_population_chart`,
@ -268,7 +273,8 @@ export const jobsApiProvider = (httpService: HttpService) => ({
start: number,
end: number,
analyzer: CategorizationAnalyzer,
runtimeMappings?: RuntimeMappings
runtimeMappings?: RuntimeMappings,
indicesOptions?: IndicesOptions
) {
const body = JSON.stringify({
indexPatternTitle,
@ -280,6 +286,7 @@ export const jobsApiProvider = (httpService: HttpService) => ({
end,
analyzer,
runtimeMappings,
indicesOptions,
});
return httpService.http<{
examples: CategoryFieldExample[];
@ -322,4 +329,16 @@ export const jobsApiProvider = (httpService: HttpService) => ({
body,
});
},
datafeedPreview(job: Job, datafeed: Datafeed) {
const body = JSON.stringify({ job, datafeed });
return httpService.http<{
total: number;
categories: Array<{ count?: number; category: Category }>;
}>({
path: `${ML_BASE_PATH}/jobs/datafeed_preview`,
method: 'POST',
body,
});
},
});
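A usage sketch for the new client method, previewing an unsaved job/datafeed pair straight from the wizard; job and datafeed are assumed to be valid Job and Datafeed configs built by the job creator:

const preview = await ml.jobs.datafeedPreview(job, datafeed);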

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { IndicesOptions } from '../../../../common/types/anomaly_detection_jobs';
import { MlApiServices } from '../ml_api_service';
export function resultsServiceProvider(
@ -58,7 +59,8 @@ export function resultsServiceProvider(
timeFieldName: string,
earliestMs: number,
latestMs: number,
intervalMs: number
intervalMs: number,
indicesOptions?: IndicesOptions
): Promise<any>;
getEventDistributionData(
index: string,

View file

@ -1052,7 +1052,15 @@ export function resultsServiceProvider(mlApiServices) {
// Extra query object can be supplied, or pass null if no additional query.
// Returned response contains a results property, which is an object
// of document counts against time (epoch millis).
getEventRateData(index, query, timeFieldName, earliestMs, latestMs, intervalMs) {
getEventRateData(
index,
query,
timeFieldName,
earliestMs,
latestMs,
intervalMs,
indicesOptions
) {
return new Promise((resolve, reject) => {
const obj = { success: true, results: {} };
@ -1102,6 +1110,7 @@ export function resultsServiceProvider(mlApiServices) {
},
},
},
...(indicesOptions ?? {}),
})
.then((resp) => {
const dataByTimeBucket = get(resp, ['aggregations', 'eventRate', 'buckets'], []);

View file

@ -8,20 +8,8 @@
import { IScopedClusterClient } from 'kibana/server';
import { ES_AGGREGATION } from '../../../common/constants/aggregation_types';
import { RuntimeMappings } from '../../../common/types/fields';
export interface BucketSpanEstimatorData {
aggTypes: Array<ES_AGGREGATION | null>;
duration: {
start: number;
end: number;
};
fields: Array<string | null>;
index: string;
query: any;
splitField: string | undefined;
timeField: string | undefined;
runtimeMappings: RuntimeMappings | undefined;
}
import { IndicesOptions } from '../../../common/types/anomaly_detection_jobs';
import { BucketSpanEstimatorData } from '../../../common/types/job_service';
export function estimateBucketSpanFactory({
asCurrentUser,

View file

@ -20,7 +20,17 @@ export function estimateBucketSpanFactory(client) {
class BucketSpanEstimator {
constructor(
{ index, timeField, aggTypes, fields, duration, query, splitField, runtimeMappings },
{
index,
timeField,
aggTypes,
fields,
duration,
query,
splitField,
runtimeMappings,
indicesOptions,
},
splitFieldValues,
maxBuckets
) {
@ -72,7 +82,8 @@ export function estimateBucketSpanFactory(client) {
this.index,
this.timeField,
this.duration,
this.query
this.query,
indicesOptions
);
if (this.aggTypes.length === this.fields.length) {
@ -89,7 +100,8 @@ export function estimateBucketSpanFactory(client) {
this.duration,
this.query,
this.thresholds,
this.runtimeMappings
this.runtimeMappings,
indicesOptions
),
result: null,
});
@ -112,7 +124,8 @@ export function estimateBucketSpanFactory(client) {
this.duration,
queryCopy,
this.thresholds,
this.runtimeMappings
this.runtimeMappings,
indicesOptions
),
result: null,
});
@ -246,7 +259,7 @@ export function estimateBucketSpanFactory(client) {
}
}
const getFieldCardinality = function (index, field, runtimeMappings) {
const getFieldCardinality = function (index, field, runtimeMappings, indicesOptions) {
return new Promise((resolve, reject) => {
asCurrentUser
.search({
@ -262,6 +275,7 @@ export function estimateBucketSpanFactory(client) {
},
...(runtimeMappings !== undefined ? { runtime_mappings: runtimeMappings } : {}),
},
...(indicesOptions ?? {}),
})
.then(({ body }) => {
const value = get(body, ['aggregations', 'field_count', 'value'], 0);
@ -273,13 +287,13 @@ export function estimateBucketSpanFactory(client) {
});
};
const getRandomFieldValues = function (index, field, query, runtimeMappings) {
const getRandomFieldValues = function (index, field, query, runtimeMappings, indicesOptions) {
let fieldValues = [];
return new Promise((resolve, reject) => {
const NUM_PARTITIONS = 10;
// use a partitioned search to load 10 random fields
// load ten fields, to test that there are at least 10.
getFieldCardinality(index, field)
getFieldCardinality(index, field, runtimeMappings, indicesOptions)
.then((value) => {
const numPartitions = Math.floor(value / NUM_PARTITIONS) || 1;
asCurrentUser
@ -301,6 +315,7 @@ export function estimateBucketSpanFactory(client) {
},
...(runtimeMappings !== undefined ? { runtime_mappings: runtimeMappings } : {}),
},
...(indicesOptions ?? {}),
})
.then(({ body }) => {
// eslint-disable-next-line camelcase
@ -390,7 +405,8 @@ export function estimateBucketSpanFactory(client) {
formConfig.index,
formConfig.splitField,
formConfig.query,
formConfig.runtimeMappings
formConfig.runtimeMappings,
formConfig.indicesOptions
)
.then((splitFieldValues) => {
runEstimator(splitFieldValues);

View file

@ -8,8 +8,9 @@
import { IScopedClusterClient } from 'kibana/server';
import { ES_AGGREGATION } from '../../../common/constants/aggregation_types';
import { BucketSpanEstimatorData } from '../../../common/types/job_service';
import { estimateBucketSpanFactory, BucketSpanEstimatorData } from './bucket_span_estimator';
import { estimateBucketSpanFactory } from './bucket_span_estimator';
const callAs = {
search: () => Promise.resolve({ body: {} }),
@ -36,6 +37,7 @@ const formConfig: BucketSpanEstimatorData = {
splitField: undefined,
timeField: undefined,
runtimeMappings: undefined,
indicesOptions: undefined,
};
describe('ML - BucketSpanEstimator', () => {

View file

@ -15,11 +15,12 @@ import { get } from 'lodash';
export function polledDataCheckerFactory({ asCurrentUser }) {
class PolledDataChecker {
constructor(index, timeField, duration, query) {
constructor(index, timeField, duration, query, indicesOptions) {
this.index = index;
this.timeField = timeField;
this.duration = duration;
this.query = query;
this.indicesOptions = indicesOptions;
this.isPolled = false;
this.minimumBucketSpan = 0;
@ -73,6 +74,7 @@ export function polledDataCheckerFactory({ asCurrentUser }) {
index: this.index,
size: 0,
body: searchBody,
...(this.indicesOptions ?? {}),
});
return body;
}

View file

@ -18,7 +18,17 @@ export function singleSeriesCheckerFactory({ asCurrentUser }) {
const REF_DATA_INTERVAL = { name: '1h', ms: 3600000 };
class SingleSeriesChecker {
constructor(index, timeField, aggType, field, duration, query, thresholds, runtimeMappings) {
constructor(
index,
timeField,
aggType,
field,
duration,
query,
thresholds,
runtimeMappings,
indicesOptions
) {
this.index = index;
this.timeField = timeField;
this.aggType = aggType;
@ -32,6 +42,7 @@ export function singleSeriesCheckerFactory({ asCurrentUser }) {
created: false,
};
this.runtimeMappings = runtimeMappings;
this.indicesOptions = indicesOptions;
this.interval = null;
}
@ -193,6 +204,7 @@ export function singleSeriesCheckerFactory({ asCurrentUser }) {
index: this.index,
size: 0,
body: searchBody,
...(this.indicesOptions ?? {}),
});
return body;
}

View file

@ -13,7 +13,7 @@ import { initCardinalityFieldsCache } from './fields_aggs_cache';
import { AggCardinality } from '../../../common/types/fields';
import { isValidAggregationField } from '../../../common/util/validation_utils';
import { getDatafeedAggregations } from '../../../common/util/datafeed_utils';
import { Datafeed } from '../../../common/types/anomaly_detection_jobs';
import { Datafeed, IndicesOptions } from '../../../common/types/anomaly_detection_jobs';
/**
* Service for carrying out queries to obtain data
@ -183,6 +183,7 @@ export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
} = await asCurrentUser.search({
index,
body,
...(datafeedConfig?.indices_options ?? {}),
});
if (!aggregations) {
@ -210,7 +211,8 @@ export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
async function getTimeFieldRange(
index: string[] | string,
timeFieldName: string,
query: any
query: any,
indicesOptions?: IndicesOptions
): Promise<{
success: boolean;
start: { epoch: number; string: string };
@ -238,6 +240,7 @@ export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
},
},
},
...(indicesOptions ?? {}),
});
if (aggregations && aggregations.earliest && aggregations.latest) {
@ -394,6 +397,7 @@ export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
} = await asCurrentUser.search({
index,
body,
...(datafeedConfig?.indices_options ?? {}),
});
if (!aggregations) {

View file

@ -6,10 +6,15 @@
*/
import { i18n } from '@kbn/i18n';
import { IScopedClusterClient } from 'kibana/server';
import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states';
import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils';
import { Datafeed, DatafeedStats } from '../../../common/types/anomaly_detection_jobs';
import { Datafeed, DatafeedStats, Job } from '../../../common/types/anomaly_detection_jobs';
import { ML_DATA_PREVIEW_COUNT } from '../../../common/util/job_utils';
import { fieldsServiceProvider } from '../fields_service';
import type { MlClient } from '../../lib/ml_client';
import { parseInterval } from '../../../common/util/parse_interval';
import { isPopulatedObject } from '../../../common/util/object_utils';
export interface MlDatafeedsResponse {
datafeeds: Datafeed[];
@ -27,7 +32,7 @@ interface Results {
};
}
export function datafeedsProvider(mlClient: MlClient) {
export function datafeedsProvider(client: IScopedClusterClient, mlClient: MlClient) {
async function forceStartDatafeeds(datafeedIds: string[], start?: number, end?: number) {
const jobIds = await getJobIdsByDatafeedId();
const doStartsCalled = datafeedIds.reduce((acc, cur) => {
@ -204,6 +209,153 @@ export function datafeedsProvider(mlClient: MlClient) {
}
}
async function datafeedPreview(job: Job, datafeed: Datafeed) {
let query: any = { match_all: {} };
if (datafeed.query) {
query = datafeed.query;
}
const { getTimeFieldRange } = fieldsServiceProvider(client);
const { start } = await getTimeFieldRange(
datafeed.indices,
job.data_description.time_field,
query,
datafeed.indices_options
);
// Get bucket span
// Get first doc time for datafeed
// Create a new query - must user query and must range query.
// Time range 'to' first doc time plus < 10 buckets
// Do a preliminary search to get the date of the earliest doc matching the
// query in the datafeed. This will be used to apply a time range criteria
// on the datafeed search preview.
// This time filter is required for datafeed searches using aggregations to ensure
// the search does not create too many buckets (default 10000 max_bucket limit),
// but apply it to searches without aggregations too for consistency.
const bucketSpan = parseInterval(job.analysis_config.bucket_span);
if (bucketSpan === null) {
return;
}
const earliestMs = start.epoch;
const latestMs = +start.epoch + 10 * bucketSpan.asMilliseconds();
const body: any = {
query: {
bool: {
must: [
{
range: {
[job.data_description.time_field]: {
gte: earliestMs,
lt: latestMs,
format: 'epoch_millis',
},
},
},
query,
],
},
},
};
// if aggs or aggregations is set, add it to the search
const aggregations = datafeed.aggs ?? datafeed.aggregations;
if (isPopulatedObject(aggregations)) {
body.size = 0;
body.aggregations = aggregations;
// add script_fields if present
const scriptFields = datafeed.script_fields;
if (isPopulatedObject(scriptFields)) {
body.script_fields = scriptFields;
}
// add runtime_mappings if present
const runtimeMappings = datafeed.runtime_mappings;
if (isPopulatedObject(runtimeMappings)) {
body.runtime_mappings = runtimeMappings;
}
} else {
// if aggregations is not set and retrieveWholeSource is not set, add all of the fields from the job
body.size = ML_DATA_PREVIEW_COUNT;
// add script_fields if present
const scriptFields = datafeed.script_fields;
if (isPopulatedObject(scriptFields)) {
body.script_fields = scriptFields;
}
// add runtime_mappings if present
const runtimeMappings = datafeed.runtime_mappings;
if (isPopulatedObject(runtimeMappings)) {
body.runtime_mappings = runtimeMappings;
}
const fields = new Set<string>();
// get fields from detectors
if (job.analysis_config.detectors) {
job.analysis_config.detectors.forEach((dtr) => {
if (dtr.by_field_name) {
fields.add(dtr.by_field_name);
}
if (dtr.field_name) {
fields.add(dtr.field_name);
}
if (dtr.over_field_name) {
fields.add(dtr.over_field_name);
}
if (dtr.partition_field_name) {
fields.add(dtr.partition_field_name);
}
});
}
// get fields from influencers
if (job.analysis_config.influencers) {
job.analysis_config.influencers.forEach((inf) => {
fields.add(inf);
});
}
// get fields from categorizationFieldName
if (job.analysis_config.categorization_field_name) {
fields.add(job.analysis_config.categorization_field_name);
}
// get fields from summary_count_field_name
if (job.analysis_config.summary_count_field_name) {
fields.add(job.analysis_config.summary_count_field_name);
}
// get fields from time_field
if (job.data_description.time_field) {
fields.add(job.data_description.time_field);
}
// add runtime fields
if (runtimeMappings) {
Object.keys(runtimeMappings).forEach((fieldName) => {
fields.add(fieldName);
});
}
const fieldsList = [...fields];
if (fieldsList.length) {
body.fields = fieldsList;
body._source = false;
}
}
const data = {
index: datafeed.indices,
body,
...(datafeed.indices_options ?? {}),
};
return (await client.asCurrentUser.search(data)).body;
}
return {
forceStartDatafeeds,
stopDatafeeds,
@ -211,5 +363,6 @@ export function datafeedsProvider(mlClient: MlClient) {
getDatafeedIdsByJobId,
getJobIdsByDatafeedId,
getDatafeedByJobId,
datafeedPreview,
};
}
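The datafeedPreview implementation above leans on isPopulatedObject to decide whether optional sections such as aggregations, script_fields and runtime_mappings should be copied into the search body. A minimal sketch of that guard, assuming it simply checks for a non-null object with at least one key (the real helper lives in common/util/object_utils):

export function isPopulatedObject(obj: unknown): obj is Record<string, unknown> {
  return typeof obj === 'object' && obj !== null && Object.keys(obj).length > 0;
}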

View file

@ -16,12 +16,12 @@ import type { MlClient } from '../../lib/ml_client';
export function jobServiceProvider(client: IScopedClusterClient, mlClient: MlClient) {
return {
...datafeedsProvider(mlClient),
...datafeedsProvider(client, mlClient),
...jobsProvider(client, mlClient),
...groupsProvider(mlClient),
...newJobCapsProvider(client),
...newJobChartsProvider(client),
...topCategoriesProvider(mlClient),
...modelSnapshotProvider(mlClient),
...modelSnapshotProvider(client, mlClient),
};
}

View file

@ -52,6 +52,7 @@ export function jobsProvider(client: IScopedClusterClient, mlClient: MlClient) {
const { asInternalUser } = client;
const { forceDeleteDatafeed, getDatafeedIdsByJobId, getDatafeedByJobId } = datafeedsProvider(
client,
mlClient
);
const { getAuditMessagesSummary } = jobAuditMessagesProvider(client, mlClient);

View file

@ -7,6 +7,7 @@
import Boom from '@hapi/boom';
import { i18n } from '@kbn/i18n';
import { IScopedClusterClient } from 'kibana/server';
import { ModelSnapshot } from '../../../common/types/anomaly_detection_jobs';
import { datafeedsProvider } from './datafeeds';
import { FormCalendar, CalendarManager } from '../calendar';
@ -20,8 +21,8 @@ export interface RevertModelSnapshotResponse {
model: ModelSnapshot;
}
export function modelSnapshotProvider(mlClient: MlClient) {
const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(mlClient);
export function modelSnapshotProvider(client: IScopedClusterClient, mlClient: MlClient) {
const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(client, mlClient);
async function revertModelSnapshot(
jobId: string,

View file

@ -15,6 +15,7 @@ import {
CategoryFieldExample,
} from '../../../../../common/types/categories';
import { RuntimeMappings } from '../../../../../common/types/fields';
import { IndicesOptions } from '../../../../../common/types/anomaly_detection_jobs';
import { ValidationResults } from './validation_results';
const CHUNK_SIZE = 100;
@ -34,7 +35,8 @@ export function categorizationExamplesProvider({
start: number,
end: number,
analyzer: CategorizationAnalyzer,
runtimeMappings: RuntimeMappings | undefined
runtimeMappings: RuntimeMappings | undefined,
indicesOptions: IndicesOptions | undefined
): Promise<{ examples: CategoryFieldExample[]; error?: any }> {
if (timeField !== undefined) {
const range = {
@ -69,6 +71,7 @@ export function categorizationExamplesProvider({
sort: ['_doc'],
...(runtimeMappings !== undefined ? { runtime_mappings: runtimeMappings } : {}),
},
...(indicesOptions ?? {}),
});
// hit.fields can be undefined if value is originally null
@ -169,7 +172,8 @@ export function categorizationExamplesProvider({
start: number,
end: number,
analyzer: CategorizationAnalyzer,
runtimeMappings: RuntimeMappings | undefined
runtimeMappings: RuntimeMappings | undefined,
indicesOptions: IndicesOptions | undefined
) {
const resp = await categorizationExamples(
indexPatternTitle,
@ -180,7 +184,8 @@ export function categorizationExamplesProvider({
start,
end,
analyzer,
runtimeMappings
runtimeMappings,
indicesOptions
);
const { examples } = resp;

View file

@ -12,6 +12,7 @@ import {
EVENT_RATE_FIELD_ID,
RuntimeMappings,
} from '../../../../common/types/fields';
import { IndicesOptions } from '../../../../common/types/anomaly_detection_jobs';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
type DtrIndex = number;
@ -39,7 +40,8 @@ export function newJobLineChartProvider({ asCurrentUser }: IScopedClusterClient)
aggFieldNamePairs: AggFieldNamePair[],
splitFieldName: string | null,
splitFieldValue: string | null,
runtimeMappings: RuntimeMappings | undefined
runtimeMappings: RuntimeMappings | undefined,
indicesOptions: IndicesOptions | undefined
) {
const json: object = getSearchJsonFromConfig(
indexPatternTitle,
@ -51,7 +53,8 @@ export function newJobLineChartProvider({ asCurrentUser }: IScopedClusterClient)
aggFieldNamePairs,
splitFieldName,
splitFieldValue,
runtimeMappings
runtimeMappings,
indicesOptions
);
const { body } = await asCurrentUser.search(json);
@ -110,7 +113,8 @@ function getSearchJsonFromConfig(
aggFieldNamePairs: AggFieldNamePair[],
splitFieldName: string | null,
splitFieldValue: string | null,
runtimeMappings: RuntimeMappings | undefined
runtimeMappings: RuntimeMappings | undefined,
indicesOptions: IndicesOptions | undefined
): object {
const json = {
index: indexPatternTitle,
@ -134,6 +138,7 @@ function getSearchJsonFromConfig(
},
...(runtimeMappings !== undefined ? { runtime_mappings: runtimeMappings } : {}),
},
...(indicesOptions ?? {}),
};
if (query.bool === undefined) {

View file

@ -12,6 +12,7 @@ import {
EVENT_RATE_FIELD_ID,
RuntimeMappings,
} from '../../../../common/types/fields';
import { IndicesOptions } from '../../../../common/types/anomaly_detection_jobs';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
const OVER_FIELD_EXAMPLES_COUNT = 40;
@ -44,7 +45,8 @@ export function newJobPopulationChartProvider({ asCurrentUser }: IScopedClusterC
query: object,
aggFieldNamePairs: AggFieldNamePair[],
splitFieldName: string | null,
runtimeMappings: RuntimeMappings | undefined
runtimeMappings: RuntimeMappings | undefined,
indicesOptions: IndicesOptions | undefined
) {
const json: object = getPopulationSearchJsonFromConfig(
indexPatternTitle,
@ -55,7 +57,8 @@ export function newJobPopulationChartProvider({ asCurrentUser }: IScopedClusterC
query,
aggFieldNamePairs,
splitFieldName,
runtimeMappings
runtimeMappings,
indicesOptions
);
const { body } = await asCurrentUser.search(json);
@ -138,7 +141,8 @@ function getPopulationSearchJsonFromConfig(
query: any,
aggFieldNamePairs: AggFieldNamePair[],
splitFieldName: string | null,
runtimeMappings: RuntimeMappings | undefined
runtimeMappings: RuntimeMappings | undefined,
indicesOptions: IndicesOptions | undefined
): object {
const json = {
index: indexPatternTitle,
@ -162,6 +166,7 @@ function getPopulationSearchJsonFromConfig(
},
...(runtimeMappings !== undefined ? { runtime_mappings: runtimeMappings } : {}),
},
...(indicesOptions ?? {}),
};
if (query.bool === undefined) {

View file

@ -84,6 +84,7 @@
"GetLookBackProgress",
"ValidateCategoryExamples",
"TopCategories",
"DatafeedPreview",
"UpdateGroups",
"DeletingJobTasks",
"DeleteJobs",

View file

@ -133,7 +133,7 @@ describe('schema_extractor', () => {
{
name: 'expand_wildcards',
documentation: '',
type: 'string[]',
type: '"all" | "open" | "closed" | "hidden" | "none"[]',
},
{
name: 'ignore_unavailable',

View file

@ -22,8 +22,8 @@ function getCardinalityOfFields(client: IScopedClusterClient, payload: any) {
function getTimeFieldRange(client: IScopedClusterClient, payload: any) {
const fs = fieldsServiceProvider(client);
const { index, timeFieldName, query } = payload;
return fs.getTimeFieldRange(index, timeFieldName, query);
const { index, timeFieldName, query, indicesOptions } = payload;
return fs.getTimeFieldRange(index, timeFieldName, query, indicesOptions);
}
/**

View file

@ -20,12 +20,14 @@ import {
updateGroupsSchema,
revertModelSnapshotSchema,
jobsExistSchema,
datafeedPreviewSchema,
} from './schemas/job_service_schema';
import { jobIdSchema } from './schemas/anomaly_detectors_schema';
import { jobServiceProvider } from '../models/job_service';
import { categorizationExamplesProvider } from '../models/job_service/new_job';
import { getAuthorizationHeader } from '../lib/request_authorization';
/**
* Routes for job service
@ -535,6 +537,7 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
splitFieldName,
splitFieldValue,
runtimeMappings,
indicesOptions,
} = request.body;
const { newJobLineChart } = jobServiceProvider(client, mlClient);
@ -548,7 +551,8 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
aggFieldNamePairs,
splitFieldName,
splitFieldValue,
runtimeMappings
runtimeMappings,
indicesOptions
);
return response.ok({
@ -591,6 +595,7 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
aggFieldNamePairs,
splitFieldName,
runtimeMappings,
indicesOptions,
} = request.body;
const { newJobPopulationChart } = jobServiceProvider(client, mlClient);
@ -603,7 +608,8 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
query,
aggFieldNamePairs,
splitFieldName,
runtimeMappings
runtimeMappings,
indicesOptions
);
return response.ok({
@ -710,6 +716,7 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
end,
analyzer,
runtimeMappings,
indicesOptions,
} = request.body;
const resp = await validateCategoryExamples(
@ -721,7 +728,8 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
start,
end,
analyzer,
runtimeMappings
runtimeMappings,
indicesOptions
);
return response.ok({
@ -767,6 +775,52 @@ export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
})
);
/**
* @apiGroup JobService
*
* @api {post} /api/ml/jobs/datafeed_preview Get datafeed preview
* @apiName DatafeedPreview
* @apiDescription Returns a preview of the datafeed search
*
* @apiSchema (body) datafeedPreviewSchema
*/
router.post(
{
path: '/api/ml/jobs/datafeed_preview',
validate: {
body: datafeedPreviewSchema,
},
options: {
tags: ['access:ml:canGetJobs'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { datafeedId, job, datafeed } = request.body;
if (datafeedId !== undefined) {
const { body } = await mlClient.previewDatafeed(
{
datafeed_id: datafeedId,
},
getAuthorizationHeader(request)
);
return response.ok({
body,
});
}
const { datafeedPreview } = jobServiceProvider(client, mlClient);
const body = await datafeedPreview(job, datafeed);
return response.ok({
body,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
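The new route accepts either an existing datafeed ID or an unsaved job/datafeed pair. A sketch of the two request shapes, assuming http is Kibana's core HTTP service and jobConfig/datafeedConfig are placeholder configs:

// preview an existing, persisted datafeed
await http.post('/api/ml/jobs/datafeed_preview', {
  body: JSON.stringify({ datafeedId: 'datafeed-my-job' }),
});

// preview an unsaved job/datafeed pair from the wizard
await http.post('/api/ml/jobs/datafeed_preview', {
  body: JSON.stringify({ job: jobConfig, datafeed: datafeedConfig }),
});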
/**
* @apiGroup JobService
*

View file

@ -13,6 +13,23 @@ export const startDatafeedSchema = schema.object({
timeout: schema.maybe(schema.any()),
});
export const indicesOptionsSchema = schema.object({
expand_wildcards: schema.maybe(
schema.arrayOf(
schema.oneOf([
schema.literal('all'),
schema.literal('open'),
schema.literal('closed'),
schema.literal('hidden'),
schema.literal('none'),
])
)
),
ignore_unavailable: schema.maybe(schema.boolean()),
allow_no_indices: schema.maybe(schema.boolean()),
ignore_throttled: schema.maybe(schema.boolean()),
});
export const datafeedConfigSchema = schema.object({
datafeed_id: schema.maybe(schema.string()),
feed_id: schema.maybe(schema.string()),
@ -35,14 +52,7 @@ export const datafeedConfigSchema = schema.object({
runtime_mappings: schema.maybe(schema.any()),
scroll_size: schema.maybe(schema.number()),
delayed_data_check_config: schema.maybe(schema.any()),
indices_options: schema.maybe(
schema.object({
expand_wildcards: schema.maybe(schema.arrayOf(schema.string())),
ignore_unavailable: schema.maybe(schema.boolean()),
allow_no_indices: schema.maybe(schema.boolean()),
ignore_throttled: schema.maybe(schema.boolean()),
})
),
indices_options: indicesOptionsSchema,
});
export const datafeedIdSchema = schema.object({ datafeedId: schema.string() });
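A sketch of what the new indicesOptionsSchema accepts, using @kbn/config-schema's validate; the option values are hypothetical:

const validated = indicesOptionsSchema.validate({
  expand_wildcards: ['open', 'hidden'],
  ignore_unavailable: true,
  allow_no_indices: true,
});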

View file

@ -6,6 +6,7 @@
*/
import { schema } from '@kbn/config-schema';
import { indicesOptionsSchema } from './datafeeds_schema';
export const getCardinalityOfFieldsSchema = schema.object({
/** Index or indexes for which to return the time range. */
@ -29,4 +30,6 @@ export const getTimeFieldRangeSchema = schema.object({
timeFieldName: schema.maybe(schema.string()),
/** Query to match documents in the index(es). */
query: schema.maybe(schema.any()),
/** Additional search options. */
indicesOptions: indicesOptionsSchema,
});

View file

@ -6,6 +6,8 @@
*/
import { schema } from '@kbn/config-schema';
import { anomalyDetectionJobSchema } from './anomaly_detectors_schema';
import { datafeedConfigSchema, indicesOptionsSchema } from './datafeeds_schema';
export const categorizationFieldExamplesSchema = {
indexPatternTitle: schema.string(),
@ -17,6 +19,7 @@ export const categorizationFieldExamplesSchema = {
end: schema.number(),
analyzer: schema.any(),
runtimeMappings: schema.maybe(schema.any()),
indicesOptions: indicesOptionsSchema,
};
export const chartSchema = {
@ -30,6 +33,7 @@ export const chartSchema = {
splitFieldName: schema.maybe(schema.nullable(schema.string())),
splitFieldValue: schema.maybe(schema.nullable(schema.string())),
runtimeMappings: schema.maybe(schema.any()),
indicesOptions: indicesOptionsSchema,
};
export const datafeedIdsSchema = schema.object({
@ -92,6 +96,16 @@ export const revertModelSnapshotSchema = schema.object({
),
});
export const datafeedPreviewSchema = schema.oneOf([
schema.object({
job: schema.maybe(schema.object(anomalyDetectionJobSchema)),
datafeed: schema.maybe(datafeedConfigSchema),
}),
schema.object({
datafeedId: schema.maybe(schema.string()),
}),
]);
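Because datafeedPreviewSchema is a oneOf, two body shapes validate; a sketch with hypothetical configs (jobConfig and datafeedConfig are placeholders for valid job and datafeed objects):

// an existing datafeed, looked up by ID on the server
datafeedPreviewSchema.validate({ datafeedId: 'datafeed-my-job' });

// an unsaved job/datafeed pair supplied inline
datafeedPreviewSchema.validate({ job: jobConfig, datafeed: datafeedConfig });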
export const jobsExistSchema = schema.object({
jobIds: schema.arrayOf(schema.string()),
allSpaces: schema.maybe(schema.boolean()),

View file

@ -7,7 +7,7 @@
import { schema } from '@kbn/config-schema';
import { analysisConfigSchema, anomalyDetectionJobSchema } from './anomaly_detectors_schema';
import { datafeedConfigSchema } from './datafeeds_schema';
import { datafeedConfigSchema, indicesOptionsSchema } from './datafeeds_schema';
export const estimateBucketSpanSchema = schema.object({
aggTypes: schema.arrayOf(schema.nullable(schema.string())),
@ -19,6 +19,7 @@ export const estimateBucketSpanSchema = schema.object({
splitField: schema.maybe(schema.string()),
timeField: schema.maybe(schema.string()),
runtimeMappings: schema.maybe(schema.any()),
indicesOptions: indicesOptionsSchema,
});
export const modelMemoryLimitSchema = schema.object({