[Logs UI] Add dataset filter to ML module setup screen (#64470)

This adds the ability to filter, on the setup screen, which datasets the ML jobs will process.
Felix Stürmer 2020-05-04 13:29:28 +02:00 committed by GitHub
parent ccede29e60
commit 39e31d6123
26 changed files with 991 additions and 271 deletions

View file

@ -0,0 +1,44 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
export const LOG_ANALYSIS_VALIDATE_DATASETS_PATH =
'/api/infra/log_analysis/validation/log_entry_datasets';
/**
* Request types
*/
export const validateLogEntryDatasetsRequestPayloadRT = rt.type({
data: rt.type({
indices: rt.array(rt.string),
timestampField: rt.string,
startTime: rt.number,
endTime: rt.number,
}),
});
export type ValidateLogEntryDatasetsRequestPayload = rt.TypeOf<
typeof validateLogEntryDatasetsRequestPayloadRT
>;
/**
* Response types
* */
const logEntryDatasetsEntryRT = rt.strict({
indexName: rt.string,
datasets: rt.array(rt.string),
});
export const validateLogEntryDatasetsResponsePayloadRT = rt.type({
data: rt.type({
datasets: rt.array(logEntryDatasetsEntryRT),
}),
});
export type ValidateLogEntryDatasetsResponsePayload = rt.TypeOf<
typeof validateLogEntryDatasetsResponsePayloadRT
>;
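For reference, a minimal sketch of how these codecs are meant to be used on either side of the wire; the import path is hypothetical and the values are sample data:

import { isRight } from 'fp-ts/lib/Either';
// Hypothetical import path; the codec is exported from the common/http_api module added above.
import { validateLogEntryDatasetsRequestPayloadRT } from '../http_api/log_analysis';

const requestBody = validateLogEntryDatasetsRequestPayloadRT.encode({
  data: {
    indices: ['filebeat-*'],
    timestampField: '@timestamp',
    startTime: Date.now() - 24 * 60 * 60 * 1000,
    endTime: Date.now(),
  },
});

// The same codec validates untrusted input on the receiving side.
const decoded = validateLogEntryDatasetsRequestPayloadRT.decode(requestBody);
if (isRight(decoded)) {
  // decoded.right is fully typed as ValidateLogEntryDatasetsRequestPayload.
  console.log(decoded.right.data.indices);
}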

View file

@ -4,4 +4,5 @@
* you may not use this file except in compliance with the Elastic License.
*/
export * from './datasets';
export * from './log_entry_rate_indices';

View file

@ -21,17 +21,73 @@ export const getJobId = (spaceId: string, sourceId: string, jobType: string) =>
export const getDatafeedId = (spaceId: string, sourceId: string, jobType: string) =>
`datafeed-${getJobId(spaceId, sourceId, jobType)}`;
export const jobSourceConfigurationRT = rt.type({
export const datasetFilterRT = rt.union([
rt.strict({
type: rt.literal('includeAll'),
}),
rt.strict({
type: rt.literal('includeSome'),
datasets: rt.array(rt.string),
}),
]);
export type DatasetFilter = rt.TypeOf<typeof datasetFilterRT>;
export const jobSourceConfigurationRT = rt.partial({
indexPattern: rt.string,
timestampField: rt.string,
bucketSpan: rt.number,
datasetFilter: datasetFilterRT,
});
export type JobSourceConfiguration = rt.TypeOf<typeof jobSourceConfigurationRT>;
export const jobCustomSettingsRT = rt.partial({
job_revision: rt.number,
logs_source_config: rt.partial(jobSourceConfigurationRT.props),
logs_source_config: jobSourceConfigurationRT,
});
export type JobCustomSettings = rt.TypeOf<typeof jobCustomSettingsRT>;
export const combineDatasetFilters = (
firstFilter: DatasetFilter,
secondFilter: DatasetFilter
): DatasetFilter => {
if (firstFilter.type === 'includeAll' && secondFilter.type === 'includeAll') {
return {
type: 'includeAll',
};
}
const includedDatasets = new Set([
...(firstFilter.type === 'includeSome' ? firstFilter.datasets : []),
...(secondFilter.type === 'includeSome' ? secondFilter.datasets : []),
]);
return {
type: 'includeSome',
datasets: [...includedDatasets],
};
};
export const filterDatasetFilter = (
datasetFilter: DatasetFilter,
predicate: (dataset: string) => boolean
): DatasetFilter => {
if (datasetFilter.type === 'includeAll') {
return datasetFilter;
} else {
const newDatasets = datasetFilter.datasets.filter(predicate);
if (newDatasets.length > 0) {
return {
type: 'includeSome',
datasets: newDatasets,
};
} else {
return {
type: 'includeAll',
};
}
}
};
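To illustrate the helpers above, a small data-level sketch (dataset names and the import path are hypothetical):

import { combineDatasetFilters, DatasetFilter, filterDatasetFilter } from '../common/log_analysis'; // hypothetical path

const nginxOnly: DatasetFilter = { type: 'includeSome', datasets: ['nginx.access'] };
const syslogOnly: DatasetFilter = { type: 'includeSome', datasets: ['system.syslog'] };

// Combining two partial filters yields the union of their datasets.
const combined = combineDatasetFilters(nginxOnly, syslogOnly);
// => { type: 'includeSome', datasets: ['nginx.access', 'system.syslog'] }

// Filtering keeps only matching datasets and falls back to 'includeAll'
// when nothing is left, rather than producing an empty 'includeSome'.
const pruned = filterDatasetFilter(combined, dataset => dataset.startsWith('nginx'));
// => { type: 'includeSome', datasets: ['nginx.access'] }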

View file

@ -4,56 +4,41 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { EuiCode, EuiDescribedFormGroup, EuiFormRow, EuiCheckbox, EuiToolTip } from '@elastic/eui';
import { EuiDescribedFormGroup, EuiFormRow } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import React, { useCallback, useMemo } from 'react';
import React, { useCallback } from 'react';
import { LoadingOverlayWrapper } from '../../../loading_overlay_wrapper';
import { ValidatedIndex, ValidationIndicesUIError } from './validation';
import { IndexSetupRow } from './index_setup_row';
import { AvailableIndex } from './validation';
export const AnalysisSetupIndicesForm: React.FunctionComponent<{
disabled?: boolean;
indices: ValidatedIndex[];
indices: AvailableIndex[];
isValidating: boolean;
onChangeSelectedIndices: (selectedIndices: ValidatedIndex[]) => void;
onChangeSelectedIndices: (selectedIndices: AvailableIndex[]) => void;
valid: boolean;
}> = ({ disabled = false, indices, isValidating, onChangeSelectedIndices, valid }) => {
const handleCheckboxChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
const changeIsIndexSelected = useCallback(
(indexName: string, isSelected: boolean) => {
onChangeSelectedIndices(
indices.map(index => {
const checkbox = event.currentTarget;
return index.name === checkbox.id ? { ...index, isSelected: checkbox.checked } : index;
return index.name === indexName ? { ...index, isSelected } : index;
})
);
},
[indices, onChangeSelectedIndices]
);
const choices = useMemo(
() =>
indices.map(index => {
const checkbox = (
<EuiCheckbox
key={index.name}
id={index.name}
label={<EuiCode>{index.name}</EuiCode>}
onChange={handleCheckboxChange}
checked={index.validity === 'valid' && index.isSelected}
disabled={disabled || index.validity === 'invalid'}
/>
);
return index.validity === 'valid' ? (
checkbox
) : (
<div key={index.name}>
<EuiToolTip content={formatValidationError(index.errors)}>{checkbox}</EuiToolTip>
</div>
);
}),
[disabled, handleCheckboxChange, indices]
const changeDatasetFilter = useCallback(
(indexName: string, datasetFilter) => {
onChangeSelectedIndices(
indices.map(index => {
return index.name === indexName ? { ...index, datasetFilter } : index;
})
);
},
[indices, onChangeSelectedIndices]
);
return (
@ -69,13 +54,23 @@ export const AnalysisSetupIndicesForm: React.FunctionComponent<{
description={
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionDescription"
defaultMessage="By default, Machine Learning analyzes log messages in all log indices configured for the source. You can choose to only analyze a subset of the index names. Every selected index name must match at least one index with log entries."
defaultMessage="By default, Machine Learning analyzes log messages in all log indices configured for the source. You can choose to only analyze a subset of the index names. Every selected index name must match at least one index with log entries. You can also choose to only include a certain subset of datasets. Note that the dataset filter applies to all selected indices."
/>
}
>
<LoadingOverlayWrapper isLoading={isValidating}>
<EuiFormRow fullWidth isInvalid={!valid} label={indicesSelectionLabel} labelType="legend">
<>{choices}</>
<>
{indices.map(index => (
<IndexSetupRow
index={index}
isDisabled={disabled}
key={index.name}
onChangeIsSelected={changeIsIndexSelected}
onChangeDatasetFilter={changeDatasetFilter}
/>
))}
</>
</EuiFormRow>
</LoadingOverlayWrapper>
</EuiDescribedFormGroup>
@ -85,51 +80,3 @@ export const AnalysisSetupIndicesForm: React.FunctionComponent<{
const indicesSelectionLabel = i18n.translate('xpack.infra.analysisSetup.indicesSelectionLabel', {
defaultMessage: 'Indices',
});
const formatValidationError = (errors: ValidationIndicesUIError[]): React.ReactNode => {
return errors.map(error => {
switch (error.error) {
case 'INDEX_NOT_FOUND':
return (
<p key={`${error.error}-${error.index}`}>
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionIndexNotFound"
defaultMessage="No indices match the pattern {index}"
values={{ index: <EuiCode>{error.index}</EuiCode> }}
/>
</p>
);
case 'FIELD_NOT_FOUND':
return (
<p key={`${error.error}-${error.index}-${error.field}`}>
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionNoTimestampField"
defaultMessage="At least one index matching {index} lacks a required field {field}."
values={{
index: <EuiCode>{error.index}</EuiCode>,
field: <EuiCode>{error.field}</EuiCode>,
}}
/>
</p>
);
case 'FIELD_NOT_VALID':
return (
<p key={`${error.error}-${error.index}-${error.field}`}>
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionTimestampNotValid"
defaultMessage="At least one index matching {index} has a field called {field} without the correct type."
values={{
index: <EuiCode>{error.index}</EuiCode>,
field: <EuiCode>{error.field}</EuiCode>,
}}
/>
</p>
);
default:
return '';
}
});
};
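A hedged usage sketch of the reworked form; the local state and index data are hypothetical, and in the plugin they come from useAnalysisSetupState:

import React from 'react';
// Relative imports as used within the setup components directory.
import { AnalysisSetupIndicesForm } from './analysis_setup_indices_form';
import { AvailableIndex } from './validation';

const ExampleIndicesFormUsage: React.FC = () => {
  // Hypothetical initial state.
  const [indices, setIndices] = React.useState<AvailableIndex[]>([
    {
      validity: 'valid',
      name: 'filebeat-*',
      isSelected: true,
      availableDatasets: ['nginx.access', 'system.syslog'],
      datasetFilter: { type: 'includeAll' },
    },
    { validity: 'unknown', name: 'custom-logs-*' },
  ]);

  return (
    <AnalysisSetupIndicesForm
      indices={indices}
      isValidating={false}
      onChangeSelectedIndices={setIndices}
      valid={true}
    />
  );
};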

View file

@ -0,0 +1,88 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
EuiFilterButton,
EuiFilterGroup,
EuiPopover,
EuiPopoverTitle,
EuiSelectable,
EuiSelectableOption,
} from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import React, { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../../../common/log_analysis';
import { useVisibilityState } from '../../../../utils/use_visibility_state';
export const IndexSetupDatasetFilter: React.FC<{
availableDatasets: string[];
datasetFilter: DatasetFilter;
isDisabled?: boolean;
onChangeDatasetFilter: (datasetFilter: DatasetFilter) => void;
}> = ({ availableDatasets, datasetFilter, isDisabled, onChangeDatasetFilter }) => {
const { isVisible, hide, show } = useVisibilityState(false);
const changeDatasetFilter = useCallback(
(options: EuiSelectableOption[]) => {
const selectedDatasets = options
.filter(({ checked }) => checked === 'on')
.map(({ label }) => label);
onChangeDatasetFilter(
selectedDatasets.length === 0
? { type: 'includeAll' }
: { type: 'includeSome', datasets: selectedDatasets }
);
},
[onChangeDatasetFilter]
);
const selectableOptions: EuiSelectableOption[] = useMemo(
() =>
availableDatasets.map(datasetName => ({
label: datasetName,
checked:
datasetFilter.type === 'includeSome' && datasetFilter.datasets.includes(datasetName)
? 'on'
: undefined,
})),
[availableDatasets, datasetFilter]
);
const datasetFilterButton = (
<EuiFilterButton disabled={isDisabled} isSelected={isVisible} onClick={show}>
<FormattedMessage
id="xpack.infra.analysisSetup.indexDatasetFilterIncludeAllButtonLabel"
defaultMessage="{includeType, select, includeAll {All datasets} includeSome {{includedDatasetCount, plural, one {# dataset} other {# datasets}}}}"
values={{
includeType: datasetFilter.type,
includedDatasetCount:
datasetFilter.type === 'includeSome' ? datasetFilter.datasets.length : 0,
}}
/>
</EuiFilterButton>
);
return (
<EuiFilterGroup>
<EuiPopover
button={datasetFilterButton}
closePopover={hide}
isOpen={isVisible}
panelPaddingSize="none"
>
<EuiSelectable onChange={changeDatasetFilter} options={selectableOptions} searchable>
{(list, search) => (
<div>
<EuiPopoverTitle>{search}</EuiPopoverTitle>
{list}
</div>
)}
</EuiSelectable>
</EuiPopover>
</EuiFilterGroup>
);
};
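And a minimal props sketch for the filter popover on its own (sample datasets; in the UI it is rendered by IndexSetupRow for each valid index):

import React from 'react';
import { IndexSetupDatasetFilter } from './index_setup_dataset_filter';

const ExampleDatasetFilter: React.FC = () => (
  <IndexSetupDatasetFilter
    availableDatasets={['nginx.access', 'system.syslog']}
    datasetFilter={{ type: 'includeSome', datasets: ['nginx.access'] }}
    onChangeDatasetFilter={nextFilter => {
      // Deselecting every dataset reports { type: 'includeAll' }.
      console.log(nextFilter);
    }}
  />
);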

View file

@ -0,0 +1,110 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { EuiCheckbox, EuiCode, EuiFlexGroup, EuiFlexItem, EuiIcon, EuiToolTip } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import React, { useCallback } from 'react';
import { DatasetFilter } from '../../../../../common/log_analysis';
import { IndexSetupDatasetFilter } from './index_setup_dataset_filter';
import { AvailableIndex, ValidationIndicesUIError } from './validation';
export const IndexSetupRow: React.FC<{
index: AvailableIndex;
isDisabled: boolean;
onChangeDatasetFilter: (indexName: string, datasetFilter: DatasetFilter) => void;
onChangeIsSelected: (indexName: string, isSelected: boolean) => void;
}> = ({ index, isDisabled, onChangeDatasetFilter, onChangeIsSelected }) => {
const changeIsSelected = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
onChangeIsSelected(index.name, event.currentTarget.checked);
},
[index.name, onChangeIsSelected]
);
const changeDatasetFilter = useCallback(
(datasetFilter: DatasetFilter) => onChangeDatasetFilter(index.name, datasetFilter),
[index.name, onChangeDatasetFilter]
);
const isSelected = index.validity === 'valid' && index.isSelected;
return (
<EuiFlexGroup alignItems="center">
<EuiFlexItem>
<EuiCheckbox
key={index.name}
id={index.name}
label={<EuiCode>{index.name}</EuiCode>}
onChange={changeIsSelected}
checked={isSelected}
disabled={isDisabled || index.validity === 'invalid'}
/>
</EuiFlexItem>
<EuiFlexItem grow={false}>
{index.validity === 'invalid' ? (
<EuiToolTip content={formatValidationError(index.errors)}>
<EuiIcon type="alert" color="danger" />
</EuiToolTip>
) : index.validity === 'valid' ? (
<IndexSetupDatasetFilter
availableDatasets={index.availableDatasets}
datasetFilter={index.datasetFilter}
isDisabled={!isSelected || isDisabled}
onChangeDatasetFilter={changeDatasetFilter}
/>
) : null}
</EuiFlexItem>
</EuiFlexGroup>
);
};
const formatValidationError = (errors: ValidationIndicesUIError[]): React.ReactNode => {
return errors.map(error => {
switch (error.error) {
case 'INDEX_NOT_FOUND':
return (
<p key={`${error.error}-${error.index}`}>
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionIndexNotFound"
defaultMessage="No indices match the pattern {index}"
values={{ index: <EuiCode>{error.index}</EuiCode> }}
/>
</p>
);
case 'FIELD_NOT_FOUND':
return (
<p key={`${error.error}-${error.index}-${error.field}`}>
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionNoTimestampField"
defaultMessage="At least one index matching {index} lacks a required field {field}."
values={{
index: <EuiCode>{error.index}</EuiCode>,
field: <EuiCode>{error.field}</EuiCode>,
}}
/>
</p>
);
case 'FIELD_NOT_VALID':
return (
<p key={`${error.error}-${error.index}-${error.field}`}>
<FormattedMessage
id="xpack.infra.analysisSetup.indicesSelectionTimestampNotValid"
defaultMessage="At least one index matching {index} has a field called {field} without the correct type."
values={{
index: <EuiCode>{error.index}</EuiCode>,
field: <EuiCode>{error.field}</EuiCode>,
}}
/>
</p>
);
default:
return '';
}
});
};

View file

@ -13,7 +13,7 @@ import React, { useMemo } from 'react';
import { SetupStatus } from '../../../../../common/log_analysis';
import { AnalysisSetupIndicesForm } from './analysis_setup_indices_form';
import { AnalysisSetupTimerangeForm } from './analysis_setup_timerange_form';
import { ValidatedIndex, ValidationIndicesUIError } from './validation';
import { AvailableIndex, ValidationIndicesUIError } from './validation';
interface InitialConfigurationStepProps {
setStartTime: (startTime: number | undefined) => void;
@ -21,9 +21,9 @@ interface InitialConfigurationStepProps {
startTime: number | undefined;
endTime: number | undefined;
isValidating: boolean;
validatedIndices: ValidatedIndex[];
validatedIndices: AvailableIndex[];
setupStatus: SetupStatus;
setValidatedIndices: (selectedIndices: ValidatedIndex[]) => void;
setValidatedIndices: (selectedIndices: AvailableIndex[]) => void;
validationErrors?: ValidationIndicesUIError[];
}

View file

@ -5,22 +5,35 @@
*/
import { ValidationIndicesError } from '../../../../../common/http_api';
import { DatasetFilter } from '../../../../../common/log_analysis';
export { ValidationIndicesError };
export type ValidationIndicesUIError =
| ValidationIndicesError
| { error: 'NETWORK_ERROR' }
| { error: 'TOO_FEW_SELECTED_INDICES' };
interface ValidIndex {
interface ValidAvailableIndex {
validity: 'valid';
name: string;
isSelected: boolean;
availableDatasets: string[];
datasetFilter: DatasetFilter;
}
interface InvalidIndex {
interface InvalidAvailableIndex {
validity: 'invalid';
name: string;
errors: ValidationIndicesError[];
}
export type ValidatedIndex = ValidIndex | InvalidIndex;
interface UnvalidatedAvailableIndex {
validity: 'unknown';
name: string;
}
export type AvailableIndex =
| ValidAvailableIndex
| InvalidAvailableIndex
| UnvalidatedAvailableIndex;
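For clarity, a sketch of narrowing the new union on its validity discriminant (sample values; import path as in the changed components):

import { AvailableIndex } from './validation';

const candidate: AvailableIndex = {
  validity: 'valid',
  name: 'filebeat-*',
  isSelected: true,
  availableDatasets: ['nginx.access'],
  datasetFilter: { type: 'includeAll' },
};

if (candidate.validity === 'valid') {
  // Only the 'valid' variant exposes the dataset information.
  console.log(candidate.availableDatasets, candidate.datasetFilter.type);
} else if (candidate.validity === 'invalid') {
  console.log(candidate.errors);
}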

View file

@ -21,7 +21,8 @@ export const callSetupMlModuleAPI = async (
sourceId: string,
indexPattern: string,
jobOverrides: SetupMlModuleJobOverrides[] = [],
datafeedOverrides: SetupMlModuleDatafeedOverrides[] = []
datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [],
query?: object
) => {
const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, {
method: 'POST',
@ -34,6 +35,7 @@ export const callSetupMlModuleAPI = async (
startDatafeed: true,
jobOverrides,
datafeedOverrides,
query,
})
),
});
@ -60,13 +62,20 @@ const setupMlModuleDatafeedOverridesRT = rt.object;
export type SetupMlModuleDatafeedOverrides = rt.TypeOf<typeof setupMlModuleDatafeedOverridesRT>;
const setupMlModuleRequestParamsRT = rt.type({
indexPatternName: rt.string,
prefix: rt.string,
startDatafeed: rt.boolean,
jobOverrides: rt.array(setupMlModuleJobOverridesRT),
datafeedOverrides: rt.array(setupMlModuleDatafeedOverridesRT),
});
const setupMlModuleRequestParamsRT = rt.intersection([
rt.strict({
indexPatternName: rt.string,
prefix: rt.string,
startDatafeed: rt.boolean,
jobOverrides: rt.array(setupMlModuleJobOverridesRT),
datafeedOverrides: rt.array(setupMlModuleDatafeedOverridesRT),
}),
rt.exact(
rt.partial({
query: rt.object,
})
),
]);
const setupMlModuleRequestPayloadRT = rt.intersection([
setupMlModuleTimeParamsRT,
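To show the effect of the widened codec, a hedged sketch, written as if it lived next to the codec in this module, of a params object that now carries the optional datafeed query (prefix and query values are hypothetical):

const exampleSetupParams: rt.TypeOf<typeof setupMlModuleRequestParamsRT> = {
  indexPatternName: 'filebeat-*,logs-*',
  prefix: 'example-prefix-', // hypothetical job id prefix
  startDatafeed: true,
  jobOverrides: [],
  datafeedOverrides: [],
  // Optional: restricts the datafeed to the selected datasets.
  query: { bool: { filter: [{ terms: { 'event.dataset': ['nginx.access'] } }] } },
};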

View file

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
validateLogEntryDatasetsRequestPayloadRT,
validateLogEntryDatasetsResponsePayloadRT,
} from '../../../../../common/http_api';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import { npStart } from '../../../../legacy_singletons';
export const callValidateDatasetsAPI = async (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, {
method: 'POST',
body: JSON.stringify(
validateLogEntryDatasetsRequestPayloadRT.encode({
data: {
endTime,
indices,
startTime,
timestampField,
},
})
),
});
return decodeOrThrow(validateLogEntryDatasetsResponsePayloadRT)(response);
};
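A hedged example of invoking the new client wrapper (index name, time range, and import path are hypothetical):

import { callValidateDatasetsAPI } from './api/validate_datasets'; // hypothetical relative path

const exampleValidateDatasets = async () => {
  // A single index pattern over the last 24 hours.
  const { data } = await callValidateDatasetsAPI(
    ['filebeat-*'],
    '@timestamp',
    Date.now() - 24 * 60 * 60 * 1000,
    Date.now()
  );
  // One entry per index, e.g. [{ indexName: 'filebeat-*', datasets: ['nginx.access'] }]
  return data.datasets;
};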

View file

@ -5,7 +5,7 @@
*/
import { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../../common/log_analysis';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { useModuleStatus } from './log_analysis_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
@ -48,10 +48,11 @@ export const useLogAnalysisModule = <JobType extends string>({
createPromise: async (
selectedIndices: string[],
start: number | undefined,
end: number | undefined
end: number | undefined,
datasetFilter: DatasetFilter
) => {
dispatchModuleStatus({ type: 'startedSetup' });
const setupResult = await moduleDescriptor.setUpModule(start, end, {
const setupResult = await moduleDescriptor.setUpModule(start, end, datasetFilter, {
indices: selectedIndices,
sourceId,
spaceId,
@ -92,11 +93,16 @@ export const useLogAnalysisModule = <JobType extends string>({
]);
const cleanUpAndSetUpModule = useCallback(
(selectedIndices: string[], start: number | undefined, end: number | undefined) => {
(
selectedIndices: string[],
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter
) => {
dispatchModuleStatus({ type: 'startedSetup' });
cleanUpModule()
.then(() => {
setUpModule(selectedIndices, start, end);
setUpModule(selectedIndices, start, end, datasetFilter);
})
.catch(() => {
dispatchModuleStatus({ type: 'failedSetup' });

View file

@ -8,7 +8,11 @@ import { DeleteJobsResponsePayload } from './api/ml_cleanup';
import { FetchJobStatusResponsePayload } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload } from './api/ml_get_module';
import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
import { ValidationIndicesResponsePayload } from '../../../../common/http_api/log_analysis';
import {
ValidationIndicesResponsePayload,
ValidateLogEntryDatasetsResponsePayload,
} from '../../../../common/http_api/log_analysis';
import { DatasetFilter } from '../../../../common/log_analysis';
export interface ModuleDescriptor<JobType extends string> {
moduleId: string;
@ -20,12 +24,20 @@ export interface ModuleDescriptor<JobType extends string> {
setUpModule: (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
sourceConfiguration: ModuleSourceConfiguration
) => Promise<SetupMlModuleResponsePayload>;
cleanUpModule: (spaceId: string, sourceId: string) => Promise<DeleteJobsResponsePayload>;
validateSetupIndices: (
sourceConfiguration: ModuleSourceConfiguration
indices: string[],
timestampField: string
) => Promise<ValidationIndicesResponsePayload>;
validateSetupDatasets: (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
) => Promise<ValidateLogEntryDatasetsResponsePayload>;
}
export interface ModuleSourceConfiguration {

View file

@ -0,0 +1,264 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { isEqual } from 'lodash';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { usePrevious } from 'react-use';
import {
combineDatasetFilters,
DatasetFilter,
filterDatasetFilter,
isExampleDataIndex,
} from '../../../../common/log_analysis';
import {
AvailableIndex,
ValidationIndicesError,
ValidationIndicesUIError,
} from '../../../components/logging/log_analysis_setup/initial_configuration_step';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
type SetupHandler = (
indices: string[],
startTime: number | undefined,
endTime: number | undefined,
datasetFilter: DatasetFilter
) => void;
interface AnalysisSetupStateArguments<JobType extends string> {
cleanUpAndSetUpModule: SetupHandler;
moduleDescriptor: ModuleDescriptor<JobType>;
setUpModule: SetupHandler;
sourceConfiguration: ModuleSourceConfiguration;
}
const fourWeeksInMs = 86400000 * 7 * 4;
export const useAnalysisSetupState = <JobType extends string>({
cleanUpAndSetUpModule,
moduleDescriptor: { validateSetupDatasets, validateSetupIndices },
setUpModule,
sourceConfiguration,
}: AnalysisSetupStateArguments<JobType>) => {
const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs);
const [endTime, setEndTime] = useState<number | undefined>(undefined);
const [validatedIndices, setValidatedIndices] = useState<AvailableIndex[]>(
sourceConfiguration.indices.map(indexName => ({
name: indexName,
validity: 'unknown' as const,
}))
);
const updateIndicesWithValidationErrors = useCallback(
(validationErrors: ValidationIndicesError[]) =>
setValidatedIndices(availableIndices =>
availableIndices.map(previousAvailableIndex => {
const indexValiationErrors = validationErrors.filter(
({ index }) => index === previousAvailableIndex.name
);
if (indexValiationErrors.length > 0) {
return {
validity: 'invalid',
name: previousAvailableIndex.name,
errors: indexValiationErrors,
};
} else if (previousAvailableIndex.validity === 'valid') {
return {
...previousAvailableIndex,
validity: 'valid',
errors: [],
};
} else {
return {
validity: 'valid',
name: previousAvailableIndex.name,
isSelected: !isExampleDataIndex(previousAvailableIndex.name),
availableDatasets: [],
datasetFilter: {
type: 'includeAll' as const,
},
};
}
})
),
[]
);
const updateIndicesWithAvailableDatasets = useCallback(
(availableDatasets: Array<{ indexName: string; datasets: string[] }>) =>
setValidatedIndices(availableIndices =>
availableIndices.map(previousAvailableIndex => {
if (previousAvailableIndex.validity !== 'valid') {
return previousAvailableIndex;
}
const availableDatasetsForIndex = availableDatasets.filter(
({ indexName }) => indexName === previousAvailableIndex.name
);
const newAvailableDatasets = availableDatasetsForIndex.flatMap(
({ datasets }) => datasets
);
// filter out datasets that have disappeared if this index's datasets were updated
const newDatasetFilter: DatasetFilter =
availableDatasetsForIndex.length > 0
? filterDatasetFilter(previousAvailableIndex.datasetFilter, dataset =>
newAvailableDatasets.includes(dataset)
)
: previousAvailableIndex.datasetFilter;
return {
...previousAvailableIndex,
availableDatasets: newAvailableDatasets,
datasetFilter: newDatasetFilter,
};
})
),
[]
);
const validIndexNames = useMemo(
() => validatedIndices.filter(index => index.validity === 'valid').map(index => index.name),
[validatedIndices]
);
const selectedIndexNames = useMemo(
() =>
validatedIndices
.filter(index => index.validity === 'valid' && index.isSelected)
.map(i => i.name),
[validatedIndices]
);
const datasetFilter = useMemo(
() =>
validatedIndices
.flatMap(validatedIndex =>
validatedIndex.validity === 'valid'
? validatedIndex.datasetFilter
: { type: 'includeAll' as const }
)
.reduce(combineDatasetFilters, { type: 'includeAll' as const }),
[validatedIndices]
);
const [validateIndicesRequest, validateIndices] = useTrackedPromise(
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await validateSetupIndices(
sourceConfiguration.indices,
sourceConfiguration.timestampField
);
},
onResolve: ({ data: { errors } }) => {
updateIndicesWithValidationErrors(errors);
},
onReject: () => {
setValidatedIndices([]);
},
},
[sourceConfiguration.indices, sourceConfiguration.timestampField]
);
const [validateDatasetsRequest, validateDatasets] = useTrackedPromise(
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
if (validIndexNames.length === 0) {
return { data: { datasets: [] } };
}
return await validateSetupDatasets(
validIndexNames,
sourceConfiguration.timestampField,
startTime ?? 0,
endTime ?? Date.now()
);
},
onResolve: ({ data: { datasets } }) => {
updateIndicesWithAvailableDatasets(datasets);
},
},
[validIndexNames, sourceConfiguration.timestampField, startTime, endTime]
);
const setUp = useCallback(() => {
return setUpModule(selectedIndexNames, startTime, endTime, datasetFilter);
}, [setUpModule, selectedIndexNames, startTime, endTime, datasetFilter]);
const cleanUpAndSetUp = useCallback(() => {
return cleanUpAndSetUpModule(selectedIndexNames, startTime, endTime, datasetFilter);
}, [cleanUpAndSetUpModule, selectedIndexNames, startTime, endTime, datasetFilter]);
const isValidating = useMemo(
() => validateIndicesRequest.state === 'pending' || validateDatasetsRequest.state === 'pending',
[validateDatasetsRequest.state, validateIndicesRequest.state]
);
const validationErrors = useMemo<ValidationIndicesUIError[]>(() => {
if (isValidating) {
return [];
}
if (validateIndicesRequest.state === 'rejected') {
return [{ error: 'NETWORK_ERROR' }];
}
if (selectedIndexNames.length === 0) {
return [{ error: 'TOO_FEW_SELECTED_INDICES' }];
}
return validatedIndices.reduce<ValidationIndicesUIError[]>((errors, index) => {
return index.validity === 'invalid' && selectedIndexNames.includes(index.name)
? [...errors, ...index.errors]
: errors;
}, []);
}, [isValidating, validateIndicesRequest.state, selectedIndexNames, validatedIndices]);
const prevStartTime = usePrevious(startTime);
const prevEndTime = usePrevious(endTime);
const prevValidIndexNames = usePrevious(validIndexNames);
useEffect(() => {
validateIndices();
}, [validateIndices]);
useEffect(() => {
if (
startTime !== prevStartTime ||
endTime !== prevEndTime ||
!isEqual(validIndexNames, prevValidIndexNames)
) {
validateDatasets();
}
}, [
endTime,
prevEndTime,
prevStartTime,
prevValidIndexNames,
startTime,
validIndexNames,
validateDatasets,
]);
return {
cleanUpAndSetUp,
datasetFilter,
endTime,
isValidating,
selectedIndexNames,
setEndTime,
setStartTime,
setUp,
startTime,
validatedIndices,
setValidatedIndices,
validationErrors,
};
};
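A sketch of consuming the hook from a setup screen; the handlers and source configuration are assumed to come from the surrounding setup flow (useLogAnalysisModule and the source container):

// Inside a setup screen component.
const {
  datasetFilter,
  selectedIndexNames,
  setUp,
  setValidatedIndices,
  validatedIndices,
  validationErrors,
} = useAnalysisSetupState({
  cleanUpAndSetUpModule, // assumed to be provided by useLogAnalysisModule
  moduleDescriptor: logEntryRateModule,
  setUpModule, // assumed to be provided by useLogAnalysisModule
  sourceConfiguration,
});
// setUp() forwards the selected indices, the time range, and the combined dataset filter.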

View file

@ -1,142 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useCallback, useEffect, useMemo, useState } from 'react';
import { isExampleDataIndex } from '../../../../common/log_analysis';
import {
ValidatedIndex,
ValidationIndicesUIError,
} from '../../../components/logging/log_analysis_setup/initial_configuration_step';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
type SetupHandler = (
indices: string[],
startTime: number | undefined,
endTime: number | undefined
) => void;
interface AnalysisSetupStateArguments<JobType extends string> {
cleanUpAndSetUpModule: SetupHandler;
moduleDescriptor: ModuleDescriptor<JobType>;
setUpModule: SetupHandler;
sourceConfiguration: ModuleSourceConfiguration;
}
const fourWeeksInMs = 86400000 * 7 * 4;
export const useAnalysisSetupState = <JobType extends string>({
cleanUpAndSetUpModule,
moduleDescriptor: { validateSetupIndices },
setUpModule,
sourceConfiguration,
}: AnalysisSetupStateArguments<JobType>) => {
const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs);
const [endTime, setEndTime] = useState<number | undefined>(undefined);
const [validatedIndices, setValidatedIndices] = useState<ValidatedIndex[]>([]);
const [validateIndicesRequest, validateIndices] = useTrackedPromise(
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await validateSetupIndices(sourceConfiguration);
},
onResolve: ({ data: { errors } }) => {
setValidatedIndices(previousValidatedIndices =>
sourceConfiguration.indices.map(indexName => {
const previousValidatedIndex = previousValidatedIndices.filter(
({ name }) => name === indexName
)[0];
const indexValiationErrors = errors.filter(({ index }) => index === indexName);
if (indexValiationErrors.length > 0) {
return {
validity: 'invalid',
name: indexName,
errors: indexValiationErrors,
};
} else {
return {
validity: 'valid',
name: indexName,
isSelected:
previousValidatedIndex?.validity === 'valid'
? previousValidatedIndex?.isSelected
: !isExampleDataIndex(indexName),
};
}
})
);
},
onReject: () => {
setValidatedIndices([]);
},
},
[sourceConfiguration.indices]
);
useEffect(() => {
validateIndices();
}, [validateIndices]);
const selectedIndexNames = useMemo(
() =>
validatedIndices
.filter(index => index.validity === 'valid' && index.isSelected)
.map(i => i.name),
[validatedIndices]
);
const setUp = useCallback(() => {
return setUpModule(selectedIndexNames, startTime, endTime);
}, [setUpModule, selectedIndexNames, startTime, endTime]);
const cleanUpAndSetUp = useCallback(() => {
return cleanUpAndSetUpModule(selectedIndexNames, startTime, endTime);
}, [cleanUpAndSetUpModule, selectedIndexNames, startTime, endTime]);
const isValidating = useMemo(
() =>
validateIndicesRequest.state === 'pending' ||
validateIndicesRequest.state === 'uninitialized',
[validateIndicesRequest.state]
);
const validationErrors = useMemo<ValidationIndicesUIError[]>(() => {
if (isValidating) {
return [];
}
if (validateIndicesRequest.state === 'rejected') {
return [{ error: 'NETWORK_ERROR' }];
}
if (selectedIndexNames.length === 0) {
return [{ error: 'TOO_FEW_SELECTED_INDICES' }];
}
return validatedIndices.reduce<ValidationIndicesUIError[]>((errors, index) => {
return index.validity === 'invalid' && selectedIndexNames.includes(index.name)
? [...errors, ...index.errors]
: errors;
}, []);
}, [isValidating, validateIndicesRequest.state, selectedIndexNames, validatedIndices]);
return {
cleanUpAndSetUp,
endTime,
isValidating,
selectedIndexNames,
setEndTime,
setStartTime,
setUp,
startTime,
validatedIndices,
setValidatedIndices,
validationErrors,
};
};

View file

@ -7,20 +7,21 @@
import {
bucketSpan,
categoriesMessageField,
DatasetFilter,
getJobId,
LogEntryCategoriesJobType,
logEntryCategoriesJobTypes,
partitionField,
} from '../../../../common/log_analysis';
import {
cleanUpJobsAndDatafeeds,
ModuleDescriptor,
ModuleSourceConfiguration,
cleanUpJobsAndDatafeeds,
} from '../../../containers/logs/log_analysis';
import { callJobsSummaryAPI } from '../../../containers/logs/log_analysis/api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_get_module';
import { callSetupMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_setup_module_api';
import { callValidateDatasetsAPI } from '../../../containers/logs/log_analysis/api/validate_datasets';
import { callValidateIndicesAPI } from '../../../containers/logs/log_analysis/api/validate_indices';
const moduleId = 'logs_ui_categories';
@ -48,6 +49,7 @@ const getModuleDefinition = async () => {
const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration
) => {
const indexNamePattern = indices.join(',');
@ -65,10 +67,31 @@ const setUpModule = async (
indexPattern: indexNamePattern,
timestampField,
bucketSpan,
datasetFilter,
},
},
},
];
const query = {
bool: {
filter: [
...(datasetFilter.type === 'includeSome'
? [
{
terms: {
'event.dataset': datasetFilter.datasets,
},
},
]
: []),
{
exists: {
field: 'message',
},
},
],
},
};
return callSetupMlModuleAPI(
moduleId,
@ -77,7 +100,9 @@ const setUpModule = async (
spaceId,
sourceId,
indexNamePattern,
jobOverrides
jobOverrides,
[],
query
);
};
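For concreteness, the datafeed query produced above when a user limits the categorization job to a single hypothetical dataset:

// datasetFilter = { type: 'includeSome', datasets: ['nginx.access'] }
const exampleCategoriesQuery = {
  bool: {
    filter: [
      { terms: { 'event.dataset': ['nginx.access'] } },
      // The categorization job additionally requires a message field.
      { exists: { field: 'message' } },
    ],
  },
};
// With { type: 'includeAll' } only the 'exists' clause remains.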
@ -85,7 +110,7 @@ const cleanUpModule = async (spaceId: string, sourceId: string) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes);
};
const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceConfiguration) => {
const validateSetupIndices = async (indices: string[], timestampField: string) => {
return await callValidateIndicesAPI(indices, [
{
name: timestampField,
@ -102,6 +127,15 @@ const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceCon
]);
};
const validateSetupDatasets = async (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
) => {
return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
};
export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobType> = {
moduleId,
jobTypes: logEntryCategoriesJobTypes,
@ -111,5 +145,6 @@ export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobTyp
getModuleDefinition,
setUpModule,
cleanUpModule,
validateSetupDatasets,
validateSetupIndices,
};

View file

@ -55,7 +55,7 @@ export const LogEntryCategoriesSetupContent: React.FunctionComponent = () => {
createProcessStep({
cleanUpAndSetUp,
errorMessages: lastSetupErrorMessages,
isConfigurationValid: validationErrors.length <= 0,
isConfigurationValid: validationErrors.length <= 0 && !isValidating,
setUp,
setupStatus,
viewResults,

View file

@ -6,20 +6,21 @@
import {
bucketSpan,
DatasetFilter,
getJobId,
LogEntryRateJobType,
logEntryRateJobTypes,
partitionField,
} from '../../../../common/log_analysis';
import {
cleanUpJobsAndDatafeeds,
ModuleDescriptor,
ModuleSourceConfiguration,
cleanUpJobsAndDatafeeds,
} from '../../../containers/logs/log_analysis';
import { callJobsSummaryAPI } from '../../../containers/logs/log_analysis/api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_get_module';
import { callSetupMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_setup_module_api';
import { callValidateDatasetsAPI } from '../../../containers/logs/log_analysis/api/validate_datasets';
import { callValidateIndicesAPI } from '../../../containers/logs/log_analysis/api/validate_indices';
const moduleId = 'logs_ui_analysis';
@ -47,6 +48,7 @@ const getModuleDefinition = async () => {
const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration
) => {
const indexNamePattern = indices.join(',');
@ -68,6 +70,20 @@ const setUpModule = async (
},
},
];
const query =
datasetFilter.type === 'includeSome'
? {
bool: {
filter: [
{
terms: {
'event.dataset': datasetFilter.datasets,
},
},
],
},
}
: undefined;
return callSetupMlModuleAPI(
moduleId,
@ -76,7 +92,9 @@ const setUpModule = async (
spaceId,
sourceId,
indexNamePattern,
jobOverrides
jobOverrides,
[],
query
);
};
@ -84,7 +102,7 @@ const cleanUpModule = async (spaceId: string, sourceId: string) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes);
};
const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceConfiguration) => {
const validateSetupIndices = async (indices: string[], timestampField: string) => {
return await callValidateIndicesAPI(indices, [
{
name: timestampField,
@ -97,6 +115,15 @@ const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceCon
]);
};
const validateSetupDatasets = async (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
) => {
return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
};
export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = {
moduleId,
jobTypes: logEntryRateJobTypes,
@ -106,5 +133,6 @@ export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = {
getModuleDefinition,
setUpModule,
cleanUpModule,
validateSetupDatasets,
validateSetupIndices,
};

View file

@ -55,7 +55,7 @@ export const LogEntryRateSetupContent: React.FunctionComponent = () => {
createProcessStep({
cleanUpAndSetUp,
errorMessages: lastSetupErrorMessages,
isConfigurationValid: validationErrors.length <= 0,
isConfigurationValid: validationErrors.length <= 0 && !isValidating,
setUp,
setupStatus,
viewResults,

View file

@ -15,6 +15,7 @@ import {
initGetLogEntryCategoryDatasetsRoute,
initGetLogEntryCategoryExamplesRoute,
initGetLogEntryRateRoute,
initValidateLogAnalysisDatasetsRoute,
initValidateLogAnalysisIndicesRoute,
} from './routes/log_analysis';
import { initMetricExplorerRoute } from './routes/metrics_explorer';
@ -51,6 +52,7 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
initSnapshotRoute(libs);
initNodeDetailsRoute(libs);
initSourceRoute(libs);
initValidateLogAnalysisDatasetsRoute(libs);
initValidateLogAnalysisIndicesRoute(libs);
initLogEntriesRoute(libs);
initLogEntriesHighlightsRoute(libs);

View file

@ -38,6 +38,7 @@ export function compose(core: CoreSetup, config: InfraConfig, plugins: InfraServ
sources,
}),
logEntries: new InfraLogEntriesDomain(new InfraKibanaLogEntriesAdapter(framework), {
framework,
sources,
}),
metrics: new InfraMetricsDomain(new KibanaMetricsAdapter(framework)),

View file

@ -29,6 +29,14 @@ import {
Highlights,
compileFormattingRules,
} from './message';
import { KibanaFramework } from '../../adapters/framework/kibana_framework_adapter';
import { decodeOrThrow } from '../../../../common/runtime_types';
import {
logEntryDatasetsResponseRT,
LogEntryDatasetBucket,
CompositeDatasetKey,
createLogEntryDatasetsQuery,
} from './queries/log_entry_datasets';
export interface LogEntriesParams {
startTimestamp: number;
@ -51,10 +59,15 @@ export const LOG_ENTRIES_PAGE_SIZE = 200;
const FIELDS_FROM_CONTEXT = ['log.file.path', 'host.name', 'container.id'] as const;
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
export class InfraLogEntriesDomain {
constructor(
private readonly adapter: LogEntriesAdapter,
private readonly libs: { sources: InfraSources }
private readonly libs: {
framework: KibanaFramework;
sources: InfraSources;
}
) {}
public async getLogEntriesAround(
@ -256,6 +269,45 @@ export class InfraLogEntriesDomain {
),
};
}
public async getLogEntryDatasets(
requestContext: RequestHandlerContext,
timestampField: string,
indexName: string,
startTime: number,
endTime: number
) {
let datasetBuckets: LogEntryDatasetBucket[] = [];
let afterLatestBatchKey: CompositeDatasetKey | undefined;
while (true) {
const datasetsReponse = await this.libs.framework.callWithRequest(
requestContext,
'search',
createLogEntryDatasetsQuery(
indexName,
timestampField,
startTime,
endTime,
COMPOSITE_AGGREGATION_BATCH_SIZE,
afterLatestBatchKey
)
);
const { after_key: afterKey, buckets: latestBatchBuckets } = decodeOrThrow(
logEntryDatasetsResponseRT
)(datasetsReponse).aggregations.dataset_buckets;
datasetBuckets = [...datasetBuckets, ...latestBatchBuckets];
afterLatestBatchKey = afterKey;
if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
break;
}
}
return datasetBuckets.map(({ key: { dataset } }) => dataset);
}
}
interface LogItemHit {

View file

@ -0,0 +1,98 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { commonSearchSuccessResponseFieldsRT } from '../../../../utils/elasticsearch_runtime_types';
export const createLogEntryDatasetsQuery = (
indexName: string,
timestampField: string,
startTime: number,
endTime: number,
size: number,
afterKey?: CompositeDatasetKey
) => ({
...defaultRequestParameters,
body: {
query: {
bool: {
filter: [
{
range: {
[timestampField]: {
gte: startTime,
lte: endTime,
},
},
},
{
exists: {
field: 'event.dataset',
},
},
],
},
},
aggs: {
dataset_buckets: {
composite: {
after: afterKey,
size,
sources: [
{
dataset: {
terms: {
field: 'event.dataset',
order: 'asc',
},
},
},
],
},
},
},
},
index: indexName,
size: 0,
});
const defaultRequestParameters = {
allowNoIndices: true,
ignoreUnavailable: true,
trackScores: false,
trackTotalHits: false,
};
const compositeDatasetKeyRT = rt.type({
dataset: rt.string,
});
export type CompositeDatasetKey = rt.TypeOf<typeof compositeDatasetKeyRT>;
const logEntryDatasetBucketRT = rt.type({
key: compositeDatasetKeyRT,
});
export type LogEntryDatasetBucket = rt.TypeOf<typeof logEntryDatasetBucketRT>;
export const logEntryDatasetsResponseRT = rt.intersection([
commonSearchSuccessResponseFieldsRT,
rt.type({
aggregations: rt.type({
dataset_buckets: rt.intersection([
rt.type({
buckets: rt.array(logEntryDatasetBucketRT),
}),
rt.partial({
after_key: compositeDatasetKeyRT,
}),
]),
}),
}),
]);
export type LogEntryDatasetsResponse = rt.TypeOf<typeof logEntryDatasetsResponseRT>;
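A hedged sketch of how the composite aggregation is paged with these helpers (index, field, times, and batch size are hypothetical sample values):

import { createLogEntryDatasetsQuery } from './queries/log_entry_datasets'; // path as imported in the domain above

const endTime = Date.now();
const startTime = endTime - 24 * 60 * 60 * 1000;

// First page: no afterKey.
const firstRequest = createLogEntryDatasetsQuery('filebeat-*', '@timestamp', startTime, endTime, 1000);

// Subsequent pages resume from the last composite key Elasticsearch returned.
const nextRequest = createLogEntryDatasetsQuery('filebeat-*', '@timestamp', startTime, endTime, 1000, {
  dataset: 'nginx.access',
});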

View file

@ -119,6 +119,7 @@ export class InfraServerPlugin {
sources,
}),
logEntries: new InfraLogEntriesDomain(new InfraKibanaLogEntriesAdapter(framework), {
framework,
sources,
}),
metrics: new InfraMetricsDomain(new KibanaMetricsAdapter(framework)),

View file

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Boom from 'boom';
import { InfraBackendLibs } from '../../../lib/infra_types';
import {
LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
validateLogEntryDatasetsRequestPayloadRT,
validateLogEntryDatasetsResponsePayloadRT,
} from '../../../../common/http_api';
import { createValidationFunction } from '../../../../common/runtime_types';
export const initValidateLogAnalysisDatasetsRoute = ({
framework,
logEntries,
}: InfraBackendLibs) => {
framework.registerRoute(
{
method: 'post',
path: LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
validate: {
body: createValidationFunction(validateLogEntryDatasetsRequestPayloadRT),
},
},
framework.router.handleLegacyErrors(async (requestContext, request, response) => {
try {
const {
data: { indices, timestampField, startTime, endTime },
} = request.body;
const datasets = await Promise.all(
indices.map(async indexName => {
const indexDatasets = await logEntries.getLogEntryDatasets(
requestContext,
timestampField,
indexName,
startTime,
endTime
);
return {
indexName,
datasets: indexDatasets,
};
})
);
return response.ok({
body: validateLogEntryDatasetsResponsePayloadRT.encode({ data: { datasets } }),
});
} catch (error) {
if (Boom.isBoom(error)) {
throw error;
}
return response.customError({
statusCode: error.statusCode ?? 500,
body: {
message: error.message ?? 'An unexpected error occurred',
},
});
}
})
);
};
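A hedged sketch of exercising the new route directly over HTTP; the body matches the request codec above, and the headers follow the usual Kibana conventions (assumed here):

const exampleRouteCall = async () => {
  const response = await fetch('/api/infra/log_analysis/validation/log_entry_datasets', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'kbn-xsrf': 'true' },
    body: JSON.stringify({
      data: {
        indices: ['filebeat-*'],
        timestampField: '@timestamp',
        startTime: 1588543200000,
        endTime: 1588629600000,
      },
    }),
  });
  // Expected shape: { data: { datasets: [{ indexName, datasets }] } }
  return response.json();
};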

View file

@ -4,4 +4,5 @@
* you may not use this file except in compliance with the Elastic License.
*/
export * from './datasets';
export * from './indices';

View file

@ -1,15 +1,4 @@
{
"job_id": "JOB_ID",
"indices": ["INDEX_PATTERN_NAME"],
"query": {
"bool": {
"filter": [
{
"exists": {
"field": "message"
}
}
]
}
}
"indices": ["INDEX_PATTERN_NAME"]
}