[Logs UI] Support runtime mappings in ML job configurations (#97627)

This PR adds the runtime mappings defined on the Kibana index pattern to the ML job configurations (both the log rate and log categories jobs). It also flags outdated runtime mappings in the "outdated configuration" callout.

Co-authored-by: Felix Stürmer <stuermer@weltenwort.de>
Kerry Gallagher authored on 2021-04-20 22:51:10 +01:00, committed by GitHub
parent 088a618f92
commit d49fbc9d1c
25 changed files with 128 additions and 31 deletions
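
For orientation, a minimal sketch of how the pieces below fit together (the runtime field name and script here are hypothetical, not taken from this change): a runtime field defined on the Kibana index pattern is carried through setup as estypes.RuntimeFields, attached to the ML datafeed through a datafeedOverrides entry, and later compared with fast-deep-equal against the job's stored datafeed_config.runtime_mappings to decide whether the configuration is outdated.

import equal from 'fast-deep-equal';
import type { estypes } from '@elastic/elasticsearch';

// Hypothetical runtime field; in practice this comes from the Kibana index pattern.
const runtimeMappings: estypes.RuntimeFields = {
  'log.level_label': {
    type: 'keyword',
    script: { source: "emit(doc['log.level'].value.toUpperCase())" },
  },
};

// Same shape as the datafeedOverrides entries added below in setUpModule.
const datafeedOverrides = [
  { job_id: 'log-entry-rate' as const, runtime_mappings: runtimeMappings },
];

// Same idea as the check added to isJobConfigurationOutdated below: the job is
// flagged as outdated when its stored datafeed runtime mappings no longer match.
const isOutdated = (storedRuntimeMappings: estypes.RuntimeFields | undefined) =>
  !equal(storedRuntimeMappings, runtimeMappings);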


@ -19,6 +19,7 @@ export const validateLogEntryDatasetsRequestPayloadRT = rt.type({
timestampField: rt.string,
startTime: rt.number,
endTime: rt.number,
runtimeMappings: rt.UnknownRecord,
}),
});


@ -26,6 +26,7 @@ export const validationIndicesRequestPayloadRT = rt.type({
data: rt.type({
fields: rt.array(validationIndicesFieldSpecificationRT),
indices: rt.array(rt.string),
runtimeMappings: rt.UnknownRecord,
}),
});


@ -97,6 +97,9 @@ export const jobSummaryRT = rt.intersection([
custom_settings: jobCustomSettingsRT,
finished_time: rt.number,
model_size_stats: jobModelSizeStatsRT,
datafeed_config: rt.partial({
runtime_mappings: rt.UnknownRecord,
}),
}),
}),
]);


@ -5,6 +5,7 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import type { HttpHandler } from 'src/core/public';
import {
LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
@ -18,10 +19,11 @@ interface RequestArgs {
timestampField: string;
startTime: number;
endTime: number;
runtimeMappings: estypes.RuntimeFields;
}
export const callValidateDatasetsAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { indices, timestampField, startTime, endTime } = requestArgs;
const { indices, timestampField, startTime, endTime, runtimeMappings } = requestArgs;
const response = await fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, {
method: 'POST',
body: JSON.stringify(
@ -31,6 +33,7 @@ export const callValidateDatasetsAPI = async (requestArgs: RequestArgs, fetch: H
indices,
startTime,
timestampField,
runtimeMappings,
},
})
),


@ -6,6 +6,7 @@
*/
import type { HttpHandler } from 'src/core/public';
import { estypes } from '@elastic/elasticsearch';
import {
LOG_ANALYSIS_VALIDATE_INDICES_PATH,
@ -19,13 +20,16 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
indices: string[];
fields: ValidationIndicesFieldSpecification[];
runtimeMappings: estypes.RuntimeFields;
}
export const callValidateIndicesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { indices, fields } = requestArgs;
const { indices, fields, runtimeMappings } = requestArgs;
const response = await fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, {
method: 'POST',
body: JSON.stringify(validationIndicesRequestPayloadRT.encode({ data: { indices, fields } })),
body: JSON.stringify(
validationIndicesRequestPayloadRT.encode({ data: { indices, fields, runtimeMappings } })
),
});
return decodeOrThrow(validationIndicesResponsePayloadRT)(response);


@ -21,7 +21,7 @@ export const useLogAnalysisModule = <JobType extends string>({
moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
const { services } = useKibanaContextForPlugin();
const { spaceId, sourceId, timestampField } = sourceConfiguration;
const { spaceId, sourceId, timestampField, runtimeMappings } = sourceConfiguration;
const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
const trackMetric = useUiTracker({ app: 'infra_logs' });
@ -67,6 +67,7 @@ export const useLogAnalysisModule = <JobType extends string>({
sourceId,
spaceId,
timestampField,
runtimeMappings,
},
services.http.fetch
);


@ -6,6 +6,7 @@
*/
import { useMemo } from 'react';
import equal from 'fast-deep-equal';
import { JobSummary } from './api/ml_get_jobs_summary_api';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
@ -30,11 +31,16 @@ export const isJobConfigurationOutdated = <JobType extends string>(
{ bucketSpan }: ModuleDescriptor<JobType>,
currentSourceConfiguration: ModuleSourceConfiguration
) => (jobSummary: JobSummary): boolean => {
if (!jobSummary.fullJob || !jobSummary.fullJob.custom_settings) {
if (
!jobSummary.fullJob ||
!jobSummary.fullJob.custom_settings ||
!jobSummary.fullJob.datafeed_config
) {
return false;
}
const jobConfiguration = jobSummary.fullJob.custom_settings.logs_source_config;
const datafeedRuntimeMappings = jobSummary.fullJob.datafeed_config.runtime_mappings;
return !(
jobConfiguration &&
@ -44,7 +50,8 @@ export const isJobConfigurationOutdated = <JobType extends string>(
new Set(jobConfiguration.indexPattern.split(',')),
new Set(currentSourceConfiguration.indices)
) &&
jobConfiguration.timestampField === currentSourceConfiguration.timestampField
jobConfiguration.timestampField === currentSourceConfiguration.timestampField &&
equal(datafeedRuntimeMappings, currentSourceConfiguration.runtimeMappings)
);
};


@ -6,6 +6,7 @@
*/
import type { HttpHandler } from 'src/core/public';
import { estypes } from '@elastic/elasticsearch';
import {
ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload,
@ -46,6 +47,7 @@ export interface ModuleDescriptor<JobType extends string> {
validateSetupIndices: (
indices: string[],
timestampField: string,
runtimeMappings: estypes.RuntimeFields,
fetch: HttpHandler
) => Promise<ValidationIndicesResponsePayload>;
validateSetupDatasets: (
@ -53,6 +55,7 @@ export interface ModuleDescriptor<JobType extends string> {
timestampField: string,
startTime: number,
endTime: number,
runtimeMappings: estypes.RuntimeFields,
fetch: HttpHandler
) => Promise<ValidateLogEntryDatasetsResponsePayload>;
}
@ -62,4 +65,5 @@ export interface ModuleSourceConfiguration {
sourceId: string;
spaceId: string;
timestampField: string;
runtimeMappings: estypes.RuntimeFields;
}


@ -162,6 +162,7 @@ export const useAnalysisSetupState = <JobType extends string>({
return await validateSetupIndices(
sourceConfiguration.indices,
sourceConfiguration.timestampField,
sourceConfiguration.runtimeMappings,
services.http.fetch
);
},
@ -188,6 +189,7 @@ export const useAnalysisSetupState = <JobType extends string>({
sourceConfiguration.timestampField,
startTime ?? 0,
endTime ?? Date.now(),
sourceConfiguration.runtimeMappings,
services.http.fetch
);
},


@ -5,6 +5,7 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import { i18n } from '@kbn/i18n';
import type { HttpHandler } from 'src/core/public';
import {
@ -62,7 +63,7 @@ const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
{ spaceId, sourceId, indices, timestampField, runtimeMappings }: ModuleSourceConfiguration,
fetch: HttpHandler
) => {
const indexNamePattern = indices.join(',');
@ -85,6 +86,12 @@ const setUpModule = async (
},
},
];
const datafeedOverrides = [
{
job_id: 'log-entry-categories-count' as const,
runtime_mappings: runtimeMappings,
},
];
const query = {
bool: {
filter: [
@ -115,6 +122,7 @@ const setUpModule = async (
sourceId,
indexPattern: indexNamePattern,
jobOverrides,
datafeedOverrides,
query,
},
fetch
@ -128,6 +136,7 @@ const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandl
const validateSetupIndices = async (
indices: string[],
timestampField: string,
runtimeMappings: estypes.RuntimeFields,
fetch: HttpHandler
) => {
return await callValidateIndicesAPI(
@ -147,6 +156,7 @@ const validateSetupIndices = async (
validTypes: ['text'],
},
],
runtimeMappings,
},
fetch
);
@ -157,9 +167,13 @@ const validateSetupDatasets = async (
timestampField: string,
startTime: number,
endTime: number,
runtimeMappings: estypes.RuntimeFields,
fetch: HttpHandler
) => {
return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch);
return await callValidateDatasetsAPI(
{ indices, timestampField, startTime, endTime, runtimeMappings },
fetch
);
};
export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobType> = {


@ -6,6 +6,7 @@
*/
import createContainer from 'constate';
import { estypes } from '@elastic/elasticsearch';
import { useMemo } from 'react';
import { useLogAnalysisModule } from '../../log_analysis_module';
import { useLogAnalysisModuleConfiguration } from '../../log_analysis_module_configuration';
@ -19,11 +20,13 @@ export const useLogEntryCategoriesModule = ({
sourceId,
spaceId,
timestampField,
runtimeMappings,
}: {
indexPattern: string;
sourceId: string;
spaceId: string;
timestampField: string;
runtimeMappings: estypes.RuntimeFields;
}) => {
const sourceConfiguration: ModuleSourceConfiguration = useMemo(
() => ({
@ -31,8 +34,9 @@ export const useLogEntryCategoriesModule = ({
sourceId,
spaceId,
timestampField,
runtimeMappings,
}),
[indexPattern, sourceId, spaceId, timestampField]
[indexPattern, sourceId, spaceId, timestampField, runtimeMappings]
);
const logAnalysisModule = useLogAnalysisModule({


@ -6,6 +6,7 @@
*/
import { i18n } from '@kbn/i18n';
import type { estypes } from '@elastic/elasticsearch';
import type { HttpHandler } from 'src/core/public';
import {
bucketSpan,
@ -61,7 +62,7 @@ const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
{ spaceId, sourceId, indices, timestampField, runtimeMappings }: ModuleSourceConfiguration,
fetch: HttpHandler
) => {
const indexNamePattern = indices.join(',');
@ -83,6 +84,12 @@ const setUpModule = async (
},
},
];
const datafeedOverrides = [
{
job_id: 'log-entry-rate' as const,
runtime_mappings: runtimeMappings,
},
];
const query =
datasetFilter.type === 'includeSome'
? {
@ -107,6 +114,7 @@ const setUpModule = async (
sourceId,
indexPattern: indexNamePattern,
jobOverrides,
datafeedOverrides,
query,
},
fetch
@ -120,6 +128,7 @@ const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandl
const validateSetupIndices = async (
indices: string[],
timestampField: string,
runtimeMappings: estypes.RuntimeFields,
fetch: HttpHandler
) => {
return await callValidateIndicesAPI(
@ -135,6 +144,7 @@ const validateSetupIndices = async (
validTypes: ['keyword'],
},
],
runtimeMappings,
},
fetch
);
@ -145,9 +155,13 @@ const validateSetupDatasets = async (
timestampField: string,
startTime: number,
endTime: number,
runtimeMappings: estypes.RuntimeFields,
fetch: HttpHandler
) => {
return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch);
return await callValidateDatasetsAPI(
{ indices, timestampField, startTime, endTime, runtimeMappings },
fetch
);
};
export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = {


@ -5,6 +5,7 @@
* 2.0.
*/
import { estypes } from '@elastic/elasticsearch';
import createContainer from 'constate';
import { useMemo } from 'react';
import { ModuleSourceConfiguration } from '../../log_analysis_module_types';
@ -18,11 +19,13 @@ export const useLogEntryRateModule = ({
sourceId,
spaceId,
timestampField,
runtimeMappings,
}: {
indexPattern: string;
sourceId: string;
spaceId: string;
timestampField: string;
runtimeMappings: estypes.RuntimeFields;
}) => {
const sourceConfiguration: ModuleSourceConfiguration = useMemo(
() => ({
@ -30,8 +33,9 @@ export const useLogEntryRateModule = ({
sourceId,
spaceId,
timestampField,
runtimeMappings,
}),
[indexPattern, sourceId, spaceId, timestampField]
[indexPattern, sourceId, spaceId, timestampField, runtimeMappings]
);
const logAnalysisModule = useLogAnalysisModule({


@ -28,6 +28,7 @@ export const LogEntryCategoriesPageProviders: React.FunctionComponent = ({ child
sourceId={sourceId}
spaceId={space.id}
timestampField={resolvedSourceConfiguration.timestampField}
runtimeMappings={resolvedSourceConfiguration.runtimeMappings}
>
<LogAnalysisSetupFlyoutStateProvider>{children}</LogAnalysisSetupFlyoutStateProvider>
</LogEntryCategoriesModuleProvider>


@ -31,12 +31,14 @@ export const LogEntryRatePageProviders: React.FunctionComponent = ({ children })
sourceId={sourceId}
spaceId={space.id}
timestampField={resolvedSourceConfiguration.timestampField ?? ''}
runtimeMappings={resolvedSourceConfiguration.runtimeMappings}
>
<LogEntryCategoriesModuleProvider
indexPattern={resolvedSourceConfiguration.indices ?? ''}
sourceId={sourceId}
spaceId={space.id}
timestampField={resolvedSourceConfiguration.timestampField ?? ''}
runtimeMappings={resolvedSourceConfiguration.runtimeMappings}
>
<LogAnalysisSetupFlyoutStateProvider>{children}</LogAnalysisSetupFlyoutStateProvider>
</LogEntryCategoriesModuleProvider>


@ -5,6 +5,7 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import { JsonObject } from '../../../../../../../src/plugins/kibana_utils/common';
import type { InfraPluginRequestHandlerContext } from '../../../types';
@ -38,7 +39,6 @@ import {
CompositeDatasetKey,
createLogEntryDatasetsQuery,
} from './queries/log_entry_datasets';
export interface LogEntriesParams {
startTimestamp: number;
endTimestamp: number;
@ -276,7 +276,8 @@ export class InfraLogEntriesDomain {
timestampField: string,
indexName: string,
startTime: number,
endTime: number
endTime: number,
runtimeMappings: estypes.RuntimeFields
) {
let datasetBuckets: LogEntryDatasetBucket[] = [];
let afterLatestBatchKey: CompositeDatasetKey | undefined;
@ -290,6 +291,7 @@ export class InfraLogEntriesDomain {
timestampField,
startTime,
endTime,
runtimeMappings,
COMPOSITE_AGGREGATION_BATCH_SIZE,
afterLatestBatchKey
)


@ -6,6 +6,7 @@
*/
import * as rt from 'io-ts';
import { estypes } from '@elastic/elasticsearch';
import { commonSearchSuccessResponseFieldsRT } from '../../../../utils/elasticsearch_runtime_types';
@ -14,6 +15,7 @@ export const createLogEntryDatasetsQuery = (
timestampField: string,
startTime: number,
endTime: number,
runtimeMappings: estypes.RuntimeFields,
size: number,
afterKey?: CompositeDatasetKey
) => ({
@ -38,6 +40,7 @@ export const createLogEntryDatasetsQuery = (
],
},
},
runtime_mappings: runtimeMappings,
aggs: {
dataset_buckets: {
composite: {


@ -5,6 +5,7 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import type { InfraPluginRequestHandlerContext, InfraRequestHandlerContext } from '../../types';
import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing';
import { fetchMlJob, getLogEntryDatasets } from './common';
@ -18,6 +19,7 @@ import {
Pagination,
isCategoryAnomaly,
} from '../../../common/log_analysis';
import type { ResolvedLogSourceConfiguration } from '../../../common/log_sources';
import type { MlSystem, MlAnomalyDetectors } from '../../types';
import { createLogEntryAnomaliesQuery, logEntryAnomaliesResponseRT } from './queries';
import {
@ -31,7 +33,6 @@ import {
createLogEntryExamplesQuery,
logEntryExamplesResponseRT,
} from './queries/log_entry_examples';
import { InfraSource } from '../sources';
import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
import { fetchLogEntryCategories } from './log_entry_categories_analysis';
@ -326,7 +327,7 @@ export async function getLogEntryExamples(
endTime: number,
dataset: string,
exampleCount: number,
sourceConfiguration: InfraSource,
resolvedSourceConfiguration: ResolvedLogSourceConfiguration,
callWithRequest: KibanaFramework['callWithRequest'],
categoryId?: string
) {
@ -346,7 +347,7 @@ export async function getLogEntryExamples(
const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
const indices = customSettings?.logs_source_config?.indexPattern;
const timestampField = customSettings?.logs_source_config?.timestampField;
const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
const { tiebreakerField, runtimeMappings } = resolvedSourceConfiguration;
if (indices == null || timestampField == null) {
throw new InsufficientLogAnalysisMlJobConfigurationError(
@ -361,6 +362,7 @@ export async function getLogEntryExamples(
context,
sourceId,
indices,
runtimeMappings,
timestampField,
tiebreakerField,
startTime,
@ -385,6 +387,7 @@ export async function fetchLogEntryExamples(
context: InfraPluginRequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
sourceId: string,
indices: string,
runtimeMappings: estypes.RuntimeFields,
timestampField: string,
tiebreakerField: string,
startTime: number,
@ -431,6 +434,7 @@ export async function fetchLogEntryExamples(
'search',
createLogEntryExamplesQuery(
indices,
runtimeMappings,
timestampField,
tiebreakerField,
startTime,


@ -5,6 +5,7 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import type { ILegacyScopedClusterClient } from 'src/core/server';
import {
compareDatasetsByMaximumAnomalyScore,
@ -14,6 +15,7 @@ import {
CategoriesSort,
} from '../../../common/log_analysis';
import { LogEntryContext } from '../../../common/log_entry';
import type { ResolvedLogSourceConfiguration } from '../../../common/log_sources';
import { startTracingSpan } from '../../../common/performance_tracing';
import { decodeOrThrow } from '../../../common/runtime_types';
import type { MlAnomalyDetectors, MlSystem } from '../../types';
@ -36,7 +38,6 @@ import {
createTopLogEntryCategoriesQuery,
topLogEntryCategoriesResponseRT,
} from './queries/top_log_entry_categories';
import { InfraSource } from '../sources';
import { fetchMlJob, getLogEntryDatasets } from './common';
export async function getTopLogEntryCategories(
@ -147,7 +148,7 @@ export async function getLogEntryCategoryExamples(
endTime: number,
categoryId: number,
exampleCount: number,
sourceConfiguration: InfraSource
resolvedSourceConfiguration: ResolvedLogSourceConfiguration
) {
const finalizeLogEntryCategoryExamplesSpan = startTracingSpan('get category example log entries');
@ -165,7 +166,7 @@ export async function getLogEntryCategoryExamples(
const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
const indices = customSettings?.logs_source_config?.indexPattern;
const timestampField = customSettings?.logs_source_config?.timestampField;
const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
const { tiebreakerField, runtimeMappings } = resolvedSourceConfiguration;
if (indices == null || timestampField == null) {
throw new InsufficientLogAnalysisMlJobConfigurationError(
@ -189,6 +190,7 @@ export async function getLogEntryCategoryExamples(
} = await fetchLogEntryCategoryExamples(
context,
indices,
runtimeMappings,
timestampField,
tiebreakerField,
startTime,
@ -402,6 +404,7 @@ async function fetchTopLogEntryCategoryHistograms(
async function fetchLogEntryCategoryExamples(
requestContext: { core: { elasticsearch: { legacy: { client: ILegacyScopedClusterClient } } } },
indices: string,
runtimeMappings: estypes.RuntimeFields,
timestampField: string,
tiebreakerField: string,
startTime: number,
@ -418,6 +421,7 @@ async function fetchLogEntryCategoryExamples(
'search',
createLogEntryCategoryExamplesQuery(
indices,
runtimeMappings,
timestampField,
tiebreakerField,
startTime,


@ -5,20 +5,21 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import * as rt from 'io-ts';
import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import { defaultRequestParameters } from './common';
export const createLogEntryCategoryExamplesQuery = (
indices: string,
runtimeMappings: estypes.RuntimeFields,
timestampField: string,
tiebreakerField: string,
startTime: number,
endTime: number,
categoryQuery: string,
exampleCount: number
) => ({
): estypes.SearchRequest => ({
...defaultRequestParameters,
body: {
query: {
@ -43,6 +44,7 @@ export const createLogEntryCategoryExamplesQuery = (
],
},
},
runtime_mappings: runtimeMappings,
sort: [{ [timestampField]: 'asc' }, { [tiebreakerField]: 'asc' }],
_source: false,
fields: ['event.dataset', 'message', 'container.id', 'host.name', 'log.file.path'],


@ -5,14 +5,15 @@
* 2.0.
*/
import type { estypes } from '@elastic/elasticsearch';
import * as rt from 'io-ts';
import { partitionField } from '../../../../common/log_analysis';
import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import { defaultRequestParameters } from './common';
import { partitionField } from '../../../../common/log_analysis';
export const createLogEntryExamplesQuery = (
indices: string,
runtimeMappings: estypes.RuntimeFields,
timestampField: string,
tiebreakerField: string,
startTime: number,
@ -20,7 +21,7 @@ export const createLogEntryExamplesQuery = (
dataset: string,
exampleCount: number,
categoryQuery?: string
) => ({
): estypes.SearchRequest => ({
...defaultRequestParameters,
body: {
query: {
@ -61,7 +62,7 @@ export const createLogEntryExamplesQuery = (
match: {
message: {
query: categoryQuery,
operator: 'AND',
operator: 'AND' as const,
},
},
},
@ -70,6 +71,7 @@ export const createLogEntryExamplesQuery = (
],
},
},
runtime_mappings: runtimeMappings,
sort: [{ [timestampField]: 'asc' }, { [tiebreakerField]: 'asc' }],
_source: false,
fields: ['event.dataset', 'message'],


@ -16,6 +16,7 @@ import type { InfraBackendLibs } from '../../../lib/infra_types';
import { getLogEntryCategoryExamples } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
import { isMlPrivilegesError } from '../../../lib/log_analysis/errors';
import { resolveLogSourceConfiguration } from '../../../../common/log_sources';
export const initGetLogEntryCategoryExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
framework.registerRoute(
@ -40,6 +41,10 @@ export const initGetLogEntryCategoryExamplesRoute = ({ framework, sources }: Inf
requestContext.core.savedObjects.client,
sourceId
);
const resolvedSourceConfiguration = await resolveLogSourceConfiguration(
sourceConfiguration.configuration,
await framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
try {
assertHasInfraMlPlugins(requestContext);
@ -51,7 +56,7 @@ export const initGetLogEntryCategoryExamplesRoute = ({ framework, sources }: Inf
endTime,
categoryId,
exampleCount,
sourceConfiguration
resolvedSourceConfiguration
);
return response.ok({


@ -16,6 +16,7 @@ import {
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../common/http_api/log_analysis';
import { isMlPrivilegesError } from '../../../lib/log_analysis/errors';
import { resolveLogSourceConfiguration } from '../../../../common/log_sources';
export const initGetLogEntryExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
framework.registerRoute(
@ -41,6 +42,10 @@ export const initGetLogEntryExamplesRoute = ({ framework, sources }: InfraBacken
requestContext.core.savedObjects.client,
sourceId
);
const resolvedSourceConfiguration = await resolveLogSourceConfiguration(
sourceConfiguration.configuration,
await framework.getIndexPatternsServiceWithRequestContext(requestContext)
);
try {
assertHasInfraMlPlugins(requestContext);
@ -52,7 +57,7 @@ export const initGetLogEntryExamplesRoute = ({ framework, sources }: InfraBacken
endTime,
dataset,
exampleCount,
sourceConfiguration,
resolvedSourceConfiguration,
framework.callWithRequest,
categoryId
);


@ -6,6 +6,7 @@
*/
import Boom from '@hapi/boom';
import type { estypes } from '@elastic/elasticsearch';
import { InfraBackendLibs } from '../../../lib/infra_types';
import {
@ -31,7 +32,7 @@ export const initValidateLogAnalysisDatasetsRoute = ({
framework.router.handleLegacyErrors(async (requestContext, request, response) => {
try {
const {
data: { indices, timestampField, startTime, endTime },
data: { indices, timestampField, startTime, endTime, runtimeMappings },
} = request.body;
const datasets = await Promise.all(
@ -41,7 +42,8 @@ export const initValidateLogAnalysisDatasetsRoute = ({
timestampField,
indexName,
startTime,
endTime
endTime,
runtimeMappings as estypes.RuntimeFields
);
return {


@ -36,7 +36,7 @@ export const initValidateLogAnalysisIndicesRoute = ({ framework }: InfraBackendL
fold(throwErrors(Boom.badRequest), identity)
);
const { fields, indices } = payload.data;
const { fields, indices, runtimeMappings } = payload.data;
const errors: ValidationIndicesError[] = [];
// Query each pattern individually, to map correctly the errors
@ -47,6 +47,9 @@ export const initValidateLogAnalysisIndicesRoute = ({ framework }: InfraBackendL
fields: fields.map((field) => field.name),
ignore_unavailable: true,
index,
body: {
runtime_mappings: runtimeMappings,
},
});
if (fieldCaps.indices.length === 0) {