[Logs UI] Return empty result sets instead of 500 or 404 for analysis results (#72824)
This changes the analysis results routes to return empty result sets with HTTP status code 200 instead of the inconsistent status codes 500 or 404.
This commit is contained in:
parent
2a77307af1
commit
aa45ac89b0
|
@ -14,7 +14,6 @@ import {
|
|||
logEntryDatasetsResponseRT,
|
||||
} from './queries/log_entry_data_sets';
|
||||
import { decodeOrThrow } from '../../../common/runtime_types';
|
||||
import { NoLogAnalysisResultsIndexError } from './errors';
|
||||
import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing';
|
||||
|
||||
export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) {
|
||||
|
@ -67,16 +66,8 @@ export async function getLogEntryDatasets(
|
|||
)
|
||||
);
|
||||
|
||||
if (logEntryDatasetsResponse._shards.total === 0) {
|
||||
throw new NoLogAnalysisResultsIndexError(
|
||||
`Failed to find ml indices for jobs: ${jobIds.join(', ')}.`
|
||||
);
|
||||
}
|
||||
|
||||
const {
|
||||
after_key: afterKey,
|
||||
buckets: latestBatchBuckets,
|
||||
} = logEntryDatasetsResponse.aggregations.dataset_buckets;
|
||||
const { after_key: afterKey, buckets: latestBatchBuckets = [] } =
|
||||
logEntryDatasetsResponse.aggregations?.dataset_buckets ?? {};
|
||||
|
||||
logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets];
|
||||
afterLatestBatchKey = afterKey;
|
||||
|
|
|
@ -6,13 +6,6 @@
|
|||
|
||||
/* eslint-disable max-classes-per-file */
|
||||
|
||||
export class NoLogAnalysisResultsIndexError extends Error {
|
||||
constructor(message?: string) {
|
||||
super(message);
|
||||
Object.setPrototypeOf(this, new.target.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
export class NoLogAnalysisMlJobError extends Error {
|
||||
constructor(message?: string) {
|
||||
super(message);
|
||||
|
|
|
@ -15,11 +15,7 @@ import {
|
|||
import { startTracingSpan } from '../../../common/performance_tracing';
|
||||
import { decodeOrThrow } from '../../../common/runtime_types';
|
||||
import type { MlAnomalyDetectors, MlSystem } from '../../types';
|
||||
import {
|
||||
InsufficientLogAnalysisMlJobConfigurationError,
|
||||
NoLogAnalysisResultsIndexError,
|
||||
UnknownCategoryError,
|
||||
} from './errors';
|
||||
import { InsufficientLogAnalysisMlJobConfigurationError, UnknownCategoryError } from './errors';
|
||||
import {
|
||||
createLogEntryCategoriesQuery,
|
||||
logEntryCategoriesResponseRT,
|
||||
|
@ -235,38 +231,33 @@ async function fetchTopLogEntryCategories(
|
|||
|
||||
const esSearchSpan = finalizeEsSearchSpan();
|
||||
|
||||
if (topLogEntryCategoriesResponse._shards.total === 0) {
|
||||
throw new NoLogAnalysisResultsIndexError(
|
||||
`Failed to find ml result index for job ${logEntryCategoriesCountJobId}.`
|
||||
);
|
||||
}
|
||||
const topLogEntryCategories =
|
||||
topLogEntryCategoriesResponse.aggregations?.terms_category_id.buckets.map(
|
||||
(topCategoryBucket) => {
|
||||
const maximumAnomalyScoresByDataset = topCategoryBucket.filter_record.terms_dataset.buckets.reduce<
|
||||
Record<string, number>
|
||||
>(
|
||||
(accumulatedMaximumAnomalyScores, datasetFromRecord) => ({
|
||||
...accumulatedMaximumAnomalyScores,
|
||||
[datasetFromRecord.key]: datasetFromRecord.maximum_record_score.value ?? 0,
|
||||
}),
|
||||
{}
|
||||
);
|
||||
|
||||
const topLogEntryCategories = topLogEntryCategoriesResponse.aggregations.terms_category_id.buckets.map(
|
||||
(topCategoryBucket) => {
|
||||
const maximumAnomalyScoresByDataset = topCategoryBucket.filter_record.terms_dataset.buckets.reduce<
|
||||
Record<string, number>
|
||||
>(
|
||||
(accumulatedMaximumAnomalyScores, datasetFromRecord) => ({
|
||||
...accumulatedMaximumAnomalyScores,
|
||||
[datasetFromRecord.key]: datasetFromRecord.maximum_record_score.value ?? 0,
|
||||
}),
|
||||
{}
|
||||
);
|
||||
|
||||
return {
|
||||
categoryId: parseCategoryId(topCategoryBucket.key),
|
||||
logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0,
|
||||
datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets
|
||||
.map((datasetBucket) => ({
|
||||
name: datasetBucket.key,
|
||||
maximumAnomalyScore: maximumAnomalyScoresByDataset[datasetBucket.key] ?? 0,
|
||||
}))
|
||||
.sort(compareDatasetsByMaximumAnomalyScore)
|
||||
.reverse(),
|
||||
maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 0,
|
||||
};
|
||||
}
|
||||
);
|
||||
return {
|
||||
categoryId: parseCategoryId(topCategoryBucket.key),
|
||||
logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0,
|
||||
datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets
|
||||
.map((datasetBucket) => ({
|
||||
name: datasetBucket.key,
|
||||
maximumAnomalyScore: maximumAnomalyScoresByDataset[datasetBucket.key] ?? 0,
|
||||
}))
|
||||
.sort(compareDatasetsByMaximumAnomalyScore)
|
||||
.reverse(),
|
||||
maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 0,
|
||||
};
|
||||
}
|
||||
) ?? [];
|
||||
|
||||
return {
|
||||
topLogEntryCategories,
|
||||
|
|
|
@ -4,10 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import { map, fold } from 'fp-ts/lib/Either';
|
||||
import { identity } from 'fp-ts/lib/function';
|
||||
import { throwErrors, createPlainError } from '../../../common/runtime_types';
|
||||
import { decodeOrThrow } from '../../../common/runtime_types';
|
||||
import {
|
||||
logRateModelPlotResponseRT,
|
||||
createLogEntryRateQuery,
|
||||
|
@ -15,7 +12,6 @@ import {
|
|||
CompositeTimestampPartitionKey,
|
||||
} from './queries';
|
||||
import { getJobId } from '../../../common/log_analysis';
|
||||
import { NoLogAnalysisResultsIndexError } from './errors';
|
||||
import type { MlSystem } from '../../types';
|
||||
|
||||
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
|
||||
|
@ -50,22 +46,14 @@ export async function getLogEntryRateBuckets(
|
|||
)
|
||||
);
|
||||
|
||||
if (mlModelPlotResponse._shards.total === 0) {
|
||||
throw new NoLogAnalysisResultsIndexError(
|
||||
`Failed to query ml result index for job ${logRateJobId}.`
|
||||
);
|
||||
}
|
||||
|
||||
const { after_key: afterKey, buckets: latestBatchBuckets } = pipe(
|
||||
logRateModelPlotResponseRT.decode(mlModelPlotResponse),
|
||||
map((response) => response.aggregations.timestamp_partition_buckets),
|
||||
fold(throwErrors(createPlainError), identity)
|
||||
);
|
||||
const { after_key: afterKey, buckets: latestBatchBuckets = [] } =
|
||||
decodeOrThrow(logRateModelPlotResponseRT)(mlModelPlotResponse).aggregations
|
||||
?.timestamp_partition_buckets ?? {};
|
||||
|
||||
mlModelPlotBuckets = [...mlModelPlotBuckets, ...latestBatchBuckets];
|
||||
afterLatestBatchKey = afterKey;
|
||||
|
||||
if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
|
||||
if (afterKey == null || latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -67,7 +67,7 @@ export type LogEntryDatasetBucket = rt.TypeOf<typeof logEntryDatasetBucketRT>;
|
|||
|
||||
export const logEntryDatasetsResponseRT = rt.intersection([
|
||||
commonSearchSuccessResponseFieldsRT,
|
||||
rt.type({
|
||||
rt.partial({
|
||||
aggregations: rt.type({
|
||||
dataset_buckets: rt.intersection([
|
||||
rt.type({
|
||||
|
|
|
@ -162,7 +162,7 @@ export const logRateModelPlotBucketRT = rt.type({
|
|||
|
||||
export type LogRateModelPlotBucket = rt.TypeOf<typeof logRateModelPlotBucketRT>;
|
||||
|
||||
export const logRateModelPlotResponseRT = rt.type({
|
||||
export const logRateModelPlotResponseRT = rt.partial({
|
||||
aggregations: rt.type({
|
||||
timestamp_partition_buckets: rt.intersection([
|
||||
rt.type({
|
||||
|
|
|
@ -159,7 +159,7 @@ export type LogEntryCategoryBucket = rt.TypeOf<typeof logEntryCategoryBucketRT>;
|
|||
|
||||
export const topLogEntryCategoriesResponseRT = rt.intersection([
|
||||
commonSearchSuccessResponseFieldsRT,
|
||||
rt.type({
|
||||
rt.partial({
|
||||
aggregations: rt.type({
|
||||
terms_category_id: rt.type({
|
||||
buckets: rt.array(logEntryCategoryBucketRT),
|
||||
|
|
|
@ -12,10 +12,7 @@ import {
|
|||
} from '../../../../common/http_api/log_analysis';
|
||||
import { createValidationFunction } from '../../../../common/runtime_types';
|
||||
import type { InfraBackendLibs } from '../../../lib/infra_types';
|
||||
import {
|
||||
getLogEntryAnomaliesDatasets,
|
||||
NoLogAnalysisResultsIndexError,
|
||||
} from '../../../lib/log_analysis';
|
||||
import { getLogEntryAnomaliesDatasets } from '../../../lib/log_analysis';
|
||||
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
|
||||
|
||||
export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBackendLibs) => {
|
||||
|
@ -58,10 +55,6 @@ export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBacken
|
|||
throw error;
|
||||
}
|
||||
|
||||
if (error instanceof NoLogAnalysisResultsIndexError) {
|
||||
return response.notFound({ body: { message: error.message } });
|
||||
}
|
||||
|
||||
return response.customError({
|
||||
statusCode: error.statusCode ?? 500,
|
||||
body: {
|
||||
|
|
|
@ -12,10 +12,7 @@ import {
|
|||
} from '../../../../common/http_api/log_analysis';
|
||||
import { createValidationFunction } from '../../../../common/runtime_types';
|
||||
import type { InfraBackendLibs } from '../../../lib/infra_types';
|
||||
import {
|
||||
getTopLogEntryCategories,
|
||||
NoLogAnalysisResultsIndexError,
|
||||
} from '../../../lib/log_analysis';
|
||||
import { getTopLogEntryCategories } from '../../../lib/log_analysis';
|
||||
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
|
||||
|
||||
export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs) => {
|
||||
|
@ -69,10 +66,6 @@ export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs)
|
|||
throw error;
|
||||
}
|
||||
|
||||
if (error instanceof NoLogAnalysisResultsIndexError) {
|
||||
return response.notFound({ body: { message: error.message } });
|
||||
}
|
||||
|
||||
return response.customError({
|
||||
statusCode: error.statusCode ?? 500,
|
||||
body: {
|
||||
|
|
|
@ -12,10 +12,7 @@ import {
|
|||
} from '../../../../common/http_api/log_analysis';
|
||||
import { createValidationFunction } from '../../../../common/runtime_types';
|
||||
import type { InfraBackendLibs } from '../../../lib/infra_types';
|
||||
import {
|
||||
getLogEntryCategoryDatasets,
|
||||
NoLogAnalysisResultsIndexError,
|
||||
} from '../../../lib/log_analysis';
|
||||
import { getLogEntryCategoryDatasets } from '../../../lib/log_analysis';
|
||||
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
|
||||
|
||||
export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackendLibs) => {
|
||||
|
@ -58,10 +55,6 @@ export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackend
|
|||
throw error;
|
||||
}
|
||||
|
||||
if (error instanceof NoLogAnalysisResultsIndexError) {
|
||||
return response.notFound({ body: { message: error.message } });
|
||||
}
|
||||
|
||||
return response.customError({
|
||||
statusCode: error.statusCode ?? 500,
|
||||
body: {
|
||||
|
|
|
@ -12,10 +12,7 @@ import {
|
|||
} from '../../../../common/http_api/log_analysis';
|
||||
import { createValidationFunction } from '../../../../common/runtime_types';
|
||||
import type { InfraBackendLibs } from '../../../lib/infra_types';
|
||||
import {
|
||||
getLogEntryCategoryExamples,
|
||||
NoLogAnalysisResultsIndexError,
|
||||
} from '../../../lib/log_analysis';
|
||||
import { getLogEntryCategoryExamples } from '../../../lib/log_analysis';
|
||||
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
|
||||
|
||||
export const initGetLogEntryCategoryExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
|
||||
|
@ -68,10 +65,6 @@ export const initGetLogEntryCategoryExamplesRoute = ({ framework, sources }: Inf
|
|||
throw error;
|
||||
}
|
||||
|
||||
if (error instanceof NoLogAnalysisResultsIndexError) {
|
||||
return response.notFound({ body: { message: error.message } });
|
||||
}
|
||||
|
||||
return response.customError({
|
||||
statusCode: error.statusCode ?? 500,
|
||||
body: {
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
import Boom from 'boom';
|
||||
import { createValidationFunction } from '../../../../common/runtime_types';
|
||||
import { InfraBackendLibs } from '../../../lib/infra_types';
|
||||
import { NoLogAnalysisResultsIndexError, getLogEntryExamples } from '../../../lib/log_analysis';
|
||||
import { getLogEntryExamples } from '../../../lib/log_analysis';
|
||||
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
|
||||
import {
|
||||
getLogEntryExamplesRequestPayloadRT,
|
||||
|
@ -68,10 +68,6 @@ export const initGetLogEntryExamplesRoute = ({ framework, sources }: InfraBacken
|
|||
throw error;
|
||||
}
|
||||
|
||||
if (error instanceof NoLogAnalysisResultsIndexError) {
|
||||
return response.notFound({ body: { message: error.message } });
|
||||
}
|
||||
|
||||
return response.customError({
|
||||
statusCode: error.statusCode ?? 500,
|
||||
body: {
|
||||
|
|
|
@ -13,7 +13,7 @@ import {
|
|||
GetLogEntryRateSuccessResponsePayload,
|
||||
} from '../../../../common/http_api/log_analysis';
|
||||
import { createValidationFunction } from '../../../../common/runtime_types';
|
||||
import { NoLogAnalysisResultsIndexError, getLogEntryRateBuckets } from '../../../lib/log_analysis';
|
||||
import { getLogEntryRateBuckets } from '../../../lib/log_analysis';
|
||||
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
|
||||
|
||||
export const initGetLogEntryRateRoute = ({ framework }: InfraBackendLibs) => {
|
||||
|
@ -56,10 +56,6 @@ export const initGetLogEntryRateRoute = ({ framework }: InfraBackendLibs) => {
|
|||
throw error;
|
||||
}
|
||||
|
||||
if (error instanceof NoLogAnalysisResultsIndexError) {
|
||||
return response.notFound({ body: { message: error.message } });
|
||||
}
|
||||
|
||||
return response.customError({
|
||||
statusCode: error.statusCode ?? 500,
|
||||
body: {
|
||||
|
|
Loading…
Reference in a new issue