[Logs UI] Reorganise log rate anomaly table (#69516)

* Remove top level chart

Remove top level anomalies chart

* Refactor table columns to accommodate new formatting

* Typical vs actual stats in expanded row

* Format message based on actual vs typical

* Start fleshing out log rate examples endpoint and lib methods

* Use the real document ID for expanded rows so React doesn't re-render content

* Add all data fetching resources for log entry rate examples

* Move log entry example and severity indicator components to a shared location

* Render examples for log rate

* Add severity indicator

* Styling tweaks

* Move horizontal button popover menu to a shared component so the log rate table can use it

* Revert "Move horizontal button popover menu to a shared components so log rate table can use it"

This reverts commit f80db5984d.

* Add "view in stream" and "view in anomaly explorer" links

* Hook links into the new context menu component

* Add log column headers and styling tweaks, etc.

* Fix translations

* Tweak comments

* Chart tweaks

* Update x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* PR amendments

- Pass href to context menu items
- Fix start and end times used for example logs
- Use "fewer" rather than "less"

* Update x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* Update x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

* PR amendments

- Fix typechecking
- Add an empty log example column header to account for the context menu
- Add anomaly start time to rows

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>
Kerry Gallagher 2020-07-03 16:57:59 +01:00 committed by GitHub
parent 97ca7bfc2e
commit 7ec48fd966
28 changed files with 1180 additions and 260 deletions

View file

@@ -8,3 +8,4 @@ export * from './log_entry_categories';
export * from './log_entry_category_datasets';
export * from './log_entry_category_examples';
export * from './log_entry_rate';
+export * from './log_entry_rate_examples';

View file

@@ -30,6 +30,7 @@ export type GetLogEntryRateRequestPayload = rt.TypeOf<typeof getLogEntryRateRequ
 */
export const logEntryRateAnomalyRT = rt.type({
+  id: rt.string,
  actualLogEntryRate: rt.number,
  anomalyScore: rt.number,
  duration: rt.number,
@@ -37,6 +38,8 @@ export const logEntryRateAnomalyRT = rt.type({
  typicalLogEntryRate: rt.number,
});

+export type LogEntryRateAnomaly = rt.TypeOf<typeof logEntryRateAnomalyRT>;
+
export const logEntryRatePartitionRT = rt.type({
  analysisBucketCount: rt.number,
  anomalies: rt.array(logEntryRateAnomalyRT),

View file

@@ -0,0 +1,77 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import {
badRequestErrorRT,
forbiddenErrorRT,
timeRangeRT,
routeTimingMetadataRT,
} from '../../shared';
export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH =
'/api/infra/log_analysis/results/log_entry_rate_examples';
/**
* request
*/
export const getLogEntryRateExamplesRequestPayloadRT = rt.type({
data: rt.type({
// the dataset to fetch the log rate examples from
dataset: rt.string,
// the number of examples to fetch
exampleCount: rt.number,
// the id of the source configuration
sourceId: rt.string,
// the time range to fetch the log rate examples from
timeRange: timeRangeRT,
}),
});
export type GetLogEntryRateExamplesRequestPayload = rt.TypeOf<
typeof getLogEntryRateExamplesRequestPayloadRT
>;
/**
* response
*/
const logEntryRateExampleRT = rt.type({
id: rt.string,
dataset: rt.string,
message: rt.string,
timestamp: rt.number,
tiebreaker: rt.number,
});
export type LogEntryRateExample = rt.TypeOf<typeof logEntryRateExampleRT>;
export const getLogEntryRateExamplesSuccessReponsePayloadRT = rt.intersection([
rt.type({
data: rt.type({
examples: rt.array(logEntryRateExampleRT),
}),
}),
rt.partial({
timing: routeTimingMetadataRT,
}),
]);
export type GetLogEntryRateExamplesSuccessReponsePayload = rt.TypeOf<
typeof getLogEntryRateExamplesSuccessReponsePayloadRT
>;
export const getLogEntryRateExamplesResponsePayloadRT = rt.union([
getLogEntryRateExamplesSuccessReponsePayloadRT,
badRequestErrorRT,
forbiddenErrorRT,
]);
export type GetLogEntryRateExamplesResponsePayload = rt.TypeOf<
typeof getLogEntryRateExamplesResponsePayloadRT
>;
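
As an illustration of how these runtime types are meant to be consumed, a minimal client-side decoding sketch follows. It mirrors the fold-based decoding pattern used by the service call added later in this PR; the helper name and the relative import path are assumptions, not part of the change.

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import {
  GetLogEntryRateExamplesResponsePayload,
  getLogEntryRateExamplesResponsePayloadRT,
} from './log_entry_rate_examples';

// Decode an untyped HTTP response body into the typed payload, throwing when
// the value matches none of the union members (success, bad request, forbidden).
const decodeExamplesResponse = (value: unknown): GetLogEntryRateExamplesResponsePayload =>
  pipe(
    getLogEntryRateExamplesResponsePayloadRT.decode(value),
    fold(
      () => {
        throw new Error('Failed to decode log entry rate examples response');
      },
      (decodedValue) => decodedValue
    )
  );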

View file

@@ -10,7 +10,7 @@ import {
  formatAnomalyScore,
  getSeverityCategoryForScore,
  ML_SEVERITY_COLORS,
-} from '../../../../../../common/log_analysis';
+} from '../../../../common/log_analysis';

export const AnomalySeverityIndicator: React.FunctionComponent<{
  anomalyScore: number;

View file

@@ -0,0 +1,49 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import { euiStyled } from '../../../../../observability/public';
import { LogEntryExampleMessagesEmptyIndicator } from './log_entry_examples_empty_indicator';
import { LogEntryExampleMessagesFailureIndicator } from './log_entry_examples_failure_indicator';
import { LogEntryExampleMessagesLoadingIndicator } from './log_entry_examples_loading_indicator';
interface Props {
isLoading: boolean;
hasFailedLoading: boolean;
hasResults: boolean;
exampleCount: number;
onReload: () => void;
}
export const LogEntryExampleMessages: React.FunctionComponent<Props> = ({
isLoading,
hasFailedLoading,
exampleCount,
hasResults,
onReload,
children,
}) => {
return (
<Wrapper>
{isLoading ? (
<LogEntryExampleMessagesLoadingIndicator exampleCount={exampleCount} />
) : hasFailedLoading ? (
<LogEntryExampleMessagesFailureIndicator onRetry={onReload} />
) : !hasResults ? (
<LogEntryExampleMessagesEmptyIndicator onReload={onReload} />
) : (
children
)}
</Wrapper>
);
};
const Wrapper = euiStyled.div`
align-items: stretch;
flex-direction: column;
flex: 1 0 0%;
overflow: hidden;
`;

View file

@@ -7,20 +7,20 @@ import { EuiButton, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import React from 'react';

-export const CategoryExampleMessagesEmptyIndicator: React.FunctionComponent<{
+export const LogEntryExampleMessagesEmptyIndicator: React.FunctionComponent<{
  onReload: () => void;
}> = ({ onReload }) => (
  <EuiFlexGroup alignItems="center" justifyContent="center">
    <EuiFlexItem grow={false} className="eui-textNoWrap">
      <FormattedMessage
-        id="xpack.infra.logs.logEntryCategories.exampleEmptyDescription"
+        id="xpack.infra.logs.logEntryExamples.exampleEmptyDescription"
        defaultMessage="No examples found within the selected time range. Increase the log entry retention period to improve message sample availability."
      />
    </EuiFlexItem>
    <EuiFlexItem grow={false}>
      <EuiButton onClick={onReload} size="s">
        <FormattedMessage
-          id="xpack.infra.logs.logEntryCategories.exampleEmptyReloadButtonLabel"
+          id="xpack.infra.logs.logEntryExamples.exampleEmptyReloadButtonLabel"
          defaultMessage="Reload"
        />
      </EuiButton>

View file

@@ -7,22 +7,22 @@ import { EuiButton, EuiFlexGroup, EuiFlexItem, EuiTextColor } from '@elastic/eui
import { FormattedMessage } from '@kbn/i18n/react';
import React from 'react';

-export const CategoryExampleMessagesFailureIndicator: React.FunctionComponent<{
+export const LogEntryExampleMessagesFailureIndicator: React.FunctionComponent<{
  onRetry: () => void;
}> = ({ onRetry }) => (
  <EuiFlexGroup alignItems="center" justifyContent="center">
    <EuiFlexItem grow={false} className="eui-textNoWrap">
      <EuiTextColor color="danger">
        <FormattedMessage
-          id="xpack.infra.logs.logEntryCategories.exampleLoadingFailureDescription"
-          defaultMessage="Failed to load category examples."
+          id="xpack.infra.logs.logEntryExamples.exampleLoadingFailureDescription"
+          defaultMessage="Failed to load examples."
        />
      </EuiTextColor>
    </EuiFlexItem>
    <EuiFlexItem grow={false}>
      <EuiButton onClick={onRetry} size="s">
        <FormattedMessage
-          id="xpack.infra.logs.logEntryCategories.exampleLoadingFailureRetryButtonLabel"
+          id="xpack.infra.logs.logEntryExamples.exampleLoadingFailureRetryButtonLabel"
          defaultMessage="Retry"
        />
      </EuiButton>

View file

@@ -7,7 +7,7 @@
import { EuiLoadingContent } from '@elastic/eui';
import React from 'react';

-export const CategoryExampleMessagesLoadingIndicator: React.FunctionComponent<{
+export const LogEntryExampleMessagesLoadingIndicator: React.FunctionComponent<{
  exampleCount: number;
}> = ({ exampleCount }) => (
  <>

View file

@@ -68,7 +68,7 @@ export const LogColumnHeaders: React.FunctionComponent<{
  );
};

-const LogColumnHeader: React.FunctionComponent<{
+export const LogColumnHeader: React.FunctionComponent<{
  columnWidth: LogEntryColumnWidth;
  'data-test-subj'?: string;
}> = ({ children, columnWidth, 'data-test-subj': dataTestSubj }) => (
@@ -77,7 +77,7 @@ const LogColumnHeader: React.FunctionComponent<{
  </LogColumnHeaderWrapper>
);

-const LogColumnHeadersWrapper = euiStyled.div.attrs((props) => ({
+export const LogColumnHeadersWrapper = euiStyled.div.attrs((props) => ({
  role: props.role ?? 'row',
}))`
  align-items: stretch;

View file

@@ -4,9 +4,15 @@
 * you may not use this file except in compliance with the Elastic License.
 */
-export { LogEntryColumn, LogEntryColumnWidths, useColumnWidths } from './log_entry_column';
+export {
+  LogEntryColumn,
+  LogEntryColumnWidths,
+  useColumnWidths,
+  iconColumnId,
+} from './log_entry_column';
export { LogEntryFieldColumn } from './log_entry_field_column';
export { LogEntryMessageColumn } from './log_entry_message_column';
export { LogEntryRowWrapper } from './log_entry_row';
export { LogEntryTimestampColumn } from './log_entry_timestamp_column';
export { ScrollableLogTextStreamView } from './scrollable_log_text_stream_view';
+export { LogEntryContextMenu } from './log_entry_context_menu';

View file

@@ -13,7 +13,8 @@ import { LogEntryColumnContent } from './log_entry_column';

interface LogEntryContextMenuItem {
  label: string;
-  onClick: () => void;
+  onClick: (e: React.MouseEvent) => void;
+  href?: string;
}

interface LogEntryContextMenuProps {
@@ -40,9 +41,9 @@ export const LogEntryContextMenu: React.FC<LogEntryContextMenuProps> = ({
}) => {
  const closeMenuAndCall = useMemo(() => {
    return (callback: LogEntryContextMenuItem['onClick']) => {
-      return () => {
+      return (e: React.MouseEvent) => {
        onClose();
-        callback();
+        callback(e);
      };
    };
  }, [onClose]);
@@ -60,7 +61,7 @@ export const LogEntryContextMenu: React.FC<LogEntryContextMenuProps> = ({
  const wrappedItems = useMemo(() => {
    return items.map((item, i) => (
-      <EuiContextMenuItem key={i} onClick={closeMenuAndCall(item.onClick)}>
+      <EuiContextMenuItem key={i} onClick={closeMenuAndCall(item.onClick)} href={item.href}>
        {item.label}
      </EuiContextMenuItem>
    ));

View file

@@ -8,7 +8,7 @@ import React from 'react';
import { LogEntryCategoryDataset } from '../../../../../../common/http_api/log_analysis';
import { getFriendlyNameForPartitionId } from '../../../../../../common/log_analysis';
-import { AnomalySeverityIndicator } from './anomaly_severity_indicator';
+import { AnomalySeverityIndicator } from '../../../../../components/logging/log_analysis_results/anomaly_severity_indicator';

export const AnomalySeverityIndicatorList: React.FunctionComponent<{
  datasets: LogEntryCategoryDataset[];

View file

@@ -5,14 +5,10 @@
 */
import React, { useEffect } from 'react';
-import { euiStyled } from '../../../../../../../observability/public';
-import { TimeRange } from '../../../../../../common/http_api/shared';
import { useLogEntryCategoryExamples } from '../../use_log_entry_category_examples';
+import { LogEntryExampleMessages } from '../../../../../components/logging/log_entry_examples/log_entry_examples';
+import { TimeRange } from '../../../../../../common/http_api/shared';
import { CategoryExampleMessage } from './category_example_message';
-import { CategoryExampleMessagesEmptyIndicator } from './category_example_messages_empty_indicator';
-import { CategoryExampleMessagesFailureIndicator } from './category_example_messages_failure_indicator';
-import { CategoryExampleMessagesLoadingIndicator } from './category_example_messages_loading_indicator';

const exampleCount = 5;
@@ -39,30 +35,21 @@ export const CategoryDetailsRow: React.FunctionComponent<{
  }, [getLogEntryCategoryExamples]);

  return (
-    <CategoryExampleMessages>
-      {isLoadingLogEntryCategoryExamples ? (
-        <CategoryExampleMessagesLoadingIndicator exampleCount={exampleCount} />
-      ) : hasFailedLoadingLogEntryCategoryExamples ? (
-        <CategoryExampleMessagesFailureIndicator onRetry={getLogEntryCategoryExamples} />
-      ) : logEntryCategoryExamples.length === 0 ? (
-        <CategoryExampleMessagesEmptyIndicator onReload={getLogEntryCategoryExamples} />
-      ) : (
-        logEntryCategoryExamples.map((categoryExample, categoryExampleIndex) => (
-          <CategoryExampleMessage
-            dataset={categoryExample.dataset}
-            key={categoryExampleIndex}
-            message={categoryExample.message}
-            timestamp={categoryExample.timestamp}
-          />
-        ))
-      )}
-    </CategoryExampleMessages>
+    <LogEntryExampleMessages
+      isLoading={isLoadingLogEntryCategoryExamples}
+      hasFailedLoading={hasFailedLoadingLogEntryCategoryExamples}
+      hasResults={logEntryCategoryExamples.length > 0}
+      exampleCount={exampleCount}
+      onReload={getLogEntryCategoryExamples}
+    >
+      {logEntryCategoryExamples.map((example, exampleIndex) => (
+        <CategoryExampleMessage
+          key={exampleIndex}
+          dataset={example.dataset}
+          message={example.message}
+          timestamp={example.timestamp}
+        />
+      ))}
+    </LogEntryExampleMessages>
  );
};
-
-const CategoryExampleMessages = euiStyled.div`
-  align-items: stretch;
-  flex-direction: column;
-  flex: 1 0 0%;
-  overflow: hidden;
-`;

View file

@@ -4,86 +4,129 @@
 * you may not use this file except in compliance with the Elastic License.
 */
-import { EuiFlexGroup, EuiFlexItem, EuiSpacer, EuiStat } from '@elastic/eui';
+import { EuiFlexGroup, EuiFlexItem, EuiTitle, EuiStat } from '@elastic/eui';
import numeral from '@elastic/numeral';
import { i18n } from '@kbn/i18n';
-import React, { useMemo } from 'react';
+import React from 'react';
+import { useMount } from 'react-use';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { AnalyzeInMlButton } from '../../../../../components/logging/log_analysis_results';
-import { LogEntryRateResults } from '../../use_log_entry_rate_results';
-import {
-  getAnnotationsForPartition,
-  getLogEntryRateSeriesForPartition,
-  getTotalNumberOfLogEntriesForPartition,
-} from '../helpers/data_formatters';
-import { AnomaliesChart } from './chart';
+import { AnomalyRecord } from '../../use_log_entry_rate_results';
+import { useLogEntryRateModuleContext } from '../../use_log_entry_rate_module';
+import { useLogEntryRateExamples } from '../../use_log_entry_rate_examples';
+import { LogEntryExampleMessages } from '../../../../../components/logging/log_entry_examples/log_entry_examples';
+import { LogEntryRateExampleMessage, LogEntryRateExampleMessageHeaders } from './log_entry_example';
+import { euiStyled } from '../../../../../../../observability/public';
+
+const EXAMPLE_COUNT = 5;
+
+const examplesTitle = i18n.translate('xpack.infra.logs.analysis.anomaliesTableExamplesTitle', {
+  defaultMessage: 'Example log entries',
+});

export const AnomaliesTableExpandedRow: React.FunctionComponent<{
-  partitionId: string;
-  results: LogEntryRateResults;
-  setTimeRange: (timeRange: TimeRange) => void;
+  anomaly: AnomalyRecord;
  timeRange: TimeRange;
  jobId: string;
-}> = ({ results, timeRange, setTimeRange, partitionId, jobId }) => {
-  const logEntryRateSeries = useMemo(
-    () =>
-      results?.histogramBuckets ? getLogEntryRateSeriesForPartition(results, partitionId) : [],
-    /* eslint-disable-next-line react-hooks/exhaustive-deps */
-    [results, partitionId]
-  );
-  const anomalyAnnotations = useMemo(
-    () =>
-      results?.histogramBuckets
-        ? getAnnotationsForPartition(results, partitionId)
-        : {
-            warning: [],
-            minor: [],
-            major: [],
-            critical: [],
-          },
-    /* eslint-disable-next-line react-hooks/exhaustive-deps */
-    [results, partitionId]
-  );
-  const totalNumberOfLogEntries = useMemo(
-    () =>
-      results?.histogramBuckets
-        ? getTotalNumberOfLogEntriesForPartition(results, partitionId)
-        : undefined,
-    /* eslint-disable-next-line react-hooks/exhaustive-deps */
-    [results, partitionId]
-  );
+}> = ({ anomaly, timeRange, jobId }) => {
+  const {
+    sourceConfiguration: { sourceId },
+  } = useLogEntryRateModuleContext();
+
+  const {
+    getLogEntryRateExamples,
+    hasFailedLoadingLogEntryRateExamples,
+    isLoadingLogEntryRateExamples,
+    logEntryRateExamples,
+  } = useLogEntryRateExamples({
+    dataset: anomaly.partitionId,
+    endTime: anomaly.startTime + anomaly.duration,
+    exampleCount: EXAMPLE_COUNT,
+    sourceId,
+    startTime: anomaly.startTime,
+  });
+
+  useMount(() => {
+    getLogEntryRateExamples();
+  });
  return (
-    <EuiFlexGroup>
-      <EuiFlexItem grow={8}>
-        <AnomaliesChart
-          chartId={`${partitionId}-anomalies`}
-          timeRange={timeRange}
-          setTimeRange={setTimeRange}
-          series={logEntryRateSeries}
-          annotations={anomalyAnnotations}
-        />
-      </EuiFlexItem>
-      <EuiFlexItem>
-        <EuiSpacer size="m" />
-        <EuiStat
-          title={numeral(totalNumberOfLogEntries).format('0.00a')}
-          titleSize="m"
-          description={i18n.translate(
-            'xpack.infra.logs.analysis.anomaliesExpandedRowNumberOfLogEntriesDescription',
-            {
-              defaultMessage: 'Number of log entries',
-            }
-          )}
-          reverse
-        />
-        <EuiSpacer size="m" />
-        <EuiFlexGroup>
-          <EuiFlexItem grow={false}>
-            <AnalyzeInMlButton jobId={jobId} timeRange={timeRange} partition={partitionId} />
-          </EuiFlexItem>
-        </EuiFlexGroup>
-      </EuiFlexItem>
-    </EuiFlexGroup>
+    <>
+      <ExpandedContentWrapper direction="column">
+        <EuiFlexItem>
+          <EuiTitle size="s">
+            <h3>{examplesTitle}</h3>
+          </EuiTitle>
+          <LogEntryExampleMessages
+            isLoading={isLoadingLogEntryRateExamples}
+            hasFailedLoading={hasFailedLoadingLogEntryRateExamples}
+            hasResults={logEntryRateExamples.length > 0}
+            exampleCount={EXAMPLE_COUNT}
+            onReload={getLogEntryRateExamples}
+          >
+            {logEntryRateExamples.length > 0 ? (
+              <>
+                <LogEntryRateExampleMessageHeaders dateTime={logEntryRateExamples[0].timestamp} />
+                {logEntryRateExamples.map((example, exampleIndex) => (
+                  <LogEntryRateExampleMessage
+                    key={exampleIndex}
+                    id={example.id}
+                    dataset={example.dataset}
+                    message={example.message}
+                    timestamp={example.timestamp}
+                    tiebreaker={example.tiebreaker}
+                    timeRange={timeRange}
+                    jobId={jobId}
+                  />
+                ))}
+              </>
+            ) : null}
+          </LogEntryExampleMessages>
+        </EuiFlexItem>
+        <EuiFlexItem>
+          <EuiFlexGroup>
+            <EuiFlexItem grow={false}>
+              <EuiStat
+                titleSize="s"
+                title={`${numeral(anomaly.typicalLogEntryRate).format('0.00a')} ${i18n.translate(
+                  'xpack.infra.logs.analysis.anomaliesExpandedRowTypicalRateTitle',
+                  {
+                    defaultMessage: '{typicalCount, plural, one {message} other {messages}}',
+                    values: { typicalCount: anomaly.typicalLogEntryRate },
+                  }
+                )}`}
+                description={i18n.translate(
+                  'xpack.infra.logs.analysis.anomaliesExpandedRowTypicalRateDescription',
+                  {
+                    defaultMessage: 'Typical',
+                  }
+                )}
+              />
+            </EuiFlexItem>
+            <EuiFlexItem grow={false}>
+              <EuiStat
+                titleSize="s"
+                title={`${numeral(anomaly.actualLogEntryRate).format('0.00a')} ${i18n.translate(
+                  'xpack.infra.logs.analysis.anomaliesExpandedRowActualRateTitle',
+                  {
+                    defaultMessage: '{actualCount, plural, one {message} other {messages}}',
+                    values: { actualCount: anomaly.actualLogEntryRate },
+                  }
+                )}`}
+                description={i18n.translate(
+                  'xpack.infra.logs.analysis.anomaliesExpandedRowActualRateDescription',
+                  {
+                    defaultMessage: 'Actual',
+                  }
+                )}
+              />
+            </EuiFlexItem>
+          </EuiFlexGroup>
+        </EuiFlexItem>
+      </ExpandedContentWrapper>
+    </>
  );
};
+
+const ExpandedContentWrapper = euiStyled(EuiFlexGroup)`
+  overflow: hidden;
+`;

View file

@@ -9,23 +9,15 @@ import {
  EuiFlexGroup,
  EuiFlexItem,
  EuiSpacer,
-  EuiStat,
  EuiTitle,
  EuiLoadingSpinner,
} from '@elastic/eui';
-import numeral from '@elastic/numeral';
import { i18n } from '@kbn/i18n';
import React, { useMemo } from 'react';
import { euiStyled } from '../../../../../../../observability/public';
import { LogEntryRateResults } from '../../use_log_entry_rate_results';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { formatAnomalyScore } from '../../../../../../common/log_analysis';
-import {
-  getAnnotationsForAll,
-  getLogEntryRateCombinedSeries,
-  getTopAnomalyScoreAcrossAllPartitions,
-} from '../helpers/data_formatters';
+import { getAnnotationsForAll, getLogEntryRateCombinedSeries } from '../helpers/data_formatters';
import { AnomaliesChart } from './chart';
import { AnomaliesTable } from './table';
import { RecreateJobButton } from '../../../../../components/logging/log_analysis_job_status';
@@ -67,14 +59,6 @@ export const AnomaliesResults: React.FunctionComponent<{
    [results]
  );
-  const topAnomalyScore = useMemo(
-    () =>
-      results && results.histogramBuckets
-        ? getTopAnomalyScoreAcrossAllPartitions(results)
-        : undefined,
-    [results]
-  );
  return (
    <>
      <EuiFlexGroup alignItems="center" gutterSize="s">
@@ -124,7 +108,7 @@
      ) : (
        <>
          <EuiFlexGroup>
-            <EuiFlexItem grow={8}>
+            <EuiFlexItem>
              <AnomaliesChart
                chartId="overall"
                setTimeRange={setTimeRange}
@@ -134,30 +118,6 @@
                renderAnnotationTooltip={renderAnnotationTooltip}
              />
            </EuiFlexItem>
-            <EuiFlexItem grow={2}>
-              <EuiStat
-                title={numeral(results.totalNumberOfLogEntries).format('0.00a')}
-                titleSize="m"
-                description={i18n.translate(
-                  'xpack.infra.logs.analysis.overallAnomaliesNumberOfLogEntriesDescription',
-                  {
-                    defaultMessage: 'Number of log entries',
-                  }
-                )}
-                reverse
-              />
-              <EuiStat
-                title={topAnomalyScore ? formatAnomalyScore(topAnomalyScore) : null}
-                titleSize="m"
-                description={i18n.translate(
-                  'xpack.infra.logs.analysis.overallAnomaliesTopAnomalyScoreDescription',
-                  {
-                    defaultMessage: 'Max anomaly score',
-                  }
-                )}
-                reverse
-              />
-            </EuiFlexItem>
          </EuiFlexGroup>
          <EuiSpacer size="l" />
          <AnomaliesTable
@@ -177,6 +137,13 @@ const title = i18n.translate('xpack.infra.logs.analysis.anomaliesSectionTitle',
  defaultMessage: 'Anomalies',
});

+const loadingAriaLabel = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
+  { defaultMessage: 'Loading anomalies' }
+);
+
+const LoadingOverlayContent = () => <EuiLoadingSpinner size="xl" aria-label={loadingAriaLabel} />;
+
interface ParsedAnnotationDetails {
  anomalyScoresByPartition: Array<{ partitionName: string; maximumAnomalyScore: number }>;
}
@@ -222,10 +189,3 @@ const renderAnnotationTooltip = (details?: string) => {
const TooltipWrapper = euiStyled('div')`
  white-space: nowrap;
`;
-
-const loadingAriaLabel = i18n.translate(
-  'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
-  { defaultMessage: 'Loading anomalies' }
-);
-
-const LoadingOverlayContent = () => <EuiLoadingSpinner size="xl" aria-label={loadingAriaLabel} />;

View file

@@ -0,0 +1,291 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { useMemo, useCallback, useState } from 'react';
import moment from 'moment';
import { encode } from 'rison-node';
import { i18n } from '@kbn/i18n';
import { euiStyled } from '../../../../../../../observability/public';
import { getFriendlyNameForPartitionId } from '../../../../../../common/log_analysis';
import {
LogEntryColumn,
LogEntryFieldColumn,
LogEntryMessageColumn,
LogEntryRowWrapper,
LogEntryTimestampColumn,
LogEntryContextMenu,
LogEntryColumnWidths,
iconColumnId,
} from '../../../../../components/logging/log_text_stream';
import {
LogColumnHeadersWrapper,
LogColumnHeader,
} from '../../../../../components/logging/log_text_stream/column_headers';
import { useLinkProps } from '../../../../../hooks/use_link_props';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import { partitionField } from '../../../../../../common/log_analysis/job_parameters';
import { getEntitySpecificSingleMetricViewerLink } from '../../../../../components/logging/log_analysis_results/analyze_in_ml_button';
import { LogEntryRateExample } from '../../../../../../common/http_api/log_analysis/results';
import {
LogColumnConfiguration,
isTimestampLogColumnConfiguration,
isFieldLogColumnConfiguration,
isMessageLogColumnConfiguration,
} from '../../../../../utils/source_configuration';
import { localizedDate } from '../../../../../../common/formatters/datetime';
export const exampleMessageScale = 'medium' as const;
export const exampleTimestampFormat = 'time' as const;
const MENU_LABEL = i18n.translate('xpack.infra.logAnomalies.logEntryExamplesMenuLabel', {
defaultMessage: 'View actions for log entry',
});
const VIEW_IN_STREAM_LABEL = i18n.translate(
'xpack.infra.logs.analysis.logEntryExamplesViewInStreamLabel',
{
defaultMessage: 'View in stream',
}
);
const VIEW_ANOMALY_IN_ML_LABEL = i18n.translate(
'xpack.infra.logs.analysis.logEntryExamplesViewAnomalyInMlLabel',
{
defaultMessage: 'View anomaly in machine learning',
}
);
type Props = LogEntryRateExample & {
timeRange: TimeRange;
jobId: string;
};
export const LogEntryRateExampleMessage: React.FunctionComponent<Props> = ({
id,
dataset,
message,
timestamp,
tiebreaker,
timeRange,
jobId,
}) => {
const [isHovered, setIsHovered] = useState(false);
const [isMenuOpen, setIsMenuOpen] = useState(false);
const openMenu = useCallback(() => setIsMenuOpen(true), []);
const closeMenu = useCallback(() => setIsMenuOpen(false), []);
const setItemIsHovered = useCallback(() => setIsHovered(true), []);
const setItemIsNotHovered = useCallback(() => setIsHovered(false), []);
// the dataset must be encoded for the field column and the empty value must
// be turned into a user-friendly value
const encodedDatasetFieldValue = useMemo(
() => JSON.stringify(getFriendlyNameForPartitionId(dataset)),
[dataset]
);
const viewInStreamLinkProps = useLinkProps({
app: 'logs',
pathname: 'stream',
search: {
logPosition: encode({
end: moment(timeRange.endTime).format('YYYY-MM-DDTHH:mm:ss.SSSZ'),
position: { tiebreaker, time: timestamp },
start: moment(timeRange.startTime).format('YYYY-MM-DDTHH:mm:ss.SSSZ'),
streamLive: false,
}),
flyoutOptions: encode({
surroundingLogsId: id,
}),
logFilter: encode({
expression: `${partitionField}: ${dataset}`,
kind: 'kuery',
}),
},
});
const viewAnomalyInMachineLearningLinkProps = useLinkProps(
getEntitySpecificSingleMetricViewerLink(jobId, timeRange, {
[partitionField]: dataset,
})
);
const menuItems = useMemo(() => {
if (!viewInStreamLinkProps.onClick || !viewAnomalyInMachineLearningLinkProps.onClick) {
return undefined;
}
return [
{
label: VIEW_IN_STREAM_LABEL,
onClick: viewInStreamLinkProps.onClick,
href: viewInStreamLinkProps.href,
},
{
label: VIEW_ANOMALY_IN_ML_LABEL,
onClick: viewAnomalyInMachineLearningLinkProps.onClick,
href: viewAnomalyInMachineLearningLinkProps.href,
},
];
}, [viewInStreamLinkProps, viewAnomalyInMachineLearningLinkProps]);
return (
<LogEntryRowWrapper
scale={exampleMessageScale}
onMouseEnter={setItemIsHovered}
onMouseLeave={setItemIsNotHovered}
>
<LogEntryColumn {...columnWidths[timestampColumnId]}>
<LogEntryTimestampColumn format={exampleTimestampFormat} time={timestamp} />
</LogEntryColumn>
<LogEntryColumn {...columnWidths[messageColumnId]}>
<LogEntryMessageColumn
columnValue={{
columnId: messageColumnId,
message: [{ field: 'message', value: message, highlights: [] }],
}}
highlights={noHighlights}
isActiveHighlight={false}
wrapMode="none"
/>
</LogEntryColumn>
<LogEntryColumn {...columnWidths[datasetColumnId]}>
<LogEntryFieldColumn
columnValue={{
columnId: datasetColumnId,
field: 'event.dataset',
value: encodedDatasetFieldValue,
highlights: [],
}}
highlights={noHighlights}
isActiveHighlight={false}
wrapMode="none"
/>
</LogEntryColumn>
<LogEntryColumn
key="logColumn iconLogColumn iconLogColumn:details"
{...columnWidths[iconColumnId]}
>
{(isHovered || isMenuOpen) && menuItems ? (
<LogEntryContextMenu
aria-label={MENU_LABEL}
isOpen={isMenuOpen}
onOpen={openMenu}
onClose={closeMenu}
items={menuItems}
/>
) : null}
</LogEntryColumn>
</LogEntryRowWrapper>
);
};
const noHighlights: never[] = [];
const timestampColumnId = 'log-entry-example-timestamp-column' as const;
const messageColumnId = 'log-entry-examples-message-column' as const;
const datasetColumnId = 'log-entry-examples-dataset-column' as const;
const DETAIL_FLYOUT_ICON_MIN_WIDTH = 32;
const COLUMN_PADDING = 8;
export const columnWidths: LogEntryColumnWidths = {
[timestampColumnId]: {
growWeight: 0,
shrinkWeight: 0,
// w_score - w_padding = 130 px - 8 px
baseWidth: '122px',
},
[messageColumnId]: {
growWeight: 1,
shrinkWeight: 0,
baseWidth: '0%',
},
[datasetColumnId]: {
growWeight: 0,
shrinkWeight: 0,
baseWidth: '250px',
},
[iconColumnId]: {
growWeight: 0,
shrinkWeight: 0,
baseWidth: `${DETAIL_FLYOUT_ICON_MIN_WIDTH + 2 * COLUMN_PADDING}px`,
},
};
export const exampleMessageColumnConfigurations: LogColumnConfiguration[] = [
{
timestampColumn: {
id: timestampColumnId,
},
},
{
messageColumn: {
id: messageColumnId,
},
},
{
fieldColumn: {
field: 'event.dataset',
id: datasetColumnId,
},
},
];
export const LogEntryRateExampleMessageHeaders: React.FunctionComponent<{
dateTime: number;
}> = ({ dateTime }) => {
return (
<LogEntryRateExampleMessageHeadersWrapper>
<>
{exampleMessageColumnConfigurations.map((columnConfiguration) => {
if (isTimestampLogColumnConfiguration(columnConfiguration)) {
return (
<LogColumnHeader
key={columnConfiguration.timestampColumn.id}
columnWidth={columnWidths[columnConfiguration.timestampColumn.id]}
data-test-subj="logColumnHeader timestampLogColumnHeader"
>
{localizedDate(dateTime)}
</LogColumnHeader>
);
} else if (isMessageLogColumnConfiguration(columnConfiguration)) {
return (
<LogColumnHeader
columnWidth={columnWidths[columnConfiguration.messageColumn.id]}
data-test-subj="logColumnHeader messageLogColumnHeader"
key={columnConfiguration.messageColumn.id}
>
Message
</LogColumnHeader>
);
} else if (isFieldLogColumnConfiguration(columnConfiguration)) {
return (
<LogColumnHeader
columnWidth={columnWidths[columnConfiguration.fieldColumn.id]}
data-test-subj="logColumnHeader fieldLogColumnHeader"
key={columnConfiguration.fieldColumn.id}
>
{columnConfiguration.fieldColumn.field}
</LogColumnHeader>
);
}
})}
<LogColumnHeader
columnWidth={columnWidths[iconColumnId]}
data-test-subj="logColumnHeader contextMenuLogColumnHeader"
key={'icon-column-header'}
>
{null}
</LogColumnHeader>
</>
</LogEntryRateExampleMessageHeadersWrapper>
);
};
const LogEntryRateExampleMessageHeadersWrapper = euiStyled(LogColumnHeadersWrapper)`
border-bottom: none;
box-shadow: none;
padding-right: 0;
`;

View file

@@ -6,10 +6,10 @@
import { EuiBasicTable, EuiBasicTableColumn } from '@elastic/eui';
import { RIGHT_ALIGNMENT } from '@elastic/eui/lib/services';
+import moment from 'moment';
import { i18n } from '@kbn/i18n';
import React, { useCallback, useMemo, useState } from 'react';
import { useSet } from 'react-use';
-import { euiStyled } from '../../../../../../../observability/public';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import {
  formatAnomalyScore,
@@ -18,11 +18,16 @@
import { RowExpansionButton } from '../../../../../components/basic_table';
import { LogEntryRateResults } from '../../use_log_entry_rate_results';
import { AnomaliesTableExpandedRow } from './expanded_row';
+import { AnomalySeverityIndicator } from '../../../../../components/logging/log_analysis_results/anomaly_severity_indicator';
+import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';

interface TableItem {
-  partitionName: string;
-  partitionId: string;
-  topAnomalyScore: number;
+  id: string;
+  dataset: string;
+  datasetName: string;
+  anomalyScore: number;
+  anomalyMessage: string;
+  startTime: number;
}

interface SortingOptions {
@@ -32,73 +37,132 @@
  };
}

-const partitionColumnName = i18n.translate(
-  'xpack.infra.logs.analysis.anomaliesTablePartitionColumnName',
-  {
-    defaultMessage: 'Partition',
-  }
-);
-
-const maxAnomalyScoreColumnName = i18n.translate(
-  'xpack.infra.logs.analysis.anomaliesTableMaxAnomalyScoreColumnName',
-  {
-    defaultMessage: 'Max anomaly score',
-  }
-);
+interface PaginationOptions {
+  pageIndex: number;
+  pageSize: number;
+  totalItemCount: number;
+  pageSizeOptions: number[];
+  hidePerPageOptions: boolean;
+}
+
+const anomalyScoreColumnName = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesTableAnomalyScoreColumnName',
+  {
+    defaultMessage: 'Anomaly score',
+  }
+);
+
+const anomalyMessageColumnName = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesTableAnomalyMessageName',
+  {
+    defaultMessage: 'Anomaly',
+  }
+);
+
+const anomalyStartTimeColumnName = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesTableAnomalyStartTime',
+  {
+    defaultMessage: 'Start time',
+  }
+);
+
+const datasetColumnName = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesTableAnomalyDatasetName',
+  {
+    defaultMessage: 'Dataset',
+  }
+);
+
+const moreThanExpectedAnomalyMessage = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesTableMoreThanExpectedAnomalyMessage',
+  {
+    defaultMessage: 'More log messages in this dataset than expected',
+  }
+);
+
+const fewerThanExpectedAnomalyMessage = i18n.translate(
+  'xpack.infra.logs.analysis.anomaliesTableFewerThanExpectedAnomalyMessage',
+  {
+    defaultMessage: 'Fewer log messages in this dataset than expected',
+  }
+);
+
+const getAnomalyMessage = (actualRate: number, typicalRate: number): string => {
+  return actualRate < typicalRate
+    ? fewerThanExpectedAnomalyMessage
+    : moreThanExpectedAnomalyMessage;
+};

export const AnomaliesTable: React.FunctionComponent<{
  results: LogEntryRateResults;
  setTimeRange: (timeRange: TimeRange) => void;
  timeRange: TimeRange;
  jobId: string;
}> = ({ results, timeRange, setTimeRange, jobId }) => {
+  const [dateFormat] = useKibanaUiSetting('dateFormat', 'Y-MM-DD HH:mm:ss');
+
  const tableItems: TableItem[] = useMemo(() => {
-    return Object.entries(results.partitionBuckets).map(([key, value]) => {
+    return results.anomalies.map((anomaly) => {
      return {
-        // The real ID
-        partitionId: key,
-        // Note: EUI's table expanded rows won't work with a key of '' in itemIdToExpandedRowMap, so we have to use the friendly name here
-        partitionName: getFriendlyNameForPartitionId(key),
-        topAnomalyScore: formatAnomalyScore(value.topAnomalyScore),
+        id: anomaly.id,
+        dataset: anomaly.partitionId,
+        datasetName: getFriendlyNameForPartitionId(anomaly.partitionId),
+        anomalyScore: formatAnomalyScore(anomaly.anomalyScore),
+        anomalyMessage: getAnomalyMessage(anomaly.actualLogEntryRate, anomaly.typicalLogEntryRate),
+        startTime: anomaly.startTime,
      };
    });
  }, [results]);

-  const [expandedDatasetIds, { add: expandDataset, remove: collapseDataset }] = useSet<string>(
-    new Set()
-  );
+  const [expandedIds, { add: expandId, remove: collapseId }] = useSet<string>(new Set());

  const expandedDatasetRowContents = useMemo(
    () =>
-      [...expandedDatasetIds].reduce<Record<string, React.ReactNode>>(
-        (aggregatedDatasetRows, datasetId) => {
-          return {
-            ...aggregatedDatasetRows,
-            [getFriendlyNameForPartitionId(datasetId)]: (
-              <AnomaliesTableExpandedRow
-                partitionId={datasetId}
-                results={results}
-                setTimeRange={setTimeRange}
-                timeRange={timeRange}
-                jobId={jobId}
-              />
-            ),
-          };
-        },
-        {}
-      ),
-    [expandedDatasetIds, jobId, results, setTimeRange, timeRange]
+      [...expandedIds].reduce<Record<string, React.ReactNode>>((aggregatedDatasetRows, id) => {
+        const anomaly = results.anomalies.find((_anomaly) => _anomaly.id === id);
+
+        return {
+          ...aggregatedDatasetRows,
+          [id]: anomaly ? (
+            <AnomaliesTableExpandedRow anomaly={anomaly} timeRange={timeRange} jobId={jobId} />
+          ) : null,
+        };
+      }, {}),
+    [expandedIds, results, timeRange, jobId]
  );

  const [sorting, setSorting] = useState<SortingOptions>({
    sort: {
-      field: 'topAnomalyScore',
+      field: 'anomalyScore',
      direction: 'desc',
    },
  });

+  const [_pagination, setPagination] = useState<PaginationOptions>({
+    pageIndex: 0,
+    pageSize: 20,
+    totalItemCount: results.anomalies.length,
+    pageSizeOptions: [10, 20, 50],
+    hidePerPageOptions: false,
+  });
+
+  const paginationOptions = useMemo(() => {
+    return {
+      ..._pagination,
+      totalItemCount: results.anomalies.length,
+    };
+  }, [_pagination, results]);
+
  const handleTableChange = useCallback(
-    ({ sort = {} }) => {
+    ({ page = {}, sort = {} }) => {
+      const { index, size } = page;
+      setPagination((currentPagination) => {
+        return {
+          ...currentPagination,
+          pageIndex: index,
+          pageSize: size,
+        };
+      });
      const { field, direction } = sort;
      setSorting({
        sort: {
@@ -107,33 +171,58 @@
        },
      });
    },
-    [setSorting]
+    [setSorting, setPagination]
  );

  const sortedTableItems = useMemo(() => {
    let sortedItems: TableItem[] = [];
-    if (sorting.sort.field === 'partitionName') {
-      sortedItems = tableItems.sort((a, b) => (a.partitionId > b.partitionId ? 1 : -1));
-    } else if (sorting.sort.field === 'topAnomalyScore') {
-      sortedItems = tableItems.sort((a, b) => a.topAnomalyScore - b.topAnomalyScore);
+    if (sorting.sort.field === 'datasetName') {
+      sortedItems = tableItems.sort((a, b) => (a.datasetName > b.datasetName ? 1 : -1));
+    } else if (sorting.sort.field === 'anomalyScore') {
+      sortedItems = tableItems.sort((a, b) => a.anomalyScore - b.anomalyScore);
+    } else if (sorting.sort.field === 'startTime') {
+      sortedItems = tableItems.sort((a, b) => a.startTime - b.startTime);
    }
    return sorting.sort.direction === 'asc' ? sortedItems : sortedItems.reverse();
  }, [tableItems, sorting]);

+  const pageOfItems: TableItem[] = useMemo(() => {
+    const { pageIndex, pageSize } = paginationOptions;
+    return sortedTableItems.slice(pageIndex * pageSize, pageIndex * pageSize + pageSize);
+  }, [paginationOptions, sortedTableItems]);
+
  const columns: Array<EuiBasicTableColumn<TableItem>> = useMemo(
    () => [
      {
-        field: 'partitionName',
-        name: partitionColumnName,
-        sortable: true,
-        truncateText: true,
-      },
-      {
-        field: 'topAnomalyScore',
-        name: maxAnomalyScoreColumnName,
+        field: 'anomalyScore',
+        name: anomalyScoreColumnName,
        sortable: true,
        truncateText: true,
        dataType: 'number' as const,
+        width: '130px',
+        render: (anomalyScore: number) => <AnomalySeverityIndicator anomalyScore={anomalyScore} />,
+      },
+      {
+        field: 'anomalyMessage',
+        name: anomalyMessageColumnName,
+        sortable: false,
+        truncateText: true,
+      },
+      {
+        field: 'startTime',
+        name: anomalyStartTimeColumnName,
+        sortable: true,
+        truncateText: true,
+        width: '230px',
+        render: (startTime: number) => moment(startTime).format(dateFormat),
+      },
+      {
+        field: 'datasetName',
+        name: datasetColumnName,
+        sortable: true,
+        truncateText: true,
+        width: '200px',
      },
      {
        align: RIGHT_ALIGNMENT,
@@ -141,33 +230,28 @@
        isExpander: true,
        render: (item: TableItem) => (
          <RowExpansionButton
-            isExpanded={expandedDatasetIds.has(item.partitionId)}
-            item={item.partitionId}
-            onExpand={expandDataset}
-            onCollapse={collapseDataset}
+            isExpanded={expandedIds.has(item.id)}
+            item={item.id}
+            onExpand={expandId}
+            onCollapse={collapseId}
          />
        ),
      },
    ],
-    [collapseDataset, expandDataset, expandedDatasetIds]
+    [collapseId, expandId, expandedIds, dateFormat]
  );

  return (
-    <StyledEuiBasicTable
-      items={sortedTableItems}
-      itemId="partitionName"
+    <EuiBasicTable
+      items={pageOfItems}
+      itemId="id"
      itemIdToExpandedRowMap={expandedDatasetRowContents}
      isExpandable={true}
      hasActions={true}
      columns={columns}
+      pagination={paginationOptions}
      sorting={sorting}
      onChange={handleTableChange}
    />
  );
};
-
-const StyledEuiBasicTable: typeof EuiBasicTable = euiStyled(EuiBasicTable as any)`
-  & .euiTable {
-    table-layout: auto;
-  }
-` as any; // eslint-disable-line @typescript-eslint/no-explicit-any

View file

@@ -0,0 +1,47 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import {
getLogEntryRateExamplesRequestPayloadRT,
getLogEntryRateExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
export const callGetLogEntryRateExamplesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
dataset: string,
exampleCount: number
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryRateExamplesRequestPayloadRT.encode({
data: {
dataset,
exampleCount,
sourceId,
timeRange: {
startTime,
endTime,
},
},
})
),
});
return pipe(
getLogEntryRateExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
};

View file

@@ -0,0 +1,63 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useMemo, useState } from 'react';
import { LogEntryRateExample } from '../../../../common/http_api';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryRateExamplesAPI } from './service_calls/get_log_entry_rate_examples';
export const useLogEntryRateExamples = ({
dataset,
endTime,
exampleCount,
sourceId,
startTime,
}: {
dataset: string;
endTime: number;
exampleCount: number;
sourceId: string;
startTime: number;
}) => {
const [logEntryRateExamples, setLogEntryRateExamples] = useState<LogEntryRateExample[]>([]);
const [getLogEntryRateExamplesRequest, getLogEntryRateExamples] = useTrackedPromise(
{
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetLogEntryRateExamplesAPI(
sourceId,
startTime,
endTime,
dataset,
exampleCount
);
},
onResolve: ({ data: { examples } }) => {
setLogEntryRateExamples(examples);
},
},
[dataset, endTime, exampleCount, sourceId, startTime]
);
const isLoadingLogEntryRateExamples = useMemo(
() => getLogEntryRateExamplesRequest.state === 'pending',
[getLogEntryRateExamplesRequest.state]
);
const hasFailedLoadingLogEntryRateExamples = useMemo(
() => getLogEntryRateExamplesRequest.state === 'rejected',
[getLogEntryRateExamplesRequest.state]
);
return {
getLogEntryRateExamples,
hasFailedLoadingLogEntryRateExamples,
isLoadingLogEntryRateExamples,
logEntryRateExamples,
};
};
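
A hedged usage sketch of the hook follows: a hypothetical component that fetches examples for one dataset and time span and reports the request state, much like the expanded table row in this PR does. The component name, the rendered copy, and the useMount call are illustrative assumptions, not part of the change.

import React from 'react';
import { useMount } from 'react-use';
import { useLogEntryRateExamples } from './use_log_entry_rate_examples';

// Hypothetical consumer: load up to five example log entries for a dataset
// within a time span and summarise the request state.
export const ExampleCountSummary: React.FC<{
  dataset: string;
  sourceId: string;
  startTime: number;
  endTime: number;
}> = ({ dataset, sourceId, startTime, endTime }) => {
  const {
    getLogEntryRateExamples,
    hasFailedLoadingLogEntryRateExamples,
    isLoadingLogEntryRateExamples,
    logEntryRateExamples,
  } = useLogEntryRateExamples({ dataset, endTime, exampleCount: 5, sourceId, startTime });

  // Kick off the initial fetch once on mount; the same callback can be reused
  // for manual reloads (for example from a "Reload" button).
  useMount(() => {
    getLogEntryRateExamples();
  });

  if (isLoadingLogEntryRateExamples) {
    return <p>Loading examples</p>;
  }
  if (hasFailedLoadingLogEntryRateExamples) {
    return <p>Failed to load examples</p>;
  }
  return <p>{logEntryRateExamples.length} example log entries loaded</p>;
};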

View file

@@ -10,6 +10,7 @@ import {
  GetLogEntryRateSuccessResponsePayload,
  LogEntryRateHistogramBucket,
  LogEntryRatePartition,
+  LogEntryRateAnomaly,
} from '../../../../common/http_api/log_analysis';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryRateAPI } from './service_calls/get_log_entry_rate';
@@ -23,11 +24,16 @@ type PartitionRecord = Record<
  { buckets: PartitionBucket[]; topAnomalyScore: number; totalNumberOfLogEntries: number }
>;

+export type AnomalyRecord = LogEntryRateAnomaly & {
+  partitionId: string;
+};
+
export interface LogEntryRateResults {
  bucketDuration: number;
  totalNumberOfLogEntries: number;
  histogramBuckets: LogEntryRateHistogramBucket[];
  partitionBuckets: PartitionRecord;
+  anomalies: AnomalyRecord[];
}

export const useLogEntryRateResults = ({
@@ -55,6 +61,7 @@ export const useLogEntryRateResults = ({
        totalNumberOfLogEntries: data.totalNumberOfLogEntries,
        histogramBuckets: data.histogramBuckets,
        partitionBuckets: formatLogEntryRateResultsByPartition(data),
+        anomalies: formatLogEntryRateResultsByAllAnomalies(data),
      });
    },
    onReject: () => {
@@ -117,3 +124,23 @@ const formatLogEntryRateResultsByPartition = (
  return resultsByPartition;
};
const formatLogEntryRateResultsByAllAnomalies = (
results: GetLogEntryRateSuccessResponsePayload['data']
): AnomalyRecord[] => {
return results.histogramBuckets.reduce<AnomalyRecord[]>((anomalies, bucket) => {
return bucket.partitions.reduce<AnomalyRecord[]>((_anomalies, partition) => {
if (partition.anomalies.length > 0) {
partition.anomalies.forEach((anomaly) => {
_anomalies.push({
partitionId: partition.partitionId,
...anomaly,
});
});
return _anomalies;
} else {
return _anomalies;
}
}, anomalies);
}, []);
};
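
As an aside, the nested reduce above amounts to flattening every partition of every histogram bucket into one list of anomaly records tagged with their partition. A minimal equivalent sketch using Array.prototype.flatMap (assuming an ES2019 lib target), shown only to clarify the shape of the data:

// Equivalent formulation: for each histogram bucket and partition, emit one
// AnomalyRecord per anomaly, tagged with the partition it came from.
const flattenAnomalies = (
  results: GetLogEntryRateSuccessResponsePayload['data']
): AnomalyRecord[] =>
  results.histogramBuckets.flatMap((bucket) =>
    bucket.partitions.flatMap((partition) =>
      partition.anomalies.map((anomaly) => ({
        partitionId: partition.partitionId,
        ...anomaly,
      }))
    )
  );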

View file

@@ -15,6 +15,7 @@ import {
  initGetLogEntryCategoryDatasetsRoute,
  initGetLogEntryCategoryExamplesRoute,
  initGetLogEntryRateRoute,
+  initGetLogEntryRateExamplesRoute,
  initValidateLogAnalysisDatasetsRoute,
  initValidateLogAnalysisIndicesRoute,
} from './routes/log_analysis';
@@ -56,6 +57,7 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
  initValidateLogAnalysisDatasetsRoute(libs);
  initValidateLogAnalysisIndicesRoute(libs);
  initLogEntriesRoute(libs);
+  initGetLogEntryRateExamplesRoute(libs);
  initLogEntriesHighlightsRoute(libs);
  initLogEntriesSummaryRoute(libs);
  initLogEntriesSummaryHighlightsRoute(libs);

View file

@@ -7,16 +7,30 @@
import { pipe } from 'fp-ts/lib/pipeable';
import { map, fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
-import { getJobId } from '../../../common/log_analysis';
+import { RequestHandlerContext } from 'src/core/server';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
-import { NoLogAnalysisResultsIndexError } from './errors';
import {
  logRateModelPlotResponseRT,
  createLogEntryRateQuery,
  LogRateModelPlotBucket,
  CompositeTimestampPartitionKey,
} from './queries';
-import { MlSystem } from '../../types';
+import { startTracingSpan } from '../../../common/performance_tracing';
+import { decodeOrThrow } from '../../../common/runtime_types';
+import { getJobId, jobCustomSettingsRT } from '../../../common/log_analysis';
+import {
+  createLogEntryRateExamplesQuery,
+  logEntryRateExamplesResponseRT,
+} from './queries/log_entry_rate_examples';
+import {
+  InsufficientLogAnalysisMlJobConfigurationError,
+  NoLogAnalysisMlJobError,
+  NoLogAnalysisResultsIndexError,
+} from './errors';
+import { InfraSource } from '../sources';
+import type { MlSystem } from '../../types';
+import { InfraRequestHandlerContext } from '../../types';
+import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';

const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
@@ -73,6 +87,7 @@ export async function getLogEntryRateBuckets(
    partitions: Array<{
      analysisBucketCount: number;
      anomalies: Array<{
+        id: string;
        actualLogEntryRate: number;
        anomalyScore: number;
        duration: number;
@@ -91,7 +106,8 @@ export async function getLogEntryRateBuckets(
      const partition = {
        analysisBucketCount: timestampPartitionBucket.filter_model_plot.doc_count,
        anomalies: timestampPartitionBucket.filter_records.top_hits_record.hits.hits.map(
-          ({ _source: record }) => ({
+          ({ _id, _source: record }) => ({
+            id: _id,
            actualLogEntryRate: record.actual[0],
            anomalyScore: record.record_score,
            duration: record.bucket_span * 1000,
@@ -127,3 +143,130 @@ export async function getLogEntryRateBuckets(
    }
  }, []);
}
export async function getLogEntryRateExamples(
context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
sourceId: string,
startTime: number,
endTime: number,
dataset: string,
exampleCount: number,
sourceConfiguration: InfraSource,
callWithRequest: KibanaFramework['callWithRequest']
) {
const finalizeLogEntryRateExamplesSpan = startTracingSpan(
'get log entry rate example log entries'
);
const jobId = getJobId(context.infra.spaceId, sourceId, 'log-entry-rate');
const {
mlJob,
timing: { spans: fetchMlJobSpans },
} = await fetchMlJob(context, jobId);
const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
const indices = customSettings?.logs_source_config?.indexPattern;
const timestampField = customSettings?.logs_source_config?.timestampField;
const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
if (indices == null || timestampField == null) {
throw new InsufficientLogAnalysisMlJobConfigurationError(
`Failed to find index configuration for ml job ${jobId}`
);
}
const {
examples,
timing: { spans: fetchLogEntryRateExamplesSpans },
} = await fetchLogEntryRateExamples(
context,
indices,
timestampField,
tiebreakerField,
startTime,
endTime,
dataset,
exampleCount,
callWithRequest
);
const logEntryRateExamplesSpan = finalizeLogEntryRateExamplesSpan();
return {
data: examples,
timing: {
spans: [logEntryRateExamplesSpan, ...fetchMlJobSpans, ...fetchLogEntryRateExamplesSpans],
},
};
}
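
For reference, the index pattern and timestamp field above are read from the ML job's custom settings as validated by jobCustomSettingsRT. A minimal sketch of what such settings might look like (field values are hypothetical; only the keys referenced in this function are shown):

// Hypothetical custom_settings payload on the log-entry-rate ML job
const exampleCustomSettings = {
  logs_source_config: {
    indexPattern: 'filebeat-*',
    timestampField: '@timestamp',
  },
};
// With these settings, getLogEntryRateExamples would resolve indices to 'filebeat-*' and the
// timestamp field to '@timestamp'; if either value is missing it throws
// InsufficientLogAnalysisMlJobConfigurationError as shown above.
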
export async function fetchLogEntryRateExamples(
context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
indices: string,
timestampField: string,
tiebreakerField: string,
startTime: number,
endTime: number,
dataset: string,
exampleCount: number,
callWithRequest: KibanaFramework['callWithRequest']
) {
const finalizeEsSearchSpan = startTracingSpan('Fetch log rate examples from ES');
const {
hits: { hits },
} = decodeOrThrow(logEntryRateExamplesResponseRT)(
await callWithRequest(
context,
'search',
createLogEntryRateExamplesQuery(
indices,
timestampField,
tiebreakerField,
startTime,
endTime,
dataset,
exampleCount
)
)
);
const esSearchSpan = finalizeEsSearchSpan();
return {
examples: hits.map((hit) => ({
id: hit._id,
dataset,
message: hit._source.message ?? '',
timestamp: hit.sort[0],
tiebreaker: hit.sort[1],
})),
timing: {
spans: [esSearchSpan],
},
};
}
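
As a minimal sketch (document values are hypothetical), a single hit decoded by logEntryRateExamplesResponseRT maps onto an example entry as follows, with the sort values supplying the timestamp and tiebreaker:

// Hypothetical ES hit matching logEntryRateExampleHitRT
const hit = {
  _id: 'gk7s3XIBexampleDocId',
  _source: { event: { dataset: 'nginx.access' }, message: 'GET /index.html HTTP/1.1 200' },
  sort: [1593772800000, 7],
};

// Equivalent of the hits.map(...) above
const example = {
  id: hit._id,
  dataset: 'nginx.access',
  message: hit._source.message ?? '',
  timestamp: hit.sort[0], // value of the configured timestamp field
  tiebreaker: hit.sort[1], // value of the configured tiebreaker field
};
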
async function fetchMlJob(
context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
logEntryRateJobId: string
) {
const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
const {
jobs: [mlJob],
} = await context.infra.mlAnomalyDetectors.jobs(logEntryRateJobId);
const mlGetJobSpan = finalizeMlGetJobSpan();
if (mlJob == null) {
throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryRateJobId}.`);
}
return {
mlJob,
timing: {
spans: [mlGetJobSpan],
},
};
}

View file

@@ -143,6 +143,7 @@ export const logRateModelPlotBucketRT = rt.type({
hits: rt.type({
hits: rt.array(
rt.type({
_id: rt.string,
_source: logRateMlRecordRT,
})
),
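
For context, a decoded record hit now carries the document id alongside the ML record fields consumed earlier in this change (values hypothetical; only the fields referenced above are shown):

// Hypothetical record hit accepted by the updated runtime type
const decodedRecordHit = {
  _id: 'record-abc123', // now validated and mapped to the anomaly's `id`
  _source: {
    actual: [42], // -> actualLogEntryRate
    record_score: 75.3, // -> anomalyScore
    bucket_span: 900, // duration above is computed as bucket_span * 1000
  },
};
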

View file

@@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
import { defaultRequestParameters } from './common';
import { partitionField } from '../../../../common/log_analysis';
export const createLogEntryRateExamplesQuery = (
indices: string,
timestampField: string,
tiebreakerField: string,
startTime: number,
endTime: number,
dataset: string,
exampleCount: number
) => ({
...defaultRequestParameters,
body: {
query: {
bool: {
filter: [
{
range: {
[timestampField]: {
gte: startTime,
lte: endTime,
},
},
},
{
term: {
[partitionField]: dataset,
},
},
],
},
},
sort: [{ [timestampField]: 'asc' }, { [tiebreakerField]: 'asc' }],
},
_source: ['event.dataset', 'message'],
index: indices,
size: exampleCount,
});
export const logEntryRateExampleHitRT = rt.type({
_id: rt.string,
_source: rt.partial({
event: rt.partial({
dataset: rt.string,
}),
message: rt.string,
}),
sort: rt.tuple([rt.number, rt.number]),
});
export type LogEntryRateExampleHit = rt.TypeOf<typeof logEntryRateExampleHitRT>;
export const logEntryRateExamplesResponseRT = rt.intersection([
commonSearchSuccessResponseFieldsRT,
rt.type({
hits: rt.type({
hits: rt.array(logEntryRateExampleHitRT),
}),
}),
]);
export type LogEntryRateExamplesResponse = rt.TypeOf<typeof logEntryRateExamplesResponseRT>;
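
To illustrate the request shape (all argument values below are hypothetical), calling the builder produces a search whose body filters on the time range and the anomaly's dataset, sorted on the timestamp and tiebreaker fields:

// Hypothetical invocation; the real indices and field names come from the ML job configuration
const searchRequest = createLogEntryRateExamplesQuery(
  'filebeat-*', // indices
  '@timestamp', // timestampField
  '_doc', // tiebreakerField
  1593772800000, // startTime
  1593776400000, // endTime
  'nginx.access', // dataset
  5 // exampleCount
);
// searchRequest.index === 'filebeat-*', searchRequest.size === 5, and
// searchRequest.body.query.bool.filter holds the timestamp range plus the dataset term filter.
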

View file

@@ -8,3 +8,4 @@ export * from './log_entry_categories';
export * from './log_entry_category_datasets';
export * from './log_entry_category_examples';
export * from './log_entry_rate';
export * from './log_entry_rate_examples';

View file

@@ -0,0 +1,82 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Boom from 'boom';
import { createValidationFunction } from '../../../../common/runtime_types';
import { InfraBackendLibs } from '../../../lib/infra_types';
import { NoLogAnalysisResultsIndexError, getLogEntryRateExamples } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
import {
getLogEntryRateExamplesRequestPayloadRT,
getLogEntryRateExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../common/http_api/log_analysis';
export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
framework.registerRoute(
{
method: 'post',
path: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
validate: {
body: createValidationFunction(getLogEntryRateExamplesRequestPayloadRT),
},
},
framework.router.handleLegacyErrors(async (requestContext, request, response) => {
const {
data: {
dataset,
exampleCount,
sourceId,
timeRange: { startTime, endTime },
},
} = request.body;
const sourceConfiguration = await sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
try {
assertHasInfraMlPlugins(requestContext);
const { data: logEntryRateExamples, timing } = await getLogEntryRateExamples(
requestContext,
sourceId,
startTime,
endTime,
dataset,
exampleCount,
sourceConfiguration,
framework.callWithRequest
);
return response.ok({
body: getLogEntryRateExamplesSuccessReponsePayloadRT.encode({
data: {
examples: logEntryRateExamples,
},
timing,
}),
});
} catch (error) {
if (Boom.isBoom(error)) {
throw error;
}
if (error instanceof NoLogAnalysisResultsIndexError) {
return response.notFound({ body: { message: error.message } });
}
return response.customError({
statusCode: error.statusCode ?? 500,
body: {
message: error.message ?? 'An unexpected error occurred',
},
});
}
})
);
};
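
A consumer of this route would POST a body along the lines of the sketch below (values hypothetical; the authoritative shape is getLogEntryRateExamplesRequestPayloadRT) and receive back { data: { examples }, timing }:

// Hypothetical request body for LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH
const requestBody = {
  data: {
    sourceId: 'default',
    dataset: 'nginx.access',
    exampleCount: 5,
    timeRange: {
      startTime: 1593772800000,
      endTime: 1593776400000,
    },
  },
};
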

View file

@@ -7473,12 +7473,9 @@
"xpack.infra.logs.alerting.threshold.documentCountActionVariableDescription": "指定された条件と一致したログエントリ数",
"xpack.infra.logs.alerting.threshold.fired": "実行",
"xpack.infra.logs.analysis.analyzeInMlButtonLabel": "ML で分析",
"xpack.infra.logs.analysis.anomaliesExpandedRowNumberOfLogEntriesDescription": "ログエントリーの数です",
"xpack.infra.logs.analysis.anomaliesSectionLineSeriesName": "15 分ごとのログエントリー (平均)",
"xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel": "異常を読み込み中",
"xpack.infra.logs.analysis.anomaliesSectionTitle": "異常",
"xpack.infra.logs.analysis.anomaliesTableMaxAnomalyScoreColumnName": "最高異常スコア",
"xpack.infra.logs.analysis.anomaliesTablePartitionColumnName": "パーティション",
"xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle": "異常が検出されませんでした。",
"xpack.infra.logs.analysis.anomalySectionNoDataBody": "時間範囲を調整する必要があるかもしれません。",
"xpack.infra.logs.analysis.anomalySectionNoDataTitle": "表示するデータがありません。",
@@ -7505,9 +7502,6 @@
"xpack.infra.logs.analysis.mlUnavailableTitle": "この機能には機械学習が必要です",
"xpack.infra.logs.analysis.onboardingSuccessContent": "機械学習ロボットがデータの収集を開始するまでしばらくお待ちください。",
"xpack.infra.logs.analysis.onboardingSuccessTitle": "成功!",
"xpack.infra.logs.analysis.overallAnomaliesNumberOfLogEntriesDescription": "ログエントリーの数です",
"xpack.infra.logs.analysis.overallAnomaliesTopAnomalyScoreDescription": "最高異常スコア",
"xpack.infra.logs.analysis.overallAnomalyChartMaxScoresLabel": "最高異常スコア",
"xpack.infra.logs.analysis.partitionMaxAnomalyScoreAnnotationLabel": "最高異常スコア: {maxAnomalyScore}",
"xpack.infra.logs.analysis.recreateJobButtonLabel": "ML ジョブを再作成",
"xpack.infra.logs.analysis.setupStatusTryAgainButton": "再試行",
@@ -7552,10 +7546,6 @@
"xpack.infra.logs.logEntryCategories.countColumnTitle": "メッセージ数",
"xpack.infra.logs.logEntryCategories.datasetColumnTitle": "データセット",
"xpack.infra.logs.logEntryCategories.datasetFilterPlaceholder": "データセットでフィルター",
"xpack.infra.logs.logEntryCategories.exampleEmptyDescription": "選択した時間範囲内に例は見つかりませんでした。ログエントリー保持期間を長くするとメッセージサンプルの可用性が向上します。",
"xpack.infra.logs.logEntryCategories.exampleEmptyReloadButtonLabel": "再読み込み",
"xpack.infra.logs.logEntryCategories.exampleLoadingFailureDescription": "カテゴリーの例を読み込めませんでした。",
"xpack.infra.logs.logEntryCategories.exampleLoadingFailureRetryButtonLabel": "再試行",
"xpack.infra.logs.logEntryCategories.jobStatusLoadingMessage": "分類ジョブのステータスを確認中...",
"xpack.infra.logs.logEntryCategories.loadDataErrorTitle": "カテゴリーデータを読み込めませんでした",
"xpack.infra.logs.logEntryCategories.manyCategoriesWarningReasonDescription": "分析されたドキュメントごとのカテゴリ比率が{categoriesDocumentRatio, number }で、非常に高い値です。",

View file

@@ -7477,12 +7477,9 @@
"xpack.infra.logs.alerting.threshold.documentCountActionVariableDescription": "匹配所提供条件的日志条目数",
"xpack.infra.logs.alerting.threshold.fired": "已触发",
"xpack.infra.logs.analysis.analyzeInMlButtonLabel": "在 ML 中分析",
"xpack.infra.logs.analysis.anomaliesExpandedRowNumberOfLogEntriesDescription": "日志条目数",
"xpack.infra.logs.analysis.anomaliesSectionLineSeriesName": "每 15 分钟日志条目数(平均值)",
"xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel": "正在加载异常",
"xpack.infra.logs.analysis.anomaliesSectionTitle": "异常",
"xpack.infra.logs.analysis.anomaliesTableMaxAnomalyScoreColumnName": "最大异常分数",
"xpack.infra.logs.analysis.anomaliesTablePartitionColumnName": "分区",
"xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle": "未检测到任何异常。",
"xpack.infra.logs.analysis.anomalySectionNoDataBody": "您可能想调整时间范围。",
"xpack.infra.logs.analysis.anomalySectionNoDataTitle": "没有可显示的数据。",
@@ -7509,9 +7506,6 @@
"xpack.infra.logs.analysis.mlUnavailableTitle": "此功能需要 Machine Learning",
"xpack.infra.logs.analysis.onboardingSuccessContent": "请注意,我们的 Machine Learning 机器人若干分钟后才会开始收集数据。",
"xpack.infra.logs.analysis.onboardingSuccessTitle": "成功!",
"xpack.infra.logs.analysis.overallAnomaliesNumberOfLogEntriesDescription": "日志条目数",
"xpack.infra.logs.analysis.overallAnomaliesTopAnomalyScoreDescription": "最大异常分数",
"xpack.infra.logs.analysis.overallAnomalyChartMaxScoresLabel": "最大异常分数:",
"xpack.infra.logs.analysis.partitionMaxAnomalyScoreAnnotationLabel": "最大异常分数:{maxAnomalyScore}",
"xpack.infra.logs.analysis.recreateJobButtonLabel": "重新创建 ML 作业",
"xpack.infra.logs.analysis.setupStatusTryAgainButton": "重试",
@@ -7556,10 +7550,6 @@
"xpack.infra.logs.logEntryCategories.countColumnTitle": "消息计数",
"xpack.infra.logs.logEntryCategories.datasetColumnTitle": "数据集",
"xpack.infra.logs.logEntryCategories.datasetFilterPlaceholder": "按数据集筛选",
"xpack.infra.logs.logEntryCategories.exampleEmptyDescription": "选定时间范围内未找到任何示例。增大日志条目保留期限以改善消息样例可用性。",
"xpack.infra.logs.logEntryCategories.exampleEmptyReloadButtonLabel": "重新加载",
"xpack.infra.logs.logEntryCategories.exampleLoadingFailureDescription": "无法加载类别示例。",
"xpack.infra.logs.logEntryCategories.exampleLoadingFailureRetryButtonLabel": "重试",
"xpack.infra.logs.logEntryCategories.jobStatusLoadingMessage": "正在检查归类作业的状态......",
"xpack.infra.logs.logEntryCategories.loadDataErrorTitle": "无法加载类别数据",
"xpack.infra.logs.logEntryCategories.manyCategoriesWarningReasonDescription": "每个分析文档的类别比率非常高,达到 {categoriesDocumentRatio, number }。",