[Logs UI] Use the Super date picker in the log stream (#54280)
parent 7aa4651292
commit fe4c164681
@@ -12,11 +12,11 @@ export const LOG_ENTRIES_PATH = '/api/log_entries/entries';
 export const logEntriesBaseRequestRT = rt.intersection([
   rt.type({
     sourceId: rt.string,
-    startDate: rt.number,
-    endDate: rt.number,
+    startTimestamp: rt.number,
+    endTimestamp: rt.number,
   }),
   rt.partial({
-    query: rt.string,
+    query: rt.union([rt.string, rt.null]),
     size: rt.number,
   }),
 ]);
@@ -31,7 +31,7 @@ export const logEntriesAfterRequestRT = rt.intersection([
   rt.type({ after: rt.union([logEntriesCursorRT, rt.literal('first')]) }),
 ]);

-export const logEntriesCenteredRT = rt.intersection([
+export const logEntriesCenteredRequestRT = rt.intersection([
   logEntriesBaseRequestRT,
   rt.type({ center: logEntriesCursorRT }),
 ]);
@@ -40,38 +40,39 @@ export const logEntriesRequestRT = rt.union([
   logEntriesBaseRequestRT,
   logEntriesBeforeRequestRT,
   logEntriesAfterRequestRT,
-  logEntriesCenteredRT,
+  logEntriesCenteredRequestRT,
 ]);

 export type LogEntriesBaseRequest = rt.TypeOf<typeof logEntriesBaseRequestRT>;
 export type LogEntriesBeforeRequest = rt.TypeOf<typeof logEntriesBeforeRequestRT>;
 export type LogEntriesAfterRequest = rt.TypeOf<typeof logEntriesAfterRequestRT>;
 export type LogEntriesCenteredRequest = rt.TypeOf<typeof logEntriesCenteredRequestRT>;
 export type LogEntriesRequest = rt.TypeOf<typeof logEntriesRequestRT>;

-// JSON value
-const valueRT = rt.union([rt.string, rt.number, rt.boolean, rt.object, rt.null, rt.undefined]);
+export const logMessageConstantPartRT = rt.type({
+  constant: rt.string,
+});
+export const logMessageFieldPartRT = rt.type({
+  field: rt.string,
+  value: rt.unknown,
+  highlights: rt.array(rt.string),
+});

-export const logMessagePartRT = rt.union([
-  rt.type({
-    constant: rt.string,
-  }),
-  rt.type({
-    field: rt.string,
-    value: valueRT,
-    highlights: rt.array(rt.string),
-  }),
-]);
+export const logMessagePartRT = rt.union([logMessageConstantPartRT, logMessageFieldPartRT]);

-export const logColumnRT = rt.union([
-  rt.type({ columnId: rt.string, timestamp: rt.number }),
-  rt.type({
-    columnId: rt.string,
-    field: rt.string,
-    value: rt.union([rt.string, rt.undefined]),
-    highlights: rt.array(rt.string),
-  }),
-  rt.type({
-    columnId: rt.string,
-    message: rt.array(logMessagePartRT),
-  }),
-]);
+export const logTimestampColumnRT = rt.type({ columnId: rt.string, timestamp: rt.number });
+export const logFieldColumnRT = rt.type({
+  columnId: rt.string,
+  field: rt.string,
+  value: rt.unknown,
+  highlights: rt.array(rt.string),
+});
+export const logMessageColumnRT = rt.type({
+  columnId: rt.string,
+  message: rt.array(logMessagePartRT),
+});
+
+export const logColumnRT = rt.union([logTimestampColumnRT, logFieldColumnRT, logMessageColumnRT]);

 export const logEntryRT = rt.type({
   id: rt.string,
@@ -79,15 +80,20 @@ export const logEntryRT = rt.type({
   columns: rt.array(logColumnRT),
 });

-export type LogMessagepart = rt.TypeOf<typeof logMessagePartRT>;
+export type LogMessageConstantPart = rt.TypeOf<typeof logMessageConstantPartRT>;
+export type LogMessageFieldPart = rt.TypeOf<typeof logMessageFieldPartRT>;
+export type LogMessagePart = rt.TypeOf<typeof logMessagePartRT>;
+export type LogTimestampColumn = rt.TypeOf<typeof logTimestampColumnRT>;
+export type LogFieldColumn = rt.TypeOf<typeof logFieldColumnRT>;
+export type LogMessageColumn = rt.TypeOf<typeof logMessageColumnRT>;
 export type LogColumn = rt.TypeOf<typeof logColumnRT>;
 export type LogEntry = rt.TypeOf<typeof logEntryRT>;

 export const logEntriesResponseRT = rt.type({
   data: rt.type({
     entries: rt.array(logEntryRT),
-    topCursor: logEntriesCursorRT,
-    bottomCursor: logEntriesCursorRT,
+    topCursor: rt.union([logEntriesCursorRT, rt.null]),
+    bottomCursor: rt.union([logEntriesCursorRT, rt.null]),
   }),
 });

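For illustration, a minimal sketch (not part of this commit; the import path is illustrative) of what the reshaped request codec now accepts, using standard io-ts/fp-ts decoding:

import { isRight } from 'fp-ts/lib/Either';
import { logEntriesBaseRequestRT } from './entries';

// Timestamps are epoch milliseconds; `query` may now be a string or null.
const payload = {
  sourceId: 'default',
  startTimestamp: 1577836800000, // replaces the old `startDate`
  endTimestamp: 1577923200000, // replaces the old `endDate`
  query: null,
};

const result = logEntriesBaseRequestRT.decode(payload);
if (isRight(result)) {
  // result.right is typed as LogEntriesBaseRequest
}
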
@@ -9,7 +9,7 @@ import {
   logEntriesBaseRequestRT,
   logEntriesBeforeRequestRT,
   logEntriesAfterRequestRT,
-  logEntriesCenteredRT,
+  logEntriesCenteredRequestRT,
   logEntryRT,
 } from './entries';
 import { logEntriesCursorRT } from './common';
@@ -36,7 +36,7 @@ export const logEntriesHighlightsAfterRequestRT = rt.intersection([
 ]);

 export const logEntriesHighlightsCenteredRequestRT = rt.intersection([
-  logEntriesCenteredRT,
+  logEntriesCenteredRequestRT,
   highlightsRT,
 ]);

@@ -10,8 +10,8 @@ export const LOG_ENTRIES_SUMMARY_PATH = '/api/log_entries/summary';

 export const logEntriesSummaryRequestRT = rt.type({
   sourceId: rt.string,
-  startDate: rt.number,
-  endDate: rt.number,
+  startTimestamp: rt.number,
+  endTimestamp: rt.number,
   bucketSize: rt.number,
   query: rt.union([rt.string, rt.undefined, rt.null]),
 });

@@ -0,0 +1,73 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React, { useCallback } from 'react';
+import { EuiFlexGroup, EuiFlexItem, EuiSuperDatePicker, EuiButtonEmpty } from '@elastic/eui';
+import { FormattedMessage } from '@kbn/i18n/react';
+
+interface LogDatepickerProps {
+  startDateExpression: string;
+  endDateExpression: string;
+  isStreaming: boolean;
+  onUpdateDateRange?: (range: { startDateExpression: string; endDateExpression: string }) => void;
+  onStartStreaming?: () => void;
+  onStopStreaming?: () => void;
+}
+
+export const LogDatepicker: React.FC<LogDatepickerProps> = ({
+  startDateExpression,
+  endDateExpression,
+  isStreaming,
+  onUpdateDateRange,
+  onStartStreaming,
+  onStopStreaming,
+}) => {
+  const handleTimeChange = useCallback(
+    ({ start, end, isInvalid }) => {
+      if (onUpdateDateRange && !isInvalid) {
+        onUpdateDateRange({ startDateExpression: start, endDateExpression: end });
+      }
+    },
+    [onUpdateDateRange]
+  );
+
+  return (
+    <EuiFlexGroup gutterSize="s">
+      <EuiFlexItem>
+        <EuiSuperDatePicker
+          start={startDateExpression}
+          end={endDateExpression}
+          onTimeChange={handleTimeChange}
+          showUpdateButton={false}
+          // @ts-ignore: EuiSuperDatePicker doesn't expose the `isDisabled` prop, although it exists.
+          isDisabled={isStreaming}
+        />
+      </EuiFlexItem>
+      <EuiFlexItem grow={false}>
+        {isStreaming ? (
+          <EuiButtonEmpty
+            color="primary"
+            iconType="pause"
+            iconSide="left"
+            onClick={onStopStreaming}
+          >
+            <FormattedMessage
+              id="xpack.infra.logs.stopStreamingButtonLabel"
+              defaultMessage="Stop streaming"
+            />
+          </EuiButtonEmpty>
+        ) : (
+          <EuiButtonEmpty iconType="play" iconSide="left" onClick={onStartStreaming}>
+            <FormattedMessage
+              id="xpack.infra.logs.startStreamingButtonLabel"
+              defaultMessage="Stream live"
+            />
+          </EuiButtonEmpty>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
+  );
+};

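A hypothetical mounting of the new component (the handler bodies are placeholders, not from the commit); the expressions are datemath strings as used elsewhere in this change:

<LogDatepicker
  startDateExpression="now-1d"
  endDateExpression="now"
  isStreaming={false}
  onUpdateDateRange={({ startDateExpression, endDateExpression }) => {
    // push the new range into the log position state
  }}
  onStartStreaming={() => {
    // switch the stream into live tailing
  }}
  onStopStreaming={() => {
    // pause live tailing
  }}
/>
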
@@ -10,10 +10,10 @@ import { max } from 'lodash';
 import * as React from 'react';

 import { euiStyled } from '../../../../../observability/public';
-import { SummaryBucket } from './types';
+import { LogEntriesSummaryBucket } from '../../../../common/http_api';

 interface DensityChartProps {
-  buckets: SummaryBucket[];
+  buckets: LogEntriesSummaryBucket[];
   end: number;
   start: number;
   width: number;
@@ -38,36 +38,36 @@ export const DensityChart: React.FC<DensityChartProps> = ({
   const xMax = max(buckets.map(bucket => bucket.entriesCount)) || 0;
   const xScale = scaleLinear()
     .domain([0, xMax])
-    .range([0, width * (2 / 3)]);
+    .range([0, width]);

-  const path = area<SummaryBucket>()
+  const path = area<LogEntriesSummaryBucket>()
     .x0(xScale(0))
     .x1(bucket => xScale(bucket.entriesCount))
-    .y(bucket => yScale((bucket.start + bucket.end) / 2))
+    .y0(bucket => yScale(bucket.start))
+    .y1(bucket => yScale(bucket.end))
     .curve(curveMonotoneY);
-  const pathData = path(buckets);

-  const highestPathCoord = String(pathData)
-    .replace(/[^.0-9,]/g, ' ')
-    .split(/[ ,]/)
-    .reduce((result, num) => (Number(num) > result ? Number(num) : result), 0);
+  const firstBucket = buckets[0];
+  const lastBucket = buckets[buckets.length - 1];
+  const pathBuckets = [
+    // Make sure the graph starts at the count of the first point
+    { start, end: start, entriesCount: firstBucket.entriesCount },
+    ...buckets,
+    // Make sure the line ends at the height of the last point
+    { start: lastBucket.end, end: lastBucket.end, entriesCount: lastBucket.entriesCount },
+    // If the last point is not at the end of the minimap, make sure it doesn't extend indefinitely and goes to 0
+    { start: end, end, entriesCount: 0 },
+  ];
+  const pathData = path(pathBuckets);

   return (
-    <g transform={`translate(${width / 3}, 0)`}>
-      <DensityChartNegativeBackground
-        transform={`translate(${-width / 3}, 0)`}
-        width={width / 2}
-        height={highestPathCoord}
-      />
-      <DensityChartPositiveBackground width={width * (2 / 3)} height={highestPathCoord} />
+    <g>
+      <DensityChartPositiveBackground width={width} height={height} />
       <PositiveAreaPath d={pathData || ''} />
     </g>
   );
 };

-const DensityChartNegativeBackground = euiStyled.rect`
-  fill: ${props => props.theme.eui.euiColorEmptyShade};
-`;
-
 const DensityChartPositiveBackground = euiStyled.rect`
   fill: ${props =>
     props.theme.darkMode

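The padding buckets above pin the area path to the edges of the visible range. A reduced sketch with made-up numbers (not from the commit), assuming a [start, end] range of [0, 400]:

const start = 0;
const end = 400;
const buckets = [
  { start: 100, end: 200, entriesCount: 5 },
  { start: 200, end: 300, entriesCount: 8 },
];
const first = buckets[0];
const last = buckets[buckets.length - 1];
const pathBuckets = [
  { start, end: start, entriesCount: first.entriesCount }, // hold the first count from the very top
  ...buckets,
  { start: last.end, end: last.end, entriesCount: last.entriesCount }, // close the last bucket at its own height
  { start: end, end, entriesCount: 0 }, // then drop to zero instead of extending indefinitely
];
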
@@ -13,6 +13,7 @@ interface HighlightedIntervalProps {
   getPositionOfTime: (time: number) => number;
   start: number;
   end: number;
+  targetWidth: number;
   width: number;
   target: number | null;
 }
@@ -22,6 +23,7 @@ export const HighlightedInterval: React.FC<HighlightedIntervalProps> = ({
   end,
   getPositionOfTime,
   start,
+  targetWidth,
   width,
   target,
 }) => {
@@ -35,14 +37,14 @@ export const HighlightedInterval: React.FC<HighlightedIntervalProps> = ({
         <HighlightTargetMarker
           className={className}
           x1={0}
-          x2={width / 3}
+          x2={targetWidth}
           y1={yTarget}
           y2={yTarget}
         />
       )}
       <HighlightPolygon
         className={className}
-        points={` ${width / 3},${yStart} ${width},${yStart} ${width},${yEnd} ${width / 3},${yEnd}`}
+        points={` ${targetWidth},${yStart} ${width},${yStart} ${width},${yEnd} ${targetWidth},${yEnd}`}
       />
     </>
   );

@@ -13,42 +13,40 @@ import { DensityChart } from './density_chart';
 import { HighlightedInterval } from './highlighted_interval';
 import { SearchMarkers } from './search_markers';
 import { TimeRuler } from './time_ruler';
-import { SummaryBucket, SummaryHighlightBucket } from './types';
+import {
+  LogEntriesSummaryBucket,
+  LogEntriesSummaryHighlightsBucket,
+} from '../../../../common/http_api';

 interface Interval {
   end: number;
   start: number;
 }

-interface DragRecord {
-  startY: number;
-  currentY: number | null;
-}
-
 interface LogMinimapProps {
   className?: string;
   height: number;
   highlightedInterval: Interval | null;
   jumpToTarget: (params: LogEntryTime) => any;
-  intervalSize: number;
-  summaryBuckets: SummaryBucket[];
-  summaryHighlightBuckets?: SummaryHighlightBucket[];
+  summaryBuckets: LogEntriesSummaryBucket[];
+  summaryHighlightBuckets?: LogEntriesSummaryHighlightsBucket[];
   target: number | null;
+  start: number | null;
+  end: number | null;
   width: number;
 }

 interface LogMinimapState {
   target: number | null;
-  drag: DragRecord | null;
-  svgPosition: ClientRect;
   timeCursorY: number;
 }

-function calculateYScale(target: number | null, height: number, intervalSize: number) {
-  const domainStart = target ? target - intervalSize / 2 : 0;
-  const domainEnd = target ? target + intervalSize / 2 : 0;
+// Wide enough to fit "September"
+const TIMERULER_WIDTH = 50;
+
+function calculateYScale(start: number | null, end: number | null, height: number) {
   return scaleLinear()
-    .domain([domainStart, domainEnd])
+    .domain([start || 0, end || 0])
     .range([0, height]);
 }
@@ -58,103 +56,28 @@ export class LogMinimap extends React.Component<LogMinimapProps, LogMinimapState
     this.state = {
       timeCursorY: 0,
       target: props.target,
-      drag: null,
-      svgPosition: {
-        width: 0,
-        height: 0,
-        top: 0,
-        right: 0,
-        bottom: 0,
-        left: 0,
-      },
     };
   }

-  private dragTargetArea: SVGElement | null = null;
-
-  public static getDerivedStateFromProps({ target }: LogMinimapProps, { drag }: LogMinimapState) {
-    if (!drag) {
-      return { target };
-    }
-    return null;
-  }
-
-  public handleClick = (event: MouseEvent) => {
-    if (!this.dragTargetArea) return;
-    const svgPosition = this.dragTargetArea.getBoundingClientRect();
-    const clickedYPosition = event.clientY - svgPosition.top;
-    const clickedTime = Math.floor(this.getYScale().invert(clickedYPosition));
-    this.setState({
-      drag: null,
-    });
-
-    this.props.jumpToTarget({
-      tiebreaker: 0,
-      time: clickedTime,
-    });
-  };
-
-  private handleMouseDown: React.MouseEventHandler<SVGSVGElement> = event => {
-    const { clientY, target } = event;
-    if (target === this.dragTargetArea) {
-      const svgPosition = event.currentTarget.getBoundingClientRect();
-      this.setState({
-        drag: {
-          startY: clientY,
-          currentY: null,
-        },
-        svgPosition,
-      });
-      window.addEventListener('mousemove', this.handleDragMove);
-    }
-    window.addEventListener('mouseup', this.handleMouseUp);
-  };
-
-  private handleMouseUp = (event: MouseEvent) => {
-    window.removeEventListener('mousemove', this.handleDragMove);
-    window.removeEventListener('mouseup', this.handleMouseUp);
-
-    const { drag, svgPosition } = this.state;
-    if (!drag || !drag.currentY) {
-      this.handleClick(event);
-      return;
-    }
-    const getTime = (pos: number) => Math.floor(this.getYScale().invert(pos));
-    const startYPosition = drag.startY - svgPosition.top;
-    const endYPosition = event.clientY - svgPosition.top;
-    const startTime = getTime(startYPosition);
-    const endTime = getTime(endYPosition);
-    const timeDifference = endTime - startTime;
-    const newTime = (this.props.target || 0) - timeDifference;
-    this.setState({ drag: null, target: newTime });
-    this.props.jumpToTarget({
-      tiebreaker: 0,
-      time: newTime,
-    });
-  };
-
-  private handleDragMove = (event: MouseEvent) => {
-    const { drag } = this.state;
-    if (!drag) return;
-    this.setState({
-      drag: {
-        ...drag,
-        currentY: event.clientY,
-      },
-    });
-  };
-
+  public handleClick: React.MouseEventHandler<SVGSVGElement> = event => {
+    const minimapTop = event.currentTarget.getBoundingClientRect().top;
+    const clickedYPosition = event.clientY - minimapTop;
+    const clickedTime = Math.floor(this.getYScale().invert(clickedYPosition));
+
+    this.props.jumpToTarget({
+      tiebreaker: 0,
+      time: clickedTime,
+    });
+  };
+
   public getYScale = () => {
-    const { target } = this.state;
-    const { height, intervalSize } = this.props;
-    return calculateYScale(target, height, intervalSize);
+    const { start, end, height } = this.props;
+    return calculateYScale(start, end, height);
   };

   public getPositionOfTime = (time: number) => {
-    const { height, intervalSize } = this.props;
-
-    const [minTime] = this.getYScale().domain();
-
-    return ((time - minTime) * height) / intervalSize; //
+    return this.getYScale()(time);
   };

   private updateTimeCursor: React.MouseEventHandler<SVGSVGElement> = event => {
@@ -166,6 +89,8 @@ export class LogMinimap extends React.Component<LogMinimapProps, LogMinimapState
   public render() {
     const {
+      start,
+      end,
       className,
       height,
       highlightedInterval,
@@ -173,20 +98,11 @@ export class LogMinimap extends React.Component<LogMinimapProps, LogMinimapState
       summaryBuckets,
       summaryHighlightBuckets,
       width,
-      intervalSize,
     } = this.props;
-    const { timeCursorY, drag, target } = this.state;
-    // Render the time ruler and density map beyond the visible range of time, so that
-    // the user doesn't run out of ruler when they click and drag
-    const overscanHeight = Math.round(window.screen.availHeight * 2.5) || height * 3;
-    const [minTime, maxTime] = calculateYScale(
-      target,
-      overscanHeight,
-      intervalSize * (overscanHeight / height)
-    ).domain();
-    const tickCount = height ? Math.round((overscanHeight / height) * 144) : 12;
-    const overscanTranslate = height ? -(overscanHeight - height) / 2 : 0;
-    const dragTransform = !drag || !drag.currentY ? 0 : drag.currentY - drag.startY;
+    const { timeCursorY, target } = this.state;
+    const [minTime, maxTime] = calculateYScale(start, end, height).domain();
+    const tickCount = height ? Math.floor(height / 50) : 12;

     return (
       <MinimapWrapper
         className={className}
@@ -194,67 +110,52 @@ export class LogMinimap extends React.Component<LogMinimapProps, LogMinimapState
         preserveAspectRatio="none"
         viewBox={`0 0 ${width} ${height}`}
         width={width}
-        onMouseDown={this.handleMouseDown}
+        onClick={this.handleClick}
         onMouseMove={this.updateTimeCursor}
-        showOverscanBoundaries={Boolean(height && summaryBuckets.length)}
       >
-        <g transform={`translate(0, ${dragTransform + overscanTranslate})`}>
+        <MinimapBorder x1={TIMERULER_WIDTH} x2={TIMERULER_WIDTH} y1={0} y2={height} />
+        <TimeRuler
+          start={minTime}
+          end={maxTime}
+          width={TIMERULER_WIDTH}
+          height={height}
+          tickCount={tickCount}
+        />
+        <g transform={`translate(${TIMERULER_WIDTH}, 0)`}>
           <DensityChart
             buckets={summaryBuckets}
             start={minTime}
             end={maxTime}
-            width={width}
-            height={overscanHeight}
+            width={width - TIMERULER_WIDTH}
+            height={height}
           />
-
-          <MinimapBorder x1={width / 3} y1={0} x2={width / 3} y2={overscanHeight} />
-          <TimeRuler
-            start={minTime}
-            end={maxTime}
-            width={width}
-            height={overscanHeight}
-            tickCount={tickCount}
-          />
-
           <SearchMarkers
             buckets={summaryHighlightBuckets || []}
             start={minTime}
            end={maxTime}
-            width={width}
-            height={overscanHeight}
+            width={width - TIMERULER_WIDTH}
+            height={height}
             jumpToTarget={jumpToTarget}
           />
         </g>
-
         {highlightedInterval ? (
           <HighlightedInterval
             end={highlightedInterval.end}
             getPositionOfTime={this.getPositionOfTime}
             start={highlightedInterval.start}
+            targetWidth={TIMERULER_WIDTH}
            width={width}
             target={target}
           />
         ) : null}
-        <TimeCursor x1={width / 3} x2={width} y1={timeCursorY} y2={timeCursorY} />
-        <DragTargetArea
-          isGrabbing={Boolean(drag)}
-          ref={node => {
-            this.dragTargetArea = node;
-          }}
-          x={0}
-          y={0}
-          width={width / 3}
-          height={height}
-        />
+        <TimeCursor x1={TIMERULER_WIDTH} x2={width} y1={timeCursorY} y2={timeCursorY} />
       </MinimapWrapper>
     );
   }
 }

-const DragTargetArea = euiStyled.rect<{ isGrabbing: boolean }>`
-  fill: transparent;
-  cursor: ${({ isGrabbing }) => (isGrabbing ? 'grabbing' : 'grab')};
-`;
-
 const MinimapBorder = euiStyled.line`
   stroke: ${props => props.theme.eui.euiColorMediumShade};
   stroke-width: 1px;
@@ -269,9 +170,9 @@ const TimeCursor = euiStyled.line`
     : props.theme.eui.euiColorDarkShade};
 `;

-const MinimapWrapper = euiStyled.svg<{ showOverscanBoundaries: boolean }>`
-  background: ${props =>
-    props.showOverscanBoundaries ? props.theme.eui.euiColorMediumShade : 'transparent'};
+const MinimapWrapper = euiStyled.svg`
   cursor: pointer;
   fill: ${props => props.theme.eui.euiColorEmptyShade};
   & ${TimeCursor} {
     visibility: hidden;
   }

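With the minimap now driven by an explicit [start, end] range instead of a target plus intervalSize, mapping a timestamp to a vertical pixel offset is a plain linear scale. A standalone sketch with assumed values (not from the commit):

import { scaleLinear } from 'd3-scale';

const start = 1577836800000; // range start, epoch ms
const end = start + 60 * 60 * 1000; // one hour later
const height = 600; // minimap height in px

const yScale = scaleLinear().domain([start, end]).range([0, height]);
yScale(start + 30 * 60 * 1000); // => 300, halfway down the minimap
yScale.invert(300); // => the timestamp at pixel 300, as used by handleClick
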
@@ -10,10 +10,9 @@ import * as React from 'react';
 import { euiStyled, keyframes } from '../../../../../observability/public';
 import { LogEntryTime } from '../../../../common/log_entry';
 import { SearchMarkerTooltip } from './search_marker_tooltip';
-import { SummaryHighlightBucket } from './types';
-
+import { LogEntriesSummaryHighlightsBucket } from '../../../../common/http_api';
 interface SearchMarkerProps {
-  bucket: SummaryHighlightBucket;
+  bucket: LogEntriesSummaryHighlightsBucket;
   height: number;
   width: number;
   jumpToTarget: (target: LogEntryTime) => void;

@@ -10,10 +10,10 @@ import * as React from 'react';

 import { LogEntryTime } from '../../../../common/log_entry';
 import { SearchMarker } from './search_marker';
-import { SummaryHighlightBucket } from './types';
+import { LogEntriesSummaryHighlightsBucket } from '../../../../common/http_api';

 interface SearchMarkersProps {
-  buckets: SummaryHighlightBucket[];
+  buckets: LogEntriesSummaryHighlightsBucket[];
   className?: string;
   end: number;
   start: number;

@@ -0,0 +1,23 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+// The default d3-time-format is a bit strange for small ranges, so we will specify our own
+export function getTimeLabelFormat(start: number, end: number): string | undefined {
+  const diff = Math.abs(end - start);
+
+  // 15 seconds
+  if (diff < 15 * 1000) {
+    return ':%S.%L';
+  }
+
+  // 16 minutes
+  if (diff < 16 * 60 * 1000) {
+    return '%I:%M:%S';
+  }
+
+  // Use D3's default
+  return;
+}

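The formatter plugs into d3's time scale as a tick format specifier; a small sketch (not from the commit) of how the TimeRuler below consumes it:

import { scaleTime } from 'd3-scale';
import { getTimeLabelFormat } from './time_label_formatter';

const end = Date.now();
const start = end - 10 * 1000; // a 10-second range
const yScale = scaleTime().domain([start, end]).range([0, 400]);

// For ranges under 15 seconds this yields ':%S.%L' labels such as ':42.500';
// for larger ranges the specifier is undefined and d3 picks its own default.
const formatTick = yScale.tickFormat(10, getTimeLabelFormat(start, end));
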
@@ -8,6 +8,7 @@ import { scaleTime } from 'd3-scale';
 import * as React from 'react';

 import { euiStyled } from '../../../../../observability/public';
+import { getTimeLabelFormat } from './time_label_formatter';

 interface TimeRulerProps {
   end: number;
@@ -23,37 +24,19 @@ export const TimeRuler: React.FC<TimeRulerProps> = ({ end, height, start, tickCount
     .range([0, height]);

   const ticks = yScale.ticks(tickCount);
-  const formatTick = yScale.tickFormat();
-
-  const dateModLabel = (() => {
-    for (let i = 0; i < ticks.length; i++) {
-      const tickLabel = formatTick(ticks[i]);
-      if (!tickLabel[0].match(/[0-9]/)) {
-        return i % 12;
-      }
-    }
-  })();
+  const formatTick = yScale.tickFormat(tickCount, getTimeLabelFormat(start, end));

   return (
     <g>
       {ticks.map((tick, tickIndex) => {
         const y = yScale(tick);
-        const isLabeledTick = tickIndex % 12 === dateModLabel;
-        const tickStartX = isLabeledTick ? 0 : width / 3 - 4;

         return (
           <g key={`tick${tickIndex}`}>
-            {isLabeledTick && (
-              <TimeRulerTickLabel x={0} y={y - 4}>
-                {formatTick(tick)}
-              </TimeRulerTickLabel>
-            )}
-            <TimeRulerGridLine
-              isDark={isLabeledTick}
-              x1={tickStartX}
-              y1={y}
-              x2={width / 3}
-              y2={y}
-            />
+            <TimeRulerTickLabel x={0} y={y - 4}>
+              {formatTick(tick)}
+            </TimeRulerTickLabel>
+            <TimeRulerGridLine x1={0} y1={y} x2={width} y2={y} />
           </g>
         );
       })}
@@ -71,15 +54,11 @@ const TimeRulerTickLabel = euiStyled.text`
   pointer-events: none;
 `;

-const TimeRulerGridLine = euiStyled.line<{ isDark: boolean }>`
+const TimeRulerGridLine = euiStyled.line`
   stroke: ${props =>
-    props.isDark
-      ? props.theme.darkMode
-        ? props.theme.eui.euiColorDarkestShade
-        : props.theme.eui.euiColorDarkShade
-      : props.theme.darkMode
-      ? props.theme.eui.euiColorDarkShade
-      : props.theme.eui.euiColorMediumShade};
+    props.theme.darkMode
+      ? props.theme.eui.euiColorDarkestShade
+      : props.theme.eui.euiColorDarkShade};
   stroke-opacity: 0.5;
   stroke-width: 1px;
 `;

@@ -1,17 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { TimeKey } from '../../../../common/time';
-
-export interface SummaryBucket {
-  start: number;
-  end: number;
-  entriesCount: number;
-}
-
-export interface SummaryHighlightBucket extends SummaryBucket {
-  representativeKey: TimeKey;
-}

@@ -1,67 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { EuiFormRow, EuiRadioGroup } from '@elastic/eui';
-import { FormattedMessage } from '@kbn/i18n/react';
-import * as React from 'react';
-
-interface IntervalSizeDescriptor {
-  label: string;
-  intervalSize: number;
-}
-
-interface LogMinimapScaleControlsProps {
-  availableIntervalSizes: IntervalSizeDescriptor[];
-  intervalSize: number;
-  setIntervalSize: (intervalSize: number) => any;
-}
-
-export class LogMinimapScaleControls extends React.PureComponent<LogMinimapScaleControlsProps> {
-  public handleScaleChange = (intervalSizeDescriptorKey: string) => {
-    const { availableIntervalSizes, setIntervalSize } = this.props;
-    const [sizeDescriptor] = availableIntervalSizes.filter(
-      intervalKeyEquals(intervalSizeDescriptorKey)
-    );
-
-    if (sizeDescriptor) {
-      setIntervalSize(sizeDescriptor.intervalSize);
-    }
-  };
-
-  public render() {
-    const { availableIntervalSizes, intervalSize } = this.props;
-    const [currentSizeDescriptor] = availableIntervalSizes.filter(intervalSizeEquals(intervalSize));
-
-    return (
-      <EuiFormRow
-        label={
-          <FormattedMessage
-            id="xpack.infra.logs.customizeLogs.minimapScaleFormRowLabel"
-            defaultMessage="Minimap Scale"
-          />
-        }
-      >
-        <EuiRadioGroup
-          options={availableIntervalSizes.map(sizeDescriptor => ({
-            id: getIntervalSizeDescriptorKey(sizeDescriptor),
-            label: sizeDescriptor.label,
-          }))}
-          onChange={this.handleScaleChange}
-          idSelected={getIntervalSizeDescriptorKey(currentSizeDescriptor)}
-        />
-      </EuiFormRow>
-    );
-  }
-}
-
-const getIntervalSizeDescriptorKey = (sizeDescriptor: IntervalSizeDescriptor) =>
-  `${sizeDescriptor.intervalSize}`;
-
-const intervalKeyEquals = (key: string) => (sizeDescriptor: IntervalSizeDescriptor) =>
-  getIntervalSizeDescriptorKey(sizeDescriptor) === key;
-
-const intervalSizeEquals = (size: number) => (sizeDescriptor: IntervalSizeDescriptor) =>
-  sizeDescriptor.intervalSize === size;

@@ -7,27 +7,27 @@

 import { bisector } from 'd3-array';

 import { compareToTimeKey, TimeKey } from '../../../../common/time';
-import { LogEntry, LogEntryHighlight } from '../../../utils/log_entry';
+import { LogEntry } from '../../../../common/http_api';

 export type StreamItem = LogEntryStreamItem;

 export interface LogEntryStreamItem {
   kind: 'logEntry';
   logEntry: LogEntry;
-  highlights: LogEntryHighlight[];
+  highlights: LogEntry[];
 }

 export function getStreamItemTimeKey(item: StreamItem) {
   switch (item.kind) {
     case 'logEntry':
-      return item.logEntry.key;
+      return item.logEntry.cursor;
   }
 }

 export function getStreamItemId(item: StreamItem) {
   switch (item.kind) {
     case 'logEntry':
-      return `${item.logEntry.key.time}:${item.logEntry.key.tiebreaker}:${item.logEntry.gid}`;
+      return `${item.logEntry.cursor.time}:${item.logEntry.cursor.tiebreaker}:${item.logEntry.id}`;
   }
 }

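A quick sketch of the new id shape (the entry literal is illustrative, not from the commit; `cursor` and `id` come from the new LogEntry type):

const item: StreamItem = {
  kind: 'logEntry',
  logEntry: {
    id: 'abc123',
    cursor: { time: 1577836800000, tiebreaker: 2 },
    columns: [],
  } as any, // illustrative literal; the real LogEntry carries more fields
  highlights: [],
};

getStreamItemId(item); // => '1577836800000:2:abc123'
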
@@ -6,144 +6,279 @@

 /* eslint-disable max-classes-per-file */

-import { EuiButtonEmpty, EuiIcon, EuiProgress, EuiText } from '@elastic/eui';
-import { FormattedMessage, FormattedRelative } from '@kbn/i18n/react';
+import {
+  EuiText,
+  EuiFlexGroup,
+  EuiFlexItem,
+  EuiTitle,
+  EuiLoadingSpinner,
+  EuiButton,
+} from '@elastic/eui';
+import { FormattedMessage, FormattedTime, FormattedRelative } from '@kbn/i18n/react';
 import * as React from 'react';
+import { Unit } from '@elastic/datemath';

 import { euiStyled } from '../../../../../observability/public';
+import { LogTextSeparator } from './log_text_separator';
+import { extendDatemath } from '../../../utils/datemath';
+
+type Position = 'start' | 'end';

 interface LogTextStreamLoadingItemViewProps {
-  alignment: 'top' | 'bottom';
+  position: Position;
+  timestamp: number; // Either the top of the bottom's cursor timestamp
+  startDateExpression: string;
+  endDateExpression: string;
   className?: string;
   hasMore: boolean;
   isLoading: boolean;
   isStreaming: boolean;
-  lastStreamingUpdate: Date | null;
-  onLoadMore?: () => void;
+  onExtendRange?: (newDate: string) => void;
+  onStreamStart?: () => void;
 }

+const TIMESTAMP_FORMAT = {
+  hour12: false,
+  month: 'short',
+  day: 'numeric',
+  hour: 'numeric',
+  minute: 'numeric',
+  second: 'numeric',
+};
+
 export class LogTextStreamLoadingItemView extends React.PureComponent<
   LogTextStreamLoadingItemViewProps,
   {}
 > {
   public render() {
     const {
-      alignment,
+      position,
+      timestamp,
+      startDateExpression,
+      endDateExpression,
       className,
       hasMore,
       isLoading,
       isStreaming,
-      lastStreamingUpdate,
-      onLoadMore,
+      onExtendRange,
+      onStreamStart,
     } = this.props;

-    if (isStreaming) {
-      return (
-        <ProgressEntry alignment={alignment} className={className} color="primary" isLoading={true}>
-          <ProgressMessage>
-            <EuiText color="subdued">
-              <FormattedMessage
-                id="xpack.infra.logs.streamingNewEntriesText"
-                defaultMessage="Streaming new entries"
-              />
-            </EuiText>
-          </ProgressMessage>
-          {lastStreamingUpdate ? (
-            <ProgressMessage>
-              <EuiText color="subdued">
-                <EuiIcon type="clock" />
-                <FormattedMessage
-                  id="xpack.infra.logs.lastStreamingUpdateText"
-                  defaultMessage=" last updated {lastUpdateTime}"
-                  values={{
-                    lastUpdateTime: (
-                      <FormattedRelative value={lastStreamingUpdate} updateInterval={1000} />
-                    ),
-                  }}
-                />
-              </EuiText>
-            </ProgressMessage>
-          ) : null}
-        </ProgressEntry>
-      );
-    } else if (isLoading) {
-      return (
-        <ProgressEntry alignment={alignment} className={className} color="subdued" isLoading={true}>
-          <ProgressMessage>
-            <FormattedMessage
-              id="xpack.infra.logs.loadingAdditionalEntriesText"
-              defaultMessage="Loading additional entries"
-            />
-          </ProgressMessage>
-        </ProgressEntry>
-      );
-    } else if (!hasMore) {
-      return (
-        <ProgressEntry
-          alignment={alignment}
-          className={className}
-          color="subdued"
-          isLoading={false}
-        >
-          <ProgressMessage>
-            <FormattedMessage
-              id="xpack.infra.logs.noAdditionalEntriesFoundText"
-              defaultMessage="No additional entries found"
-            />
-          </ProgressMessage>
-          {onLoadMore ? (
-            <EuiButtonEmpty size="xs" onClick={onLoadMore} iconType="refresh">
-              <FormattedMessage
-                id="xpack.infra.logs.loadAgainButtonLabel"
-                defaultMessage="Load again"
-              />
-            </EuiButtonEmpty>
-          ) : null}
-        </ProgressEntry>
-      );
-    } else {
-      return null;
-    }
+    const shouldShowCta = !hasMore && !isStreaming;
+
+    const extra = (
+      <LoadingItemViewExtra justifyContent="center" alignItems="center" gutterSize="m">
+        {isLoading || isStreaming ? (
+          <ProgressSpinner kind={isStreaming ? 'streaming' : 'loading'} />
+        ) : shouldShowCta ? (
+          <ProgressCta
+            position={position}
+            onStreamStart={onStreamStart}
+            onExtendRange={onExtendRange}
+            startDateExpression={startDateExpression}
+            endDateExpression={endDateExpression}
+          />
+        ) : null}
+      </LoadingItemViewExtra>
+    );
+
+    return (
+      <ProgressEntryWrapper className={className} position={position}>
+        {position === 'start' ? extra : null}
+        <ProgressMessage timestamp={timestamp} position={position} isStreaming={isStreaming} />
+        {position === 'end' ? extra : null}
+      </ProgressEntryWrapper>
+    );
   }
 }

-interface ProgressEntryProps {
-  alignment: 'top' | 'bottom';
-  className?: string;
-  color: 'subdued' | 'primary';
-  isLoading: boolean;
-}
+const LoadingItemViewExtra = euiStyled(EuiFlexGroup)`
+  height: 40px;
+`;

-const ProgressEntry: React.FC<ProgressEntryProps> = props => {
-  const { alignment, children, className, color, isLoading } = props;
+const ProgressEntryWrapper = euiStyled.div<{ position: Position }>`
+  padding-left: ${props => props.theme.eui.euiSizeS};
+  padding-top: ${props =>
+    props.position === 'start' ? props.theme.eui.euiSizeL : props.theme.eui.euiSizeM};
+  padding-bottom: ${props =>
+    props.position === 'end' ? props.theme.eui.euiSizeL : props.theme.eui.euiSizeM};
+`;

-  // NOTE: styled-components seems to make all props in EuiProgress required, so this
-  // style attribute hacking replaces styled-components here for now until that can be fixed
-  // see: https://github.com/elastic/eui/issues/1655
-  const alignmentStyle =
-    alignment === 'top' ? { top: 0, bottom: 'initial' } : { top: 'initial', bottom: 0 };
+type ProgressMessageProps = Pick<
+  LogTextStreamLoadingItemViewProps,
+  'timestamp' | 'position' | 'isStreaming'
+>;
+const ProgressMessage: React.FC<ProgressMessageProps> = ({ timestamp, position, isStreaming }) => {
+  const formattedTimestamp =
+    isStreaming && position === 'end' ? (
+      <FormattedRelative units="second" value={timestamp} updateInterval={1} />
+    ) : (
+      <FormattedTime value={timestamp} {...TIMESTAMP_FORMAT} />
+    );
+
+  const message =
+    position === 'start' ? (
+      <FormattedMessage
+        id="xpack.infra.logs.showingEntriesFromTimestamp"
+        defaultMessage="Showing entries from {timestamp}"
+        values={{ timestamp: formattedTimestamp }}
+      />
+    ) : isStreaming ? (
+      <FormattedMessage
+        id="xpack.infra.logs.lastUpdate"
+        defaultMessage="Last update {timestamp}"
+        values={{ timestamp: formattedTimestamp }}
+      />
+    ) : (
+      <FormattedMessage
+        id="xpack.infra.logs.showingEntriesUntilTimestamp"
+        defaultMessage="Showing entries until {timestamp}"
+        values={{ timestamp: formattedTimestamp }}
+      />
+    );

   return (
-    <ProgressEntryWrapper className={className}>
-      <EuiProgress
-        style={alignmentStyle}
-        color={color}
-        size="xs"
-        position="absolute"
-        {...(!isLoading ? { max: 1, value: 1 } : {})}
-      />
-      {children}
-    </ProgressEntryWrapper>
+    <LogTextSeparator>
+      <EuiTitle size="xxs">{message}</EuiTitle>
+    </LogTextSeparator>
   );
 };

-const ProgressEntryWrapper = euiStyled.div`
-  align-items: center;
-  display: flex;
-  min-height: ${props => props.theme.eui.euiSizeXXL};
-  position: relative;
-`;
+const ProgressSpinner: React.FC<{ kind: 'streaming' | 'loading' }> = ({ kind }) => (
+  <>
+    <EuiFlexItem grow={false}>
+      <EuiLoadingSpinner size="l" />
+    </EuiFlexItem>
+    <EuiFlexItem grow={false}>
+      <EuiText size="s">
+        {kind === 'streaming' ? (
+          <FormattedMessage
+            id="xpack.infra.logs.streamingNewEntriesText"
+            defaultMessage="Streaming new entries"
+          />
+        ) : (
+          <FormattedMessage
+            id="xpack.infra.logs.loadingNewEntriesText"
+            defaultMessage="Loading new entries"
+          />
+        )}
+      </EuiText>
+    </EuiFlexItem>
+  </>
+);

-const ProgressMessage = euiStyled.div`
-  padding: 8px 16px;
-`;
+type ProgressCtaProps = Pick<
+  LogTextStreamLoadingItemViewProps,
+  'position' | 'startDateExpression' | 'endDateExpression' | 'onExtendRange' | 'onStreamStart'
+>;
+const ProgressCta: React.FC<ProgressCtaProps> = ({
+  position,
+  startDateExpression,
+  endDateExpression,
+  onExtendRange,
+  onStreamStart,
+}) => {
+  const rangeEdge = position === 'start' ? startDateExpression : endDateExpression;
+
+  if (rangeEdge === 'now' && position === 'end') {
+    return (
+      <EuiButton onClick={onStreamStart} size="s">
+        <FormattedMessage id="xpack.infra.logs.streamLive" defaultMessage="Stream live" />
+      </EuiButton>
+    );
+  }
+
+  const iconType = position === 'start' ? 'arrowUp' : 'arrowDown';
+  const extendedRange =
+    position === 'start'
+      ? extendDatemath(startDateExpression, 'before', endDateExpression)
+      : extendDatemath(endDateExpression, 'after', startDateExpression);
+  if (!extendedRange || !('diffUnit' in extendedRange)) {
+    return null;
+  }
+
+  return (
+    <EuiButton
+      onClick={() => {
+        if (typeof onExtendRange === 'function') {
+          onExtendRange(extendedRange.value);
+        }
+      }}
+      iconType={iconType}
+      size="s"
+    >
+      <ProgressExtendMessage amount={extendedRange.diffAmount} unit={extendedRange.diffUnit} />
+    </EuiButton>
+  );
+};
+
+const ProgressExtendMessage: React.FC<{ amount: number; unit: Unit }> = ({ amount, unit }) => {
+  switch (unit) {
+    case 'ms':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByMillisecondsButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {millisecond} other {milliseconds}}"
+          values={{ amount }}
+        />
+      );
+    case 's':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeBySecondsButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {second} other {seconds}}"
+          values={{ amount }}
+        />
+      );
+    case 'm':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByMinutesButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {minute} other {minutes}}"
+          values={{ amount }}
+        />
+      );
+    case 'h':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByHoursButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {hour} other {hours}}"
+          values={{ amount }}
+        />
+      );
+    case 'd':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByDaysButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {day} other {days}}"
+          values={{ amount }}
+        />
+      );
+    case 'w':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByWeeksButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {week} other {weeks}}"
+          values={{ amount }}
+        />
+      );
+    case 'M':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByMonthsButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {month} other {months}}"
+          values={{ amount }}
+        />
+      );
+    case 'y':
+      return (
+        <FormattedMessage
+          id="xpack.infra.logs.extendTimeframeByYearsButton"
+          defaultMessage="Extend time frame by {amount, number} {amount, plural, one {year} other {years}}"
+          values={{ amount }}
+        />
+      );
+    default:
+      throw new TypeError('Unhandled unit: ' + unit);
+  }
+};

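The CTA relies on `extendDatemath` from '../../../utils/datemath', whose implementation is outside this diff. Inferring only from how it is consumed above, the shape it returns looks roughly like this (illustrative values, not from the commit):

const extended = extendDatemath('now-1d', 'before', 'now');
// e.g. { value: 'now-2d', diffAmount: 1, diffUnit: 'd' }: a new edge
// expression plus the size of the step, feeding onExtendRange and the
// "Extend time frame by ..." label above.
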
@@ -8,15 +8,16 @@ import { mount } from 'enzyme';
 import React from 'react';

 import { EuiThemeProvider } from '../../../../../observability/public';
-import { LogEntryColumn } from '../../../utils/log_entry';
 import { LogEntryFieldColumn } from './log_entry_field_column';
+import { LogColumn } from '../../../../common/http_api';

 describe('LogEntryFieldColumn', () => {
   it('should output a <ul> when displaying an Array of values', () => {
-    const column: LogEntryColumn = {
+    const column: LogColumn = {
       columnId: 'TEST_COLUMN',
       field: 'TEST_FIELD',
-      value: JSON.stringify(['a', 'b', 'c']),
+      value: ['a', 'b', 'c'],
       highlights: [],
     };

     const component = mount(
@@ -42,13 +43,14 @@ describe('LogEntryFieldColumn', () => {
   });

   it('should output a text representation of a passed complex value', () => {
-    const column: LogEntryColumn = {
+    const column: LogColumn = {
       columnId: 'TEST_COLUMN',
       field: 'TEST_FIELD',
-      value: JSON.stringify({
+      value: {
         lat: 1,
         lon: 2,
-      }),
+      },
       highlights: [],
     };

     const component = mount(
@@ -67,10 +69,11 @@ describe('LogEntryFieldColumn', () => {
   });

   it('should output just text when passed a non-Array', () => {
-    const column: LogEntryColumn = {
+    const column: LogColumn = {
       columnId: 'TEST_COLUMN',
       field: 'TEST_FIELD',
-      value: JSON.stringify('foo'),
+      value: 'foo',
       highlights: [],
     };

     const component = mount(

@@ -8,14 +8,10 @@ import stringify from 'json-stable-stringify';
 import React, { useMemo } from 'react';

 import { euiStyled } from '../../../../../observability/public';
-import {
-  isFieldColumn,
-  isHighlightFieldColumn,
-  LogEntryColumn,
-  LogEntryHighlightColumn,
-} from '../../../utils/log_entry';
+import { isFieldColumn, isHighlightFieldColumn } from '../../../utils/log_entry';
 import { ActiveHighlightMarker, highlightFieldValue, HighlightMarker } from './highlighting';
 import { LogEntryColumnContent } from './log_entry_column';
+import { LogColumn } from '../../../../common/http_api';
 import {
   hoveredContentStyle,
   longWrappedContentStyle,
@@ -25,8 +21,8 @@ import {
 } from './text_styles';

 interface LogEntryFieldColumnProps {
-  columnValue: LogEntryColumn;
-  highlights: LogEntryHighlightColumn[];
+  columnValue: LogColumn;
+  highlights: LogColumn[];
   isActiveHighlight: boolean;
   isHighlighted: boolean;
   isHovered: boolean;
@@ -41,9 +37,12 @@ export const LogEntryFieldColumn: React.FunctionComponent<LogEntryFieldColumnPro
   isHovered,
   wrapMode,
 }) => {
-  const value = useMemo(() => (isFieldColumn(columnValue) ? JSON.parse(columnValue.value) : null), [
-    columnValue,
-  ]);
+  const value = useMemo(() => {
+    if (isFieldColumn(columnValue)) {
+      return columnValue.value;
+    }
+    return null;
+  }, [columnValue]);
   const formattedValue = Array.isArray(value) ? (
     <ul>
       {value.map((entry, i) => (
@@ -58,7 +57,7 @@ export const LogEntryFieldColumn: React.FunctionComponent<LogEntryFieldColumnPro
     </ul>
   ) : (
     highlightFieldValue(
-      typeof value === 'object' && value != null ? stringify(value) : value,
+      typeof value === 'string' ? value : stringify(value),
       isHighlightFieldColumn(firstHighlight) ? firstHighlight.highlights : [],
       isActiveHighlight ? ActiveHighlightMarker : HighlightMarker
     )

@@ -5,6 +5,7 @@
 */

 import React, { memo, useMemo } from 'react';
+import stringify from 'json-stable-stringify';

 import { euiStyled } from '../../../../../observability/public';
 import {
@@ -12,9 +13,7 @@ import {
   isFieldSegment,
   isHighlightMessageColumn,
   isMessageColumn,
-  LogEntryColumn,
-  LogEntryHighlightColumn,
-  LogEntryMessageSegment,
+  isHighlightFieldSegment,
 } from '../../../utils/log_entry';
 import { ActiveHighlightMarker, highlightFieldValue, HighlightMarker } from './highlighting';
 import { LogEntryColumnContent } from './log_entry_column';
@@ -25,10 +24,11 @@ import {
   unwrappedContentStyle,
   WrapMode,
 } from './text_styles';
+import { LogColumn, LogMessagePart } from '../../../../common/http_api';

 interface LogEntryMessageColumnProps {
-  columnValue: LogEntryColumn;
-  highlights: LogEntryHighlightColumn[];
+  columnValue: LogColumn;
+  highlights: LogColumn[];
   isActiveHighlight: boolean;
   isHighlighted: boolean;
   isHovered: boolean;
@@ -72,28 +72,39 @@ const MessageColumnContent = euiStyled(LogEntryColumnContent)<MessageColumnConte
 `;

 const formatMessageSegments = (
-  messageSegments: LogEntryMessageSegment[],
-  highlights: LogEntryHighlightColumn[],
+  messageSegments: LogMessagePart[],
+  highlights: LogColumn[],
   isActiveHighlight: boolean
 ) =>
   messageSegments.map((messageSegment, index) =>
     formatMessageSegment(
       messageSegment,
-      highlights.map(highlight =>
-        isHighlightMessageColumn(highlight) ? highlight.message[index].highlights : []
-      ),
+      highlights.map(highlight => {
+        if (isHighlightMessageColumn(highlight)) {
+          const segment = highlight.message[index];
+          if (isHighlightFieldSegment(segment)) {
+            return segment.highlights;
+          }
+        }
+        return [];
+      }),
       isActiveHighlight
     )
   );

 const formatMessageSegment = (
-  messageSegment: LogEntryMessageSegment,
+  messageSegment: LogMessagePart,
   [firstHighlight = []]: string[][], // we only support one highlight for now
   isActiveHighlight: boolean
 ): React.ReactNode => {
   if (isFieldSegment(messageSegment)) {
+    const value =
+      typeof messageSegment.value === 'string'
+        ? messageSegment.value
+        : stringify(messageSegment.value);
+
     return highlightFieldValue(
-      messageSegment.value,
+      value,
       firstHighlight,
       isActiveHighlight ? ActiveHighlightMarker : HighlightMarker
     );

@@ -7,12 +7,7 @@
 import React, { memo, useState, useCallback, useMemo } from 'react';

 import { euiStyled } from '../../../../../observability/public';
-import {
-  LogEntry,
-  LogEntryHighlight,
-  LogEntryHighlightColumn,
-  isTimestampColumn,
-} from '../../../utils/log_entry';
+import { isTimestampColumn } from '../../../utils/log_entry';
 import {
   LogColumnConfiguration,
   isTimestampLogColumnConfiguration,
@@ -26,12 +21,13 @@ import { LogEntryDetailsIconColumn } from './log_entry_icon_column';
 import { LogEntryMessageColumn } from './log_entry_message_column';
 import { LogEntryTimestampColumn } from './log_entry_timestamp_column';
 import { monospaceTextStyle } from './text_styles';
+import { LogEntry, LogColumn } from '../../../../common/http_api';

 interface LogEntryRowProps {
   boundingBoxRef?: React.Ref<Element>;
   columnConfigurations: LogColumnConfiguration[];
   columnWidths: LogEntryColumnWidths;
-  highlights: LogEntryHighlight[];
+  highlights: LogEntry[];
   isActiveHighlight: boolean;
   isHighlighted: boolean;
   logEntry: LogEntry;
@@ -63,9 +59,9 @@ export const LogEntryRow = memo(
     setIsHovered(false);
   }, []);

-  const openFlyout = useCallback(() => openFlyoutWithItem?.(logEntry.gid), [
+  const openFlyout = useCallback(() => openFlyoutWithItem?.(logEntry.id), [
     openFlyoutWithItem,
-    logEntry.gid,
+    logEntry.id,
   ]);

   const logEntryColumnsById = useMemo(
@@ -85,7 +81,7 @@ export const LogEntryRow = memo(
   const highlightsByColumnId = useMemo(
     () =>
       highlights.reduce<{
-        [columnId: string]: LogEntryHighlightColumn[];
+        [columnId: string]: LogColumn[];
       }>(
         (columnsById, highlight) =>
           highlight.columns.reduce(

@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import React from 'react';
+import { EuiFlexGroup, EuiFlexItem, EuiHorizontalRule } from '@elastic/eui';
+
+/**
+ * Create a separator with a text on the right side
+ */
+export const LogTextSeparator: React.FC = ({ children }) => {
+  return (
+    <EuiFlexGroup alignItems="center" gutterSize="s">
+      <EuiFlexItem grow={false}>{children}</EuiFlexItem>
+      <EuiFlexItem>
+        <EuiHorizontalRule />
+      </EuiFlexItem>
+    </EuiFlexGroup>
+  );
+};
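An illustrative use (not from the commit), matching how the loading item view above renders its range messages:

<LogTextSeparator>
  <EuiTitle size="xxs">Showing entries from Jan 1, 00:00:00</EuiTitle>
</LogTextSeparator>
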
@@ -54,6 +54,10 @@ interface ScrollableLogTextStreamViewProps {
   setFlyoutVisibility: (visible: boolean) => void;
   highlightedItem: string | null;
   currentHighlightKey: UniqueTimeKey | null;
+  startDateExpression: string;
+  endDateExpression: string;
+  updateDateRange: (range: { startDateExpression?: string; endDateExpression?: string }) => void;
+  startLiveStreaming: () => void;
 }

 interface ScrollableLogTextStreamViewState {
@@ -90,7 +94,7 @@ export class ScrollableLogTextStreamView extends React.PureComponent<
         targetId: getStreamItemId(getStreamItemBeforeTimeKey(nextProps.items, nextProps.target!)),
         items: nextItems,
       };
-    } else if (!nextProps.target || !hasItems) {
+    } else if (!hasItems) {
       return {
         target: null,
         targetId: null,
@@ -129,9 +133,13 @@ export class ScrollableLogTextStreamView extends React.PureComponent<
       isLoadingMore,
       isReloading,
       isStreaming,
-      lastLoadedTime,
       scale,
       wrap,
+      startDateExpression,
+      endDateExpression,
+      lastLoadedTime,
+      updateDateRange,
+      startLiveStreaming,
     } = this.props;
     const { targetId, items, isScrollLocked } = this.state;
     const hasItems = items.length > 0;
@@ -184,72 +192,88 @@ export class ScrollableLogTextStreamView extends React.PureComponent<
           isLocked={isScrollLocked}
           entriesCount={items.length}
         >
-          {registerChild => (
-            <>
-              <LogTextStreamLoadingItemView
-                alignment="bottom"
-                isLoading={isLoadingMore}
-                hasMore={hasMoreBeforeStart}
-                isStreaming={false}
-                lastStreamingUpdate={null}
-              />
-              {items.map((item, idx) => {
-                const currentTimestamp = item.logEntry.key.time;
-                let showDate = false;
-
-                if (idx > 0) {
-                  const prevTimestamp = items[idx - 1].logEntry.key.time;
-                  showDate = !moment(currentTimestamp).isSame(prevTimestamp, 'day');
-                }
-
-                return (
-                  <Fragment key={getStreamItemId(item)}>
-                    {showDate && <LogDateRow timestamp={currentTimestamp} />}
-                    <MeasurableItemView
-                      register={registerChild}
-                      registrationKey={getStreamItemId(item)}
-                    >
-                      {itemMeasureRef => (
-                        <LogEntryRow
-                          columnConfigurations={columnConfigurations}
-                          columnWidths={columnWidths}
-                          openFlyoutWithItem={this.handleOpenFlyout}
-                          boundingBoxRef={itemMeasureRef}
-                          logEntry={item.logEntry}
-                          highlights={item.highlights}
-                          isActiveHighlight={
-                            !!currentHighlightKey &&
-                            currentHighlightKey.gid === item.logEntry.gid
-                          }
-                          scale={scale}
-                          wrap={wrap}
-                          isHighlighted={
-                            highlightedItem
-                              ? item.logEntry.gid === highlightedItem
-                              : false
-                          }
-                        />
-                      )}
-                    </MeasurableItemView>
-                  </Fragment>
-                );
-              })}
-              <LogTextStreamLoadingItemView
-                alignment="top"
-                isLoading={isStreaming || isLoadingMore}
-                hasMore={hasMoreAfterEnd}
-                isStreaming={isStreaming}
-                lastStreamingUpdate={isStreaming ? lastLoadedTime : null}
-                onLoadMore={this.handleLoadNewerItems}
-              />
-              {isScrollLocked && (
-                <LogTextStreamJumpToTail
-                  width={width}
-                  onClickJump={this.handleJumpToTail}
-                />
-              )}
-            </>
-          )}
+          {registerChild =>
+            items.length > 0 ? (
+              <>
+                <LogTextStreamLoadingItemView
+                  position="start"
+                  isLoading={isLoadingMore}
+                  hasMore={hasMoreBeforeStart}
+                  timestamp={items[0].logEntry.cursor.time}
+                  isStreaming={false}
+                  startDateExpression={startDateExpression}
+                  endDateExpression={endDateExpression}
+                  onExtendRange={newDateExpression =>
+                    updateDateRange({ startDateExpression: newDateExpression })
+                  }
+                />
+                {items.map((item, idx) => {
+                  const currentTimestamp = item.logEntry.cursor.time;
+                  let showDate = false;
+
+                  if (idx > 0) {
+                    const prevTimestamp = items[idx - 1].logEntry.cursor.time;
+                    showDate = !moment(currentTimestamp).isSame(prevTimestamp, 'day');
+                  }
+
+                  return (
+                    <Fragment key={getStreamItemId(item)}>
+                      {showDate && <LogDateRow timestamp={currentTimestamp} />}
+                      <MeasurableItemView
+                        register={registerChild}
+                        registrationKey={getStreamItemId(item)}
+                      >
+                        {itemMeasureRef => (
+                          <LogEntryRow
+                            columnConfigurations={columnConfigurations}
+                            columnWidths={columnWidths}
+                            openFlyoutWithItem={this.handleOpenFlyout}
+                            boundingBoxRef={itemMeasureRef}
+                            logEntry={item.logEntry}
+                            highlights={item.highlights}
+                            isActiveHighlight={
+                              !!currentHighlightKey &&
+                              currentHighlightKey.gid === item.logEntry.id
+                            }
+                            scale={scale}
+                            wrap={wrap}
+                            isHighlighted={
+                              highlightedItem
+                                ? item.logEntry.id === highlightedItem
+                                : false
+                            }
+                          />
+                        )}
+                      </MeasurableItemView>
+                    </Fragment>
+                  );
+                })}
+                <LogTextStreamLoadingItemView
+                  position="end"
+                  isLoading={isStreaming || isLoadingMore}
+                  hasMore={hasMoreAfterEnd}
+                  isStreaming={isStreaming}
+                  timestamp={
+                    isStreaming && lastLoadedTime
+                      ? lastLoadedTime.valueOf()
+                      : items[items.length - 1].logEntry.cursor.time
+                  }
+                  startDateExpression={startDateExpression}
+                  endDateExpression={endDateExpression}
+                  onExtendRange={newDateExpression =>
+                    updateDateRange({ endDateExpression: newDateExpression })
+                  }
+                  onStreamStart={() => startLiveStreaming()}
+                />
+                {isScrollLocked && (
+                  <LogTextStreamJumpToTail
+                    width={width}
+                    onClickJump={this.handleJumpToTail}
+                  />
+                )}
+              </>
+            ) : null
+          }
         </VerticalScrollPanel>
       </ScrollPanelSizeProbe>
     )}
@@ -275,14 +299,6 @@ export class ScrollableLogTextStreamView extends React.PureComponent<
     }
   };

-  private handleLoadNewerItems = () => {
-    const { loadNewerItems } = this.props;
-
-    if (loadNewerItems) {
-      loadNewerItems();
-    }
-  };
-
   // this is actually a method but not recognized as such
   // eslint-disable-next-line @typescript-eslint/member-ordering
   private handleVisibleChildrenChange = callWithoutRepeats(

@@ -1,97 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { EuiDatePicker, EuiFlexGroup, EuiFlexItem, EuiButtonEmpty } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import moment, { Moment } from 'moment';
import React from 'react';
import { FixedDatePicker } from '../fixed_datepicker';

const noop = () => undefined;

interface LogTimeControlsProps {
  currentTime: number | null;
  startLiveStreaming: () => any;
  stopLiveStreaming: () => void;
  isLiveStreaming: boolean;
  jumpToTime: (time: number) => any;
}

export class LogTimeControls extends React.PureComponent<LogTimeControlsProps> {
  public render() {
    const { currentTime, isLiveStreaming } = this.props;

    const currentMoment = currentTime ? moment(currentTime) : null;
    if (isLiveStreaming) {
      return (
        <EuiFlexGroup gutterSize="s">
          <EuiFlexItem>
            <EuiDatePicker
              disabled
              onChange={noop}
              value={i18n.translate('xpack.infra.logs.streamingDescription', {
                defaultMessage: 'Streaming new entries…',
              })}
            />
          </EuiFlexItem>
          <EuiFlexItem grow={false}>
            <EuiButtonEmpty
              color="primary"
              iconType="pause"
              iconSide="left"
              onClick={this.stopLiveStreaming}
            >
              <FormattedMessage
                id="xpack.infra.logs.stopStreamingButtonLabel"
                defaultMessage="Stop streaming"
              />
            </EuiButtonEmpty>
          </EuiFlexItem>
        </EuiFlexGroup>
      );
    } else {
      return (
        <EuiFlexGroup gutterSize="s">
          <EuiFlexItem>
            <FixedDatePicker
              dateFormat="L LTS"
              onChange={this.handleChangeDate}
              popperPlacement="top-end"
              selected={currentMoment}
              shouldCloseOnSelect
              showTimeSelect
              timeFormat="LTS"
              injectTimes={currentMoment ? [currentMoment] : []}
            />
          </EuiFlexItem>
          <EuiFlexItem grow={false}>
            <EuiButtonEmpty iconType="play" iconSide="left" onClick={this.startLiveStreaming}>
              <FormattedMessage
                id="xpack.infra.logs.startStreamingButtonLabel"
                defaultMessage="Stream live"
              />
            </EuiButtonEmpty>
          </EuiFlexItem>
        </EuiFlexGroup>
      );
    }
  }

  private handleChangeDate = (date: Moment | null) => {
    if (date !== null) {
      this.props.jumpToTime(date.valueOf());
    }
  };

  private startLiveStreaming = () => {
    this.props.startLiveStreaming();
  };

  private stopLiveStreaming = () => {
    this.props.stopLiveStreaming();
  };
}
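The control deleted above combined a fixed date picker and a stream-live toggle around a single `currentTime`. In the new model the stream is bounded by datemath expressions, which maps naturally onto EUI's super date picker. A minimal sketch of that wiring, assuming the `LogPositionState` container from this commit; the `EuiSuperDatePicker` props are EUI's public API, but the import path and component shown here are illustrative, not the commit's actual component:

```tsx
import React, { useContext } from 'react';
import { EuiSuperDatePicker } from '@elastic/eui';
// Hypothetical path; the real container lives in the infra plugin's containers/logs tree.
import { LogPositionState } from '../../containers/logs/log_position';

export const LogDatePickerSketch: React.FC = () => {
  const { startDateExpression, endDateExpression, updateDateRange } = useContext(
    LogPositionState.Context
  );

  return (
    <EuiSuperDatePicker
      start={startDateExpression} // a datemath expression, e.g. 'now-1d'
      end={endDateExpression} // e.g. 'now'
      onTimeChange={({ start, end }) =>
        // `updateDateRange` validates the expressions and recomputes the epoch timestamps.
        updateDateRange({ startDateExpression: start, endDateExpression: end })
      }
    />
  );
};
```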
@@ -0,0 +1,28 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';

import { throwErrors, createPlainError } from '../../../../../common/runtime_types';

import {
  LOG_ENTRIES_PATH,
  LogEntriesRequest,
  logEntriesRequestRT,
  logEntriesResponseRT,
} from '../../../../../common/http_api';

export const fetchLogEntries = async (requestArgs: LogEntriesRequest) => {
  const response = await npStart.http.fetch(LOG_ENTRIES_PATH, {
    method: 'POST',
    body: JSON.stringify(logEntriesRequestRT.encode(requestArgs)),
  });

  return pipe(logEntriesResponseRT.decode(response), fold(throwErrors(createPlainError), identity));
};
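`fetchLogEntries` accepts the three request shapes of the `logEntriesRequestRT` union used elsewhere in this diff: centered on a cursor, anchored with `before: 'last'` for an initial tail load, or paged with an explicit `after` cursor. A hedged usage sketch; the source id and timestamps are illustrative:

```ts
import { fetchLogEntries } from './fetch_log_entries';

async function exampleRequests() {
  const range = {
    sourceId: 'default',
    startTimestamp: Date.now() - 24 * 60 * 60 * 1000, // 'now-1d'
    endTimestamp: Date.now(),
  };

  // Initial load without a target position: fetch the tail of the range.
  const tail = await fetchLogEntries({ ...range, before: 'last' });

  // Centered load, e.g. around a position restored from the URL.
  await fetchLogEntries({
    ...range,
    center: { time: range.endTimestamp - 3600000, tiebreaker: 0 },
  });

  // Page forward from the last cursor the previous response returned.
  if (tail.data.bottomCursor) {
    await fetchLogEntries({ ...range, after: tail.data.bottomCursor });
  }
}
```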
@@ -1,64 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
import { ApolloClient } from 'apollo-client';
import { TimeKey } from '../../../../common/time';
import { logEntriesQuery } from '../../../graphql/log_entries.gql_query';
import { useApolloClient } from '../../../utils/apollo_context';
import { LogEntriesResponse } from '.';

const LOAD_CHUNK_SIZE = 200;

type LogEntriesGetter = (
  client: ApolloClient<{}>,
  countBefore: number,
  countAfter: number
) => (params: {
  sourceId: string;
  timeKey: TimeKey | null;
  filterQuery: string | null;
}) => Promise<LogEntriesResponse>;

const getLogEntries: LogEntriesGetter = (client, countBefore, countAfter) => async ({
  sourceId,
  timeKey,
  filterQuery,
}) => {
  if (!timeKey) throw new Error('TimeKey is null');
  const result = await client.query({
    query: logEntriesQuery,
    variables: {
      sourceId,
      timeKey: { time: timeKey.time, tiebreaker: timeKey.tiebreaker },
      countBefore,
      countAfter,
      filterQuery,
    },
    fetchPolicy: 'no-cache',
  });
  // Workaround for Typescript. Since we're removing the GraphQL API in another PR or two
  // 7.6 goes out I don't think it's worth the effort to actually make this
  // typecheck pass
  const { source } = result.data as any;
  const { logEntriesAround } = source;
  return {
    entries: logEntriesAround.entries,
    entriesStart: logEntriesAround.start,
    entriesEnd: logEntriesAround.end,
    hasMoreAfterEnd: logEntriesAround.hasMoreAfter,
    hasMoreBeforeStart: logEntriesAround.hasMoreBefore,
    lastLoadedTime: new Date(),
  };
};

export const useGraphQLQueries = () => {
  const client = useApolloClient();
  if (!client) throw new Error('Unable to get Apollo Client from context');
  return {
    getLogEntriesAround: getLogEntries(client, LOAD_CHUNK_SIZE, LOAD_CHUNK_SIZE),
    getLogEntriesBefore: getLogEntries(client, LOAD_CHUNK_SIZE, 0),
    getLogEntriesAfter: getLogEntries(client, 0, LOAD_CHUNK_SIZE),
  };
};
@@ -5,12 +5,18 @@
 */
import { useEffect, useState, useReducer, useCallback } from 'react';
import createContainer from 'constate';
import { pick, throttle, omit } from 'lodash';
import { useGraphQLQueries } from './gql_queries';
import { pick, throttle } from 'lodash';
import { TimeKey, timeKeyIsBetween } from '../../../../common/time';
import { InfraLogEntry } from './types';
import {
  LogEntriesResponse,
  LogEntry,
  LogEntriesRequest,
  LogEntriesBaseRequest,
} from '../../../../common/http_api';
import { fetchLogEntries } from './api/fetch_log_entries';

const DESIRED_BUFFER_PAGES = 2;
const LIVE_STREAM_INTERVAL = 5000;

enum Action {
  FetchingNewEntries,

@@ -20,6 +26,7 @@ enum Action {
  ReceiveEntriesAfter,
  ErrorOnNewEntries,
  ErrorOnMoreEntries,
  ExpandRange,
}

type ReceiveActions =

@@ -29,41 +36,46 @@ type ReceiveActions =

interface ReceiveEntriesAction {
  type: ReceiveActions;
  payload: LogEntriesResponse;
  payload: LogEntriesResponse['data'];
}
interface ExpandRangeAction {
  type: Action.ExpandRange;
  payload: { before: boolean; after: boolean };
}
interface FetchOrErrorAction {
  type: Exclude<Action, ReceiveActions>;
  type: Exclude<Action, ReceiveActions | Action.ExpandRange>;
}
type ActionObj = ReceiveEntriesAction | FetchOrErrorAction;
type ActionObj = ReceiveEntriesAction | FetchOrErrorAction | ExpandRangeAction;

type Dispatch = (action: ActionObj) => void;

interface LogEntriesProps {
  startTimestamp: number;
  endTimestamp: number;
  timestampsLastUpdate: number;
  filterQuery: string | null;
  timeKey: TimeKey | null;
  pagesBeforeStart: number | null;
  pagesAfterEnd: number | null;
  sourceId: string;
  isAutoReloading: boolean;
  isStreaming: boolean;
  jumpToTargetPosition: (position: TimeKey) => void;
}

type FetchEntriesParams = Omit<LogEntriesProps, 'isAutoReloading'>;
type FetchEntriesParams = Omit<LogEntriesProps, 'isStreaming'>;
type FetchMoreEntriesParams = Pick<LogEntriesProps, 'pagesBeforeStart' | 'pagesAfterEnd'>;

export interface LogEntriesResponse {
  entries: InfraLogEntry[];
  entriesStart: TimeKey | null;
  entriesEnd: TimeKey | null;
  hasMoreAfterEnd: boolean;
  hasMoreBeforeStart: boolean;
  lastLoadedTime: Date | null;
}

export type LogEntriesStateParams = {
export interface LogEntriesStateParams {
  entries: LogEntriesResponse['data']['entries'];
  topCursor: LogEntriesResponse['data']['topCursor'] | null;
  bottomCursor: LogEntriesResponse['data']['bottomCursor'] | null;
  centerCursor: TimeKey | null;
  isReloading: boolean;
  isLoadingMore: boolean;
} & LogEntriesResponse;
  lastLoadedTime: Date | null;
  hasMoreBeforeStart: boolean;
  hasMoreAfterEnd: boolean;
}

export interface LogEntriesCallbacks {
  fetchNewerEntries: () => Promise<TimeKey | null | undefined>;

@@ -75,32 +87,40 @@ export const logEntriesInitialCallbacks = {

export const logEntriesInitialState: LogEntriesStateParams = {
  entries: [],
  entriesStart: null,
  entriesEnd: null,
  hasMoreAfterEnd: false,
  hasMoreBeforeStart: false,
  topCursor: null,
  bottomCursor: null,
  centerCursor: null,
  isReloading: true,
  isLoadingMore: false,
  lastLoadedTime: null,
  hasMoreBeforeStart: false,
  hasMoreAfterEnd: false,
};

const cleanDuplicateItems = (entriesA: InfraLogEntry[], entriesB: InfraLogEntry[]) => {
  const gids = new Set(entriesB.map(item => item.gid));
  return entriesA.filter(item => !gids.has(item.gid));
const cleanDuplicateItems = (entriesA: LogEntry[], entriesB: LogEntry[]) => {
  const ids = new Set(entriesB.map(item => item.id));
  return entriesA.filter(item => !ids.has(item.id));
};

const shouldFetchNewEntries = ({
  prevParams,
  timeKey,
  filterQuery,
  entriesStart,
  entriesEnd,
}: FetchEntriesParams & LogEntriesStateParams & { prevParams: FetchEntriesParams }) => {
  if (!timeKey) return false;
  const shouldLoadWithNewFilter = filterQuery !== prevParams.filterQuery;
  topCursor,
  bottomCursor,
  startTimestamp,
  endTimestamp,
}: FetchEntriesParams & LogEntriesStateParams & { prevParams: FetchEntriesParams | undefined }) => {
  const shouldLoadWithNewDates = prevParams
    ? (startTimestamp !== prevParams.startTimestamp &&
        startTimestamp > prevParams.startTimestamp) ||
      (endTimestamp !== prevParams.endTimestamp && endTimestamp < prevParams.endTimestamp)
    : true;
  const shouldLoadWithNewFilter = prevParams ? filterQuery !== prevParams.filterQuery : true;
  const shouldLoadAroundNewPosition =
    !entriesStart || !entriesEnd || !timeKeyIsBetween(entriesStart, entriesEnd, timeKey);
  return shouldLoadWithNewFilter || shouldLoadAroundNewPosition;
    timeKey && (!topCursor || !bottomCursor || !timeKeyIsBetween(topCursor, bottomCursor, timeKey));

  return shouldLoadWithNewDates || shouldLoadWithNewFilter || shouldLoadAroundNewPosition;
};
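Note the asymmetry in `shouldLoadWithNewDates` above: a full reload happens only when the range narrows (the start moves later, or the end moves earlier); widening the range is instead handled incrementally by the `ExpandRange` action further down. A worked example with assumed timestamps:

```ts
const prevParams = { startTimestamp: 100_000, endTimestamp: 200_000 };

// Start moved later (100_000 -> 150_000): the loaded window no longer fits, so reload.
const narrowedStart =
  150_000 !== prevParams.startTimestamp && 150_000 > prevParams.startTimestamp; // true

// Start moved earlier (100_000 -> 50_000): keep the loaded entries and let
// ExpandRange re-enable `hasMoreBeforeStart` so older entries are paged in.
const widenedStart =
  50_000 !== prevParams.startTimestamp && 50_000 > prevParams.startTimestamp; // false
```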
enum ShouldFetchMoreEntries {

@@ -124,48 +144,105 @@ const useFetchEntriesEffect = (
  dispatch: Dispatch,
  props: LogEntriesProps
) => {
  const { getLogEntriesAround, getLogEntriesBefore, getLogEntriesAfter } = useGraphQLQueries();

  const [prevParams, cachePrevParams] = useState(props);
  const [prevParams, cachePrevParams] = useState<LogEntriesProps | undefined>();
  const [startedStreaming, setStartedStreaming] = useState(false);

  const runFetchNewEntriesRequest = async (override = {}) => {
  const runFetchNewEntriesRequest = async (overrides: Partial<LogEntriesProps> = {}) => {
    if (!props.startTimestamp || !props.endTimestamp) {
      return;
    }

    dispatch({ type: Action.FetchingNewEntries });

    try {
      const payload = await getLogEntriesAround({
        ...omit(props, 'jumpToTargetPosition'),
        ...override,
      });
      const commonFetchArgs: LogEntriesBaseRequest = {
        sourceId: overrides.sourceId || props.sourceId,
        startTimestamp: overrides.startTimestamp || props.startTimestamp,
        endTimestamp: overrides.endTimestamp || props.endTimestamp,
        query: overrides.filterQuery || props.filterQuery,
      };

      const fetchArgs: LogEntriesRequest = props.timeKey
        ? {
            ...commonFetchArgs,
            center: props.timeKey,
          }
        : {
            ...commonFetchArgs,
            before: 'last',
          };

      const { data: payload } = await fetchLogEntries(fetchArgs);
      dispatch({ type: Action.ReceiveNewEntries, payload });

      // Move position to the bottom if it's the first load.
      // Do it in the next tick to allow the `dispatch` to fire
      if (!props.timeKey && payload.bottomCursor) {
        setTimeout(() => {
          props.jumpToTargetPosition(payload.bottomCursor!);
        });
      } else if (
        props.timeKey &&
        payload.topCursor &&
        payload.bottomCursor &&
        !timeKeyIsBetween(payload.topCursor, payload.bottomCursor, props.timeKey)
      ) {
        props.jumpToTargetPosition(payload.topCursor);
      }
    } catch (e) {
      dispatch({ type: Action.ErrorOnNewEntries });
    }
  };

  const runFetchMoreEntriesRequest = async (direction: ShouldFetchMoreEntries) => {
    dispatch({ type: Action.FetchingMoreEntries });
    if (!props.startTimestamp || !props.endTimestamp) {
      return;
    }
    const getEntriesBefore = direction === ShouldFetchMoreEntries.Before;
    const timeKey = getEntriesBefore
      ? state.entries[0].key
      : state.entries[state.entries.length - 1].key;
    const getMoreLogEntries = getEntriesBefore ? getLogEntriesBefore : getLogEntriesAfter;

    // Control that cursors are correct
    if ((getEntriesBefore && !state.topCursor) || !state.bottomCursor) {
      return;
    }

    dispatch({ type: Action.FetchingMoreEntries });

    try {
      const payload = await getMoreLogEntries({ ...props, timeKey });
      const commonFetchArgs: LogEntriesBaseRequest = {
        sourceId: props.sourceId,
        startTimestamp: props.startTimestamp,
        endTimestamp: props.endTimestamp,
        query: props.filterQuery,
      };

      const fetchArgs: LogEntriesRequest = getEntriesBefore
        ? {
            ...commonFetchArgs,
            before: state.topCursor!, // We already check for nullity above
          }
        : {
            ...commonFetchArgs,
            after: state.bottomCursor,
          };

      const { data: payload } = await fetchLogEntries(fetchArgs);

      dispatch({
        type: getEntriesBefore ? Action.ReceiveEntriesBefore : Action.ReceiveEntriesAfter,
        payload,
      });
      return payload.entriesEnd;

      return payload.bottomCursor;
    } catch (e) {
      dispatch({ type: Action.ErrorOnMoreEntries });
    }
  };

  const fetchNewEntriesEffectDependencies = Object.values(
    pick(props, ['sourceId', 'filterQuery', 'timeKey'])
    pick(props, ['sourceId', 'filterQuery', 'timeKey', 'startTimestamp', 'endTimestamp'])
  );
  const fetchNewEntriesEffect = () => {
    if (props.isAutoReloading) return;
    if (props.isStreaming && prevParams) return;
    if (shouldFetchNewEntries({ ...props, ...state, prevParams })) {
      runFetchNewEntriesRequest();
    }

@@ -177,7 +254,7 @@ const useFetchEntriesEffect = (
    Object.values(pick(state, ['hasMoreBeforeStart', 'hasMoreAfterEnd'])),
  ];
  const fetchMoreEntriesEffect = () => {
    if (state.isLoadingMore || props.isAutoReloading) return;
    if (state.isLoadingMore || props.isStreaming) return;
    const direction = shouldFetchMoreEntries(props, state);
    switch (direction) {
      case ShouldFetchMoreEntries.Before:

@@ -191,30 +268,25 @@ const useFetchEntriesEffect = (

  const fetchNewerEntries = useCallback(
    throttle(() => runFetchMoreEntriesRequest(ShouldFetchMoreEntries.After), 500),
    [props, state.entriesEnd]
    [props, state.bottomCursor]
  );

  const streamEntriesEffectDependencies = [
    props.isAutoReloading,
    props.isStreaming,
    state.isLoadingMore,
    state.isReloading,
  ];
  const streamEntriesEffect = () => {
    (async () => {
      if (props.isAutoReloading && !state.isLoadingMore && !state.isReloading) {
      if (props.isStreaming && !state.isLoadingMore && !state.isReloading) {
        if (startedStreaming) {
          await new Promise(res => setTimeout(res, 5000));
          await new Promise(res => setTimeout(res, LIVE_STREAM_INTERVAL));
        } else {
          const nowKey = {
            tiebreaker: 0,
            time: Date.now(),
          };
          props.jumpToTargetPosition(nowKey);
          const endTimestamp = Date.now();
          props.jumpToTargetPosition({ tiebreaker: 0, time: endTimestamp });
          setStartedStreaming(true);
          if (state.hasMoreAfterEnd) {
            runFetchNewEntriesRequest({
              timeKey: nowKey,
            });
            runFetchNewEntriesRequest({ endTimestamp });
            return;
          }
        }

@@ -222,15 +294,41 @@ const useFetchEntriesEffect = (
      if (newEntriesEnd) {
        props.jumpToTargetPosition(newEntriesEnd);
      }
    } else if (!props.isAutoReloading) {
      } else if (!props.isStreaming) {
        setStartedStreaming(false);
      }
    })();
  };

  const expandRangeEffect = () => {
    if (!prevParams || !prevParams.startTimestamp || !prevParams.endTimestamp) {
      return;
    }

    if (props.timestampsLastUpdate === prevParams.timestampsLastUpdate) {
      return;
    }

    const shouldExpand = {
      before: props.startTimestamp < prevParams.startTimestamp,
      after: props.endTimestamp > prevParams.endTimestamp,
    };

    dispatch({ type: Action.ExpandRange, payload: shouldExpand });
  };

  const expandRangeEffectDependencies = [
    prevParams?.startTimestamp,
    prevParams?.endTimestamp,
    props.startTimestamp,
    props.endTimestamp,
    props.timestampsLastUpdate,
  ];

  useEffect(fetchNewEntriesEffect, fetchNewEntriesEffectDependencies);
  useEffect(fetchMoreEntriesEffect, fetchMoreEntriesEffectDependencies);
  useEffect(streamEntriesEffect, streamEntriesEffectDependencies);
  useEffect(expandRangeEffect, expandRangeEffectDependencies);

  return { fetchNewerEntries, checkForNewEntries: runFetchNewEntriesRequest };
};
@@ -249,44 +347,87 @@ export const useLogEntriesState: (
const logEntriesStateReducer = (prevState: LogEntriesStateParams, action: ActionObj) => {
  switch (action.type) {
    case Action.ReceiveNewEntries:
      return { ...prevState, ...action.payload, isReloading: false };
    case Action.ReceiveEntriesBefore: {
      const prevEntries = cleanDuplicateItems(prevState.entries, action.payload.entries);
      const newEntries = [...action.payload.entries, ...prevEntries];
      const { hasMoreBeforeStart, entriesStart, lastLoadedTime } = action.payload;
      const update = {
        entries: newEntries,
        isLoadingMore: false,
        hasMoreBeforeStart,
        entriesStart,
        lastLoadedTime,
      return {
        ...prevState,
        ...action.payload,
        centerCursor: getCenterCursor(action.payload.entries),
        lastLoadedTime: new Date(),
        isReloading: false,

        // Be optimistic. If any of the before/after requests comes empty, set
        // the corresponding flag to `false`
        hasMoreBeforeStart: true,
        hasMoreAfterEnd: true,
      };
    case Action.ReceiveEntriesBefore: {
      const newEntries = action.payload.entries;
      const prevEntries = cleanDuplicateItems(prevState.entries, newEntries);
      const entries = [...newEntries, ...prevEntries];

      const update = {
        entries,
        isLoadingMore: false,
        hasMoreBeforeStart: newEntries.length > 0,
        // Keep the previous cursor if request comes empty, to easily extend the range.
        topCursor: newEntries.length > 0 ? action.payload.topCursor : prevState.topCursor,
        centerCursor: getCenterCursor(entries),
        lastLoadedTime: new Date(),
      };

      return { ...prevState, ...update };
    }
    case Action.ReceiveEntriesAfter: {
      const prevEntries = cleanDuplicateItems(prevState.entries, action.payload.entries);
      const newEntries = [...prevEntries, ...action.payload.entries];
      const { hasMoreAfterEnd, entriesEnd, lastLoadedTime } = action.payload;
      const newEntries = action.payload.entries;
      const prevEntries = cleanDuplicateItems(prevState.entries, newEntries);
      const entries = [...prevEntries, ...newEntries];

      const update = {
        entries: newEntries,
        entries,
        isLoadingMore: false,
        hasMoreAfterEnd,
        entriesEnd,
        lastLoadedTime,
        hasMoreAfterEnd: newEntries.length > 0,
        // Keep the previous cursor if request comes empty, to easily extend the range.
        bottomCursor: newEntries.length > 0 ? action.payload.bottomCursor : prevState.bottomCursor,
        centerCursor: getCenterCursor(entries),
        lastLoadedTime: new Date(),
      };

      return { ...prevState, ...update };
    }
    case Action.FetchingNewEntries:
      return { ...prevState, isReloading: true };
      return {
        ...prevState,
        isReloading: true,
        entries: [],
        topCursor: null,
        bottomCursor: null,
        centerCursor: null,
        hasMoreBeforeStart: true,
        hasMoreAfterEnd: true,
      };
    case Action.FetchingMoreEntries:
      return { ...prevState, isLoadingMore: true };
    case Action.ErrorOnNewEntries:
      return { ...prevState, isReloading: false };
    case Action.ErrorOnMoreEntries:
      return { ...prevState, isLoadingMore: false };

    case Action.ExpandRange: {
      const hasMoreBeforeStart = action.payload.before ? true : prevState.hasMoreBeforeStart;
      const hasMoreAfterEnd = action.payload.after ? true : prevState.hasMoreAfterEnd;

      return {
        ...prevState,
        hasMoreBeforeStart,
        hasMoreAfterEnd,
      };
    }
    default:
      throw new Error();
  }
};

function getCenterCursor(entries: LogEntry[]): TimeKey | null {
  return entries.length > 0 ? entries[Math.floor(entries.length / 2)].cursor : null;
}

export const LogEntriesState = createContainer(useLogEntriesState);
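To illustrate the "optimistic flags" the reducer comments describe: an empty `ReceiveEntriesAfter` page flips `hasMoreAfterEnd` to `false` but keeps the previous `bottomCursor`, so a later `ExpandRange` can resume paging from the same point. A small check, assuming the reducer, initial state, and `Action` enum were exported for the demo (they are module-private above):

```ts
import { logEntriesStateReducer, logEntriesInitialState, Action } from './log_entries';

const stateWithCursor = {
  ...logEntriesInitialState,
  bottomCursor: { time: 1000, tiebreaker: 0 },
};

const next = logEntriesStateReducer(stateWithCursor, {
  type: Action.ReceiveEntriesAfter,
  payload: { entries: [], topCursor: null, bottomCursor: null },
});

console.assert(next.hasMoreAfterEnd === false); // empty page -> nothing newer right now
console.assert(next.bottomCursor!.time === 1000); // cursor kept so the range can be extended
```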
@@ -19,7 +19,7 @@ export enum FlyoutVisibility {
  visible = 'visible',
}

interface FlyoutOptionsUrlState {
export interface FlyoutOptionsUrlState {
  flyoutId?: string | null;
  flyoutVisibility?: string | null;
  surroundingLogsId?: string | null;
@@ -0,0 +1,31 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';

import { throwErrors, createPlainError } from '../../../../../common/runtime_types';

import {
  LOG_ENTRIES_HIGHLIGHTS_PATH,
  LogEntriesHighlightsRequest,
  logEntriesHighlightsRequestRT,
  logEntriesHighlightsResponseRT,
} from '../../../../../common/http_api';

export const fetchLogEntriesHighlights = async (requestArgs: LogEntriesHighlightsRequest) => {
  const response = await npStart.http.fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, {
    method: 'POST',
    body: JSON.stringify(logEntriesHighlightsRequestRT.encode(requestArgs)),
  });

  return pipe(
    logEntriesHighlightsResponseRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
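A hedged usage sketch of `fetchLogEntriesHighlights`, mirroring the call made by `useLogEntryHighlights` below; all field values are illustrative:

```ts
import { fetchLogEntriesHighlights } from './fetch_log_entries_highlights';

async function exampleHighlightsRequest() {
  const response = await fetchLogEntriesHighlights({
    sourceId: 'default',
    startTimestamp: Date.now() - 24 * 60 * 60 * 1000,
    endTimestamp: Date.now(),
    center: { time: Date.now() - 3600000, tiebreaker: 0 },
    size: 200,
    query: undefined, // optional filter query
    highlightTerms: ['error'],
  });

  // One entry list per highlight term; each entry carries its highlight ranges.
  return response.data;
}
```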
@@ -6,62 +6,47 @@

import { useEffect, useMemo, useState } from 'react';

import { getNextTimeKey, getPreviousTimeKey, TimeKey } from '../../../../common/time';
import { LogEntryHighlightsQuery } from '../../../graphql/types';
import { DependencyError, useApolloClient } from '../../../utils/apollo_context';
import { LogEntryHighlightsMap } from '../../../utils/log_entry';
import { TimeKey } from '../../../../common/time';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { logEntryHighlightsQuery } from './log_entry_highlights.gql_query';

export type LogEntryHighlights = LogEntryHighlightsQuery.Query['source']['logEntryHighlights'];
import { fetchLogEntriesHighlights } from './api/fetch_log_entries_highlights';
import { LogEntry, LogEntriesHighlightsResponse } from '../../../../common/http_api';

export const useLogEntryHighlights = (
  sourceId: string,
  sourceVersion: string | undefined,
  startKey: TimeKey | null,
  endKey: TimeKey | null,
  startTimestamp: number | null,
  endTimestamp: number | null,
  centerPoint: TimeKey | null,
  size: number,
  filterQuery: string | null,
  highlightTerms: string[]
) => {
  const apolloClient = useApolloClient();
  const [logEntryHighlights, setLogEntryHighlights] = useState<LogEntryHighlights>([]);
  const [logEntryHighlights, setLogEntryHighlights] = useState<
    LogEntriesHighlightsResponse['data']
  >([]);
  const [loadLogEntryHighlightsRequest, loadLogEntryHighlights] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        if (!apolloClient) {
          throw new DependencyError('Failed to load source: No apollo client available.');
        }
        if (!startKey || !endKey || !highlightTerms.length) {
        if (!startTimestamp || !endTimestamp || !centerPoint || !highlightTerms.length) {
          throw new Error('Skipping request: Insufficient parameters');
        }

        return await apolloClient.query<
          LogEntryHighlightsQuery.Query,
          LogEntryHighlightsQuery.Variables
        >({
          fetchPolicy: 'no-cache',
          query: logEntryHighlightsQuery,
          variables: {
            sourceId,
            startKey: getPreviousTimeKey(startKey), // interval boundaries are exclusive
            endKey: getNextTimeKey(endKey), // interval boundaries are exclusive
            filterQuery,
            highlights: [
              {
                query: highlightTerms[0],
                countBefore: 1,
                countAfter: 1,
              },
            ],
          },
        return await fetchLogEntriesHighlights({
          sourceId,
          startTimestamp,
          endTimestamp,
          center: centerPoint,
          size,
          query: filterQuery || undefined,
          highlightTerms,
        });
      },
      onResolve: response => {
        setLogEntryHighlights(response.data.source.logEntryHighlights);
        setLogEntryHighlights(response.data);
      },
    },
    [apolloClient, sourceId, startKey, endKey, filterQuery, highlightTerms]
    [sourceId, startTimestamp, endTimestamp, centerPoint, size, filterQuery, highlightTerms]
  );

  useEffect(() => {

@@ -71,24 +56,31 @@ export const useLogEntryHighlights = (
  useEffect(() => {
    if (
      highlightTerms.filter(highlightTerm => highlightTerm.length > 0).length &&
      startKey &&
      endKey
      startTimestamp &&
      endTimestamp
    ) {
      loadLogEntryHighlights();
    } else {
      setLogEntryHighlights([]);
    }
  }, [endKey, filterQuery, highlightTerms, loadLogEntryHighlights, sourceVersion, startKey]);
  }, [
    endTimestamp,
    filterQuery,
    highlightTerms,
    loadLogEntryHighlights,
    sourceVersion,
    startTimestamp,
  ]);

  const logEntryHighlightsById = useMemo(
    () =>
      logEntryHighlights.reduce<LogEntryHighlightsMap>(
        (accumulatedLogEntryHighlightsById, { entries }) => {
          return entries.reduce<LogEntryHighlightsMap>((singleHighlightLogEntriesById, entry) => {
            const highlightsForId = singleHighlightLogEntriesById[entry.gid] || [];
      logEntryHighlights.reduce<Record<string, LogEntry[]>>(
        (accumulatedLogEntryHighlightsById, highlightData) => {
          return highlightData.entries.reduce((singleHighlightLogEntriesById, entry) => {
            const highlightsForId = singleHighlightLogEntriesById[entry.id] || [];
            return {
              ...singleHighlightLogEntriesById,
              [entry.gid]: [...highlightsForId, entry],
              [entry.id]: [...highlightsForId, entry],
            };
          }, accumulatedLogEntryHighlightsById);
        },
@@ -6,39 +6,38 @@

import createContainer from 'constate';
import { useState, useContext } from 'react';
import { useThrottle } from 'react-use';
import { useLogEntryHighlights } from './log_entry_highlights';
import { useLogSummaryHighlights } from './log_summary_highlights';
import { useNextAndPrevious } from './next_and_previous';
import { useLogSummaryBufferInterval } from '../log_summary';
import { LogViewConfiguration } from '../log_view_configuration';
import { LogPositionState } from '../log_position';
import { TimeKey } from '../../../../common/time';

const FETCH_THROTTLE_INTERVAL = 3000;

interface UseLogHighlightsStateProps {
  sourceId: string;
  sourceVersion: string | undefined;
  centerCursor: TimeKey | null;
  size: number;
  filterQuery: string | null;
}

export const useLogHighlightsState = ({
  sourceId,
  sourceVersion,
  entriesStart,
  entriesEnd,
  centerCursor,
  size,
  filterQuery,
}: {
  sourceId: string;
  sourceVersion: string | undefined;
  entriesStart: TimeKey | null;
  entriesEnd: TimeKey | null;
  filterQuery: string | null;
}) => {
}: UseLogHighlightsStateProps) => {
  const [highlightTerms, setHighlightTerms] = useState<string[]>([]);
  const { visibleMidpoint, jumpToTargetPosition } = useContext(LogPositionState.Context);
  const { intervalSize: summaryIntervalSize } = useContext(LogViewConfiguration.Context);
  const {
    start: summaryStart,
    end: summaryEnd,
    bucketSize: summaryBucketSize,
  } = useLogSummaryBufferInterval(
    visibleMidpoint ? visibleMidpoint.time : null,
    summaryIntervalSize
  const { visibleMidpoint, jumpToTargetPosition, startTimestamp, endTimestamp } = useContext(
    LogPositionState.Context
  );

  const throttledStartTimestamp = useThrottle(startTimestamp, FETCH_THROTTLE_INTERVAL);
  const throttledEndTimestamp = useThrottle(endTimestamp, FETCH_THROTTLE_INTERVAL);

  const {
    logEntryHighlights,
    logEntryHighlightsById,

@@ -46,8 +45,10 @@ export const useLogHighlightsState = ({
  } = useLogEntryHighlights(
    sourceId,
    sourceVersion,
    entriesStart,
    entriesEnd,
    throttledStartTimestamp,
    throttledEndTimestamp,
    centerCursor,
    size,
    filterQuery,
    highlightTerms
  );

@@ -55,9 +56,8 @@ export const useLogHighlightsState = ({
  const { logSummaryHighlights, loadLogSummaryHighlightsRequest } = useLogSummaryHighlights(
    sourceId,
    sourceVersion,
    summaryStart,
    summaryEnd,
    summaryBucketSize,
    throttledStartTimestamp,
    throttledEndTimestamp,
    filterQuery,
    highlightTerms
  );
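While streaming, `endTimestamp` advances every few seconds, so the two `useThrottle` calls (from `react-use`) cap how often the highlight requests above re-fire to once per `FETCH_THROTTLE_INTERVAL`. A sketch of the pattern in isolation:

```ts
import { useThrottle } from 'react-use';

function useThrottledRange(startTimestamp: number | null, endTimestamp: number | null) {
  // `useThrottle` returns the latest value, but updates it at most once per
  // 3000 ms, matching FETCH_THROTTLE_INTERVAL above.
  const throttledStartTimestamp = useThrottle(startTimestamp, 3000);
  const throttledEndTimestamp = useThrottle(endTimestamp, 3000);
  return { throttledStartTimestamp, throttledEndTimestamp };
}
```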
@@ -10,13 +10,13 @@ import { debounce } from 'lodash';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights';
import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api';
import { useBucketSize } from '../log_summary/bucket_size';

export const useLogSummaryHighlights = (
  sourceId: string,
  sourceVersion: string | undefined,
  start: number | null,
  end: number | null,
  bucketSize: number,
  startTimestamp: number | null,
  endTimestamp: number | null,
  filterQuery: string | null,
  highlightTerms: string[]
) => {

@@ -24,18 +24,20 @@ export const useLogSummaryHighlights = (
    LogEntriesSummaryHighlightsResponse['data']
  >([]);

  const bucketSize = useBucketSize(startTimestamp, endTimestamp);

  const [loadLogSummaryHighlightsRequest, loadLogSummaryHighlights] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        if (!start || !end || !highlightTerms.length) {
        if (!startTimestamp || !endTimestamp || !bucketSize || !highlightTerms.length) {
          throw new Error('Skipping request: Insufficient parameters');
        }

        return await fetchLogSummaryHighlights({
          sourceId,
          startDate: start,
          endDate: end,
          startTimestamp,
          endTimestamp,
          bucketSize,
          query: filterQuery,
          highlightTerms,

@@ -45,7 +47,7 @@ export const useLogSummaryHighlights = (
        setLogSummaryHighlights(response.data);
      },
    },
    [sourceId, start, end, bucketSize, filterQuery, highlightTerms]
    [sourceId, startTimestamp, endTimestamp, bucketSize, filterQuery, highlightTerms]
  );

  const debouncedLoadSummaryHighlights = useMemo(() => debounce(loadLogSummaryHighlights, 275), [

@@ -57,7 +59,11 @@ export const useLogSummaryHighlights = (
  }, [highlightTerms]);

  useEffect(() => {
    if (highlightTerms.filter(highlightTerm => highlightTerm.length > 0).length && start && end) {
    if (
      highlightTerms.filter(highlightTerm => highlightTerm.length > 0).length &&
      startTimestamp &&
      endTimestamp
    ) {
      debouncedLoadSummaryHighlights();
    } else {
      setLogSummaryHighlights([]);

@@ -65,11 +71,11 @@ export const useLogSummaryHighlights = (
  }, [
    bucketSize,
    debouncedLoadSummaryHighlights,
    end,
    filterQuery,
    highlightTerms,
    sourceVersion,
    start,
    startTimestamp,
    endTimestamp,
  ]);

  return {
@@ -13,7 +13,7 @@ import {
  getLogEntryIndexBeforeTime,
  getUniqueLogEntryKey,
} from '../../../utils/log_entry';
import { LogEntryHighlights } from './log_entry_highlights';
import { LogEntriesHighlightsResponse } from '../../../../common/http_api';

export const useNextAndPrevious = ({
  highlightTerms,

@@ -23,7 +23,7 @@ export const useNextAndPrevious = ({
}: {
  highlightTerms: string[];
  jumpToTargetPosition: (target: TimeKey) => void;
  logEntryHighlights: LogEntryHighlights | undefined;
  logEntryHighlights: LogEntriesHighlightsResponse['data'] | undefined;
  visibleMidpoint: TimeKey | null;
}) => {
  const [currentTimeKey, setCurrentTimeKey] = useState<UniqueTimeKey | null>(null);
@@ -6,10 +6,20 @@

import { useState, useMemo, useEffect, useCallback } from 'react';
import createContainer from 'constate';
import { useSetState } from 'react-use';
import { TimeKey } from '../../../../common/time';
import { datemathToEpochMillis, isValidDatemath } from '../../../utils/datemath';

type TimeKeyOrNull = TimeKey | null;

interface DateRange {
  startDateExpression: string;
  endDateExpression: string;
  startTimestamp: number;
  endTimestamp: number;
  timestampsLastUpdate: number;
}

interface VisiblePositions {
  startKey: TimeKeyOrNull;
  middleKey: TimeKeyOrNull;

@@ -19,24 +29,35 @@ interface VisiblePositions {
}

export interface LogPositionStateParams {
  isInitialized: boolean;
  targetPosition: TimeKeyOrNull;
  isAutoReloading: boolean;
  isStreaming: boolean;
  firstVisiblePosition: TimeKeyOrNull;
  pagesBeforeStart: number;
  pagesAfterEnd: number;
  visibleMidpoint: TimeKeyOrNull;
  visibleMidpointTime: number | null;
  visibleTimeInterval: { start: number; end: number } | null;
  startDateExpression: string;
  endDateExpression: string;
  startTimestamp: number | null;
  endTimestamp: number | null;
  timestampsLastUpdate: number;
}

export interface LogPositionCallbacks {
  initialize: () => void;
  jumpToTargetPosition: (pos: TimeKeyOrNull) => void;
  jumpToTargetPositionTime: (time: number) => void;
  reportVisiblePositions: (visPos: VisiblePositions) => void;
  startLiveStreaming: () => void;
  stopLiveStreaming: () => void;
  updateDateRange: (newDateRange: Partial<DateRange>) => void;
}

const DEFAULT_DATE_RANGE = { startDateExpression: 'now-1d', endDateExpression: 'now' };
const DESIRED_BUFFER_PAGES = 2;

const useVisibleMidpoint = (middleKey: TimeKeyOrNull, targetPosition: TimeKeyOrNull) => {
  // Of the two dependencies `middleKey` and `targetPosition`, return
  // whichever one was the most recently updated. This allows the UI controls

@@ -60,8 +81,18 @@ const useVisibleMidpoint = (middleKey: TimeKeyOrNull, targetPosition: TimeKeyOrN
};

export const useLogPositionState: () => LogPositionStateParams & LogPositionCallbacks = () => {
  // Flag to determine if `LogPositionState` has been fully initialized.
  //
  // When the page loads, there might be initial state in the URL. We want to
  // prevent the entries from showing until we have processed that initial
  // state. That prevents double fetching.
  const [isInitialized, setInitialized] = useState<boolean>(false);
  const initialize = useCallback(() => {
    setInitialized(true);
  }, [setInitialized]);

  const [targetPosition, jumpToTargetPosition] = useState<TimeKey | null>(null);
  const [isAutoReloading, setIsAutoReloading] = useState(false);
  const [isStreaming, setIsStreaming] = useState(false);
  const [visiblePositions, reportVisiblePositions] = useState<VisiblePositions>({
    endKey: null,
    middleKey: null,

@@ -70,6 +101,15 @@ export const useLogPositionState: () => LogPositionStateParams & LogPositionCall
    pagesAfterEnd: Infinity,
  });

  // We group the `startDate` and `endDate` values in the same object to be able
  // to set both at the same time, saving a re-render
  const [dateRange, setDateRange] = useSetState<DateRange>({
    ...DEFAULT_DATE_RANGE,
    startTimestamp: datemathToEpochMillis(DEFAULT_DATE_RANGE.startDateExpression)!,
    endTimestamp: datemathToEpochMillis(DEFAULT_DATE_RANGE.endDateExpression, 'up')!,
    timestampsLastUpdate: Date.now(),
  });

  const { startKey, middleKey, endKey, pagesBeforeStart, pagesAfterEnd } = visiblePositions;

  const visibleMidpoint = useVisibleMidpoint(middleKey, targetPosition);

@@ -79,26 +119,87 @@ export const useLogPositionState: () => LogPositionStateParams & LogPositionCall
    [startKey, endKey]
  );

  // Allow setting `startDate` and `endDate` separately, or together
  const updateDateRange = useCallback(
    (newDateRange: Partial<DateRange>) => {
      // Prevent unnecessary re-renders
      if (!('startDateExpression' in newDateRange) && !('endDateExpression' in newDateRange)) {
        return;
      }

      const nextStartDateExpression =
        newDateRange.startDateExpression || dateRange.startDateExpression;
      const nextEndDateExpression = newDateRange.endDateExpression || dateRange.endDateExpression;

      if (!isValidDatemath(nextStartDateExpression) || !isValidDatemath(nextEndDateExpression)) {
        return;
      }

      // Dates are valid, so the function cannot return `null`
      const nextStartTimestamp = datemathToEpochMillis(nextStartDateExpression)!;
      const nextEndTimestamp = datemathToEpochMillis(nextEndDateExpression, 'up')!;

      // Reset the target position if it doesn't fall within the new range.
      if (
        targetPosition &&
        (nextStartTimestamp > targetPosition.time || nextEndTimestamp < targetPosition.time)
      ) {
        jumpToTargetPosition(null);
      }

      setDateRange({
        ...newDateRange,
        startTimestamp: nextStartTimestamp,
        endTimestamp: nextEndTimestamp,
        timestampsLastUpdate: Date.now(),
      });
    },
    [setDateRange, dateRange, targetPosition]
  );

  // `endTimestamp` update conditions
  useEffect(() => {
    if (dateRange.endDateExpression !== 'now') {
      return;
    }

    // User is close to the bottom edge of the scroll.
    if (visiblePositions.pagesAfterEnd <= DESIRED_BUFFER_PAGES) {
      setDateRange({
        endTimestamp: datemathToEpochMillis(dateRange.endDateExpression, 'up')!,
        timestampsLastUpdate: Date.now(),
      });
    }
  }, [dateRange.endDateExpression, visiblePositions, setDateRange]);

  const state = {
    isInitialized,
    targetPosition,
    isAutoReloading,
    isStreaming,
    firstVisiblePosition: startKey,
    pagesBeforeStart,
    pagesAfterEnd,
    visibleMidpoint,
    visibleMidpointTime: visibleMidpoint ? visibleMidpoint.time : null,
    visibleTimeInterval,
    ...dateRange,
  };

  const callbacks = {
    initialize,
    jumpToTargetPosition,
    jumpToTargetPositionTime: useCallback(
      (time: number) => jumpToTargetPosition({ tiebreaker: 0, time }),
      [jumpToTargetPosition]
    ),
    reportVisiblePositions,
    startLiveStreaming: useCallback(() => setIsAutoReloading(true), [setIsAutoReloading]),
    stopLiveStreaming: useCallback(() => setIsAutoReloading(false), [setIsAutoReloading]),
    startLiveStreaming: useCallback(() => {
      setIsStreaming(true);
      jumpToTargetPosition(null);
      updateDateRange({ startDateExpression: 'now-1d', endDateExpression: 'now' });
    }, [setIsStreaming, updateDateRange]),
    stopLiveStreaming: useCallback(() => setIsStreaming(false), [setIsStreaming]),
    updateDateRange,
  };

  return { ...state, ...callbacks };
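`datemathToEpochMillis` and `isValidDatemath` come from the new datemath utilities this commit relies on. A sketch of plausible semantics, assuming they wrap `@elastic/datemath`; the real implementation may differ in detail:

```ts
import dateMath from '@elastic/datemath';

export function datemathToEpochMillis(
  expression: string,
  roundBy: 'down' | 'up' = 'down'
): number | null {
  // Rounding 'up' matters for `end` expressions like 'now/d', which should
  // resolve to the end of the interval rather than its start.
  const parsed = dateMath.parse(expression, { roundUp: roundBy === 'up' });
  return parsed && parsed.isValid() ? parsed.valueOf() : null;
}

export function isValidDatemath(expression: string): boolean {
  const parsed = dateMath.parse(expression);
  return Boolean(parsed && parsed.isValid());
}
```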
@@ -9,31 +9,40 @@ import React, { useContext, useMemo } from 'react';
import { pickTimeKey } from '../../../../common/time';
import { replaceStateKeyInQueryString, UrlStateContainer } from '../../../utils/url_state';
import { LogPositionState, LogPositionStateParams } from './log_position_state';
import { isValidDatemath, datemathToEpochMillis } from '../../../utils/datemath';

/**
 * Url State
 */

interface LogPositionUrlState {
  position: LogPositionStateParams['visibleMidpoint'] | undefined;
export interface LogPositionUrlState {
  position?: LogPositionStateParams['visibleMidpoint'];
  streamLive: boolean;
  start?: string;
  end?: string;
}

const ONE_HOUR = 3600000;

export const WithLogPositionUrlState = () => {
  const {
    visibleMidpoint,
    isAutoReloading,
    isStreaming,
    jumpToTargetPosition,
    jumpToTargetPositionTime,
    startLiveStreaming,
    stopLiveStreaming,
    startDateExpression,
    endDateExpression,
    updateDateRange,
    initialize,
  } = useContext(LogPositionState.Context);
  const urlState = useMemo(
    () => ({
      position: visibleMidpoint ? pickTimeKey(visibleMidpoint) : null,
      streamLive: isAutoReloading,
      streamLive: isStreaming,
      start: startDateExpression,
      end: endDateExpression,
    }),
    [visibleMidpoint, isAutoReloading]
    [visibleMidpoint, isStreaming, startDateExpression, endDateExpression]
  );
  return (
    <UrlStateContainer

@@ -41,28 +50,69 @@ export const WithLogPositionUrlState = () => {
      urlStateKey="logPosition"
      mapToUrlState={mapToUrlState}
      onChange={(newUrlState: LogPositionUrlState | undefined) => {
        if (newUrlState && newUrlState.position) {
        if (!newUrlState) {
          return;
        }

        if (newUrlState.start || newUrlState.end) {
          updateDateRange({
            startDateExpression: newUrlState.start,
            endDateExpression: newUrlState.end,
          });
        }

        if (newUrlState.position) {
          jumpToTargetPosition(newUrlState.position);
        }
        if (newUrlState && newUrlState.streamLive) {

        if (newUrlState.streamLive) {
          startLiveStreaming();
        } else if (
          newUrlState &&
          typeof newUrlState.streamLive !== 'undefined' &&
          !newUrlState.streamLive
        ) {
        } else if (typeof newUrlState.streamLive !== 'undefined' && !newUrlState.streamLive) {
          stopLiveStreaming();
        }
      }}
      onInitialize={(initialUrlState: LogPositionUrlState | undefined) => {
        if (initialUrlState && initialUrlState.position) {
          jumpToTargetPosition(initialUrlState.position);
        } else {
          jumpToTargetPositionTime(Date.now());
        }
        if (initialUrlState && initialUrlState.streamLive) {
          startLiveStreaming();
        if (initialUrlState) {
          const initialPosition = initialUrlState.position;
          let initialStartDateExpression = initialUrlState.start;
          let initialEndDateExpression = initialUrlState.end;

          if (!initialPosition) {
            initialStartDateExpression = initialStartDateExpression || 'now-1d';
            initialEndDateExpression = initialEndDateExpression || 'now';
          } else {
            const initialStartTimestamp = initialStartDateExpression
              ? datemathToEpochMillis(initialStartDateExpression)
              : undefined;
            const initialEndTimestamp = initialEndDateExpression
              ? datemathToEpochMillis(initialEndDateExpression, 'up')
              : undefined;

            // Adjust the start-end range if the target position falls outside or if it's not set.
            if (!initialStartTimestamp || initialStartTimestamp > initialPosition.time) {
              initialStartDateExpression = new Date(initialPosition.time - ONE_HOUR).toISOString();
            }

            if (!initialEndTimestamp || initialEndTimestamp < initialPosition.time) {
              initialEndDateExpression = new Date(initialPosition.time + ONE_HOUR).toISOString();
            }

            jumpToTargetPosition(initialPosition);
          }

          if (initialStartDateExpression || initialEndDateExpression) {
            updateDateRange({
              startDateExpression: initialStartDateExpression,
              endDateExpression: initialEndDateExpression,
            });
          }

          if (initialUrlState.streamLive) {
            startLiveStreaming();
          }
        }

        initialize();
      }}
    />
  );

@@ -73,6 +123,8 @@ const mapToUrlState = (value: any): LogPositionUrlState | undefined =>
    ? {
        position: mapToPositionUrlState(value.position),
        streamLive: mapToStreamLiveUrlState(value.streamLive),
        start: mapToDate(value.start),
        end: mapToDate(value.end),
      }
    : undefined;

@@ -83,6 +135,7 @@ const mapToPositionUrlState = (value: any) =>

const mapToStreamLiveUrlState = (value: any) => (typeof value === 'boolean' ? value : false);

const mapToDate = (value: any) => (isValidDatemath(value) ? value : undefined);
export const replaceLogPositionInQueryString = (time: number) =>
  Number.isNaN(time)
    ? (value: string) => value
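For reference, a hypothetical `logPosition` URL fragment and how the mapping above would treat it:

```ts
// ?logPosition=(start:'now-1d',end:now,streamLive:!f,position:(time:1577836800000,tiebreaker:0))
const mapped: LogPositionUrlState = {
  position: { time: 1577836800000, tiebreaker: 0 },
  streamLive: false,
  start: 'now-1d', // kept: valid datemath
  end: 'now',
};
// mapToDate drops anything that is not valid datemath (-> undefined), and
// onInitialize widens the range by ONE_HOUR around `position` when the
// parsed range would not contain it.
```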
@@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { useMemo } from 'react';

const SUMMARY_BUCKET_COUNT = 100;

export function useBucketSize(
  startTimestamp: number | null,
  endTimestamp: number | null
): number | null {
  const bucketSize = useMemo(() => {
    if (!startTimestamp || !endTimestamp) {
      return null;
    }
    return (endTimestamp - startTimestamp) / SUMMARY_BUCKET_COUNT;
  }, [startTimestamp, endTimestamp]);

  return bucketSize;
}
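The bucket size is simply the visible range divided into `SUMMARY_BUCKET_COUNT` slices; illustrative arithmetic:

```ts
const startTimestamp = Date.parse('2020-01-01T00:00:00Z');
const endTimestamp = Date.parse('2020-01-01T01:00:00Z');

// A one-hour range yields 100 buckets of 36 seconds each.
const bucketSize = (endTimestamp - startTimestamp) / 100; // SUMMARY_BUCKET_COUNT
console.assert(bucketSize === 36_000); // milliseconds
```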
@@ -5,5 +5,4 @@
 */

export * from './log_summary';
export * from './use_log_summary_buffer_interval';
export * from './with_summary';
@@ -9,6 +9,7 @@ import { renderHook } from '@testing-library/react-hooks';
import { useLogSummary } from './log_summary';

import { fetchLogSummary } from './api/fetch_log_summary';
import { datemathToEpochMillis } from '../../../utils/datemath';

// Typescript doesn't know that `fetchLogSummary` is a jest mock.
// We use a second variable with a type cast to help the compiler further down the line.

@@ -21,20 +22,26 @@ describe('useLogSummary hook', () => {
  });

  it('provides an empty list of buckets by default', () => {
    const { result } = renderHook(() => useLogSummary('SOURCE_ID', null, 1000, null));
    const { result } = renderHook(() => useLogSummary('SOURCE_ID', null, null, null));
    expect(result.current.buckets).toEqual([]);
  });

  it('queries for new summary buckets when the source id changes', async () => {
    const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
    const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
    const { startTimestamp, endTimestamp } = createMockDateRange();

    const firstMockResponse = createMockResponse([
      { start: startTimestamp, end: endTimestamp, entriesCount: 1 },
    ]);
    const secondMockResponse = createMockResponse([
      { start: startTimestamp, end: endTimestamp, entriesCount: 2 },
    ]);

    fetchLogSummaryMock
      .mockResolvedValueOnce(firstMockResponse)
      .mockResolvedValueOnce(secondMockResponse);

    const { result, waitForNextUpdate, rerender } = renderHook(
      ({ sourceId }) => useLogSummary(sourceId, 100000, 1000, null),
      ({ sourceId }) => useLogSummary(sourceId, startTimestamp, endTimestamp, null),
      {
        initialProps: { sourceId: 'INITIAL_SOURCE_ID' },
      }

@@ -63,15 +70,21 @@ describe('useLogSummary hook', () => {
  });

  it('queries for new summary buckets when the filter query changes', async () => {
    const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
    const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
    const { startTimestamp, endTimestamp } = createMockDateRange();

    const firstMockResponse = createMockResponse([
      { start: startTimestamp, end: endTimestamp, entriesCount: 1 },
    ]);
    const secondMockResponse = createMockResponse([
      { start: startTimestamp, end: endTimestamp, entriesCount: 2 },
    ]);

    fetchLogSummaryMock
      .mockResolvedValueOnce(firstMockResponse)
      .mockResolvedValueOnce(secondMockResponse);

    const { result, waitForNextUpdate, rerender } = renderHook(
      ({ filterQuery }) => useLogSummary('SOURCE_ID', 100000, 1000, filterQuery),
      ({ filterQuery }) => useLogSummary('SOURCE_ID', startTimestamp, endTimestamp, filterQuery),
      {
        initialProps: { filterQuery: 'INITIAL_FILTER_QUERY' },
      }

@@ -99,15 +112,17 @@ describe('useLogSummary hook', () => {
    expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
  });

  it('queries for new summary buckets when the midpoint time changes', async () => {
  it('queries for new summary buckets when the start and end date changes', async () => {
    fetchLogSummaryMock
      .mockResolvedValueOnce(createMockResponse([]))
      .mockResolvedValueOnce(createMockResponse([]));

    const firstRange = createMockDateRange();
    const { waitForNextUpdate, rerender } = renderHook(
      ({ midpointTime }) => useLogSummary('SOURCE_ID', midpointTime, 1000, null),
      ({ startTimestamp, endTimestamp }) =>
        useLogSummary('SOURCE_ID', startTimestamp, endTimestamp, null),
      {
        initialProps: { midpointTime: 100000 },
        initialProps: firstRange,
      }
    );

@@ -115,54 +130,21 @@ describe('useLogSummary hook', () => {
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(1);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        startDate: 98500,
        endDate: 101500,
        startTimestamp: firstRange.startTimestamp,
        endTimestamp: firstRange.endTimestamp,
      })
    );

    rerender({ midpointTime: 200000 });
    const secondRange = createMockDateRange('now-20s', 'now');

    rerender(secondRange);
    await waitForNextUpdate();

    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(2);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        startDate: 198500,
        endDate: 201500,
      })
    );
  });

  it('queries for new summary buckets when the interval size changes', async () => {
    fetchLogSummaryMock
      .mockResolvedValueOnce(createMockResponse([]))
      .mockResolvedValueOnce(createMockResponse([]));

    const { waitForNextUpdate, rerender } = renderHook(
      ({ intervalSize }) => useLogSummary('SOURCE_ID', 100000, intervalSize, null),
      {
        initialProps: { intervalSize: 1000 },
      }
    );

    await waitForNextUpdate();
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(1);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        bucketSize: 10,
        startDate: 98500,
        endDate: 101500,
      })
    );

    rerender({ intervalSize: 2000 });
    await waitForNextUpdate();

    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(2);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        bucketSize: 20,
        startDate: 97000,
        endDate: 103000,
        startTimestamp: secondRange.startTimestamp,
        endTimestamp: secondRange.endTimestamp,
      })
    );
  });

@@ -171,3 +153,12 @@
const createMockResponse = (
  buckets: Array<{ start: number; end: number; entriesCount: number }>
) => ({ data: { buckets, start: Number.NEGATIVE_INFINITY, end: Number.POSITIVE_INFINITY } });

const createMockDateRange = (startDate = 'now-10s', endDate = 'now') => {
  return {
    startDate,
    endDate,
    startTimestamp: datemathToEpochMillis(startDate)!,
    endTimestamp: datemathToEpochMillis(endDate, 'up')!,
  };
};
@@ -7,34 +7,31 @@
import { useState } from 'react';

import { useCancellableEffect } from '../../../utils/cancellable_effect';
import { useLogSummaryBufferInterval } from './use_log_summary_buffer_interval';
import { fetchLogSummary } from './api/fetch_log_summary';
import { LogEntriesSummaryResponse } from '../../../../common/http_api';
import { useBucketSize } from './bucket_size';

export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets'];

export const useLogSummary = (
  sourceId: string,
  midpointTime: number | null,
  intervalSize: number,
  startTimestamp: number | null,
  endTimestamp: number | null,
  filterQuery: string | null
) => {
  const [logSummaryBuckets, setLogSummaryBuckets] = useState<LogSummaryBuckets>([]);
  const { start: bufferStart, end: bufferEnd, bucketSize } = useLogSummaryBufferInterval(
    midpointTime,
    intervalSize
  );
  const bucketSize = useBucketSize(startTimestamp, endTimestamp);

  useCancellableEffect(
    getIsCancelled => {
      if (bufferStart === null || bufferEnd === null) {
      if (startTimestamp === null || endTimestamp === null || bucketSize === null) {
        return;
      }

      fetchLogSummary({
        sourceId,
        startDate: bufferStart,
        endDate: bufferEnd,
        startTimestamp,
        endTimestamp,
        bucketSize,
        query: filterQuery,
      }).then(response => {

@@ -43,12 +40,12 @@ export const useLogSummary = (
      }
      });
    },
    [sourceId, filterQuery, bufferStart, bufferEnd, bucketSize]
    [sourceId, filterQuery, startTimestamp, endTimestamp, bucketSize]
  );

  return {
    buckets: logSummaryBuckets,
    start: bufferStart,
    end: bufferEnd,
    start: startTimestamp,
    end: endTimestamp,
  };
};
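
A minimal sketch of a consumer of the reworked hook; the component name, the fixed one-hour range, and the inline import note are illustrative assumptions, not part of this commit:

// assumes: import React from 'react'; import { useLogSummary } from './log_summary';
const LogSummaryBucketCount: React.FC<{ sourceId: string }> = ({ sourceId }) => {
  // Hypothetical fixed range: the last hour, expressed as epoch milliseconds.
  const endTimestamp = Date.now();
  const startTimestamp = endTimestamp - 60 * 60 * 1000;
  const { buckets } = useLogSummary(sourceId, startTimestamp, endTimestamp, null);
  return <span>{buckets.length} buckets</span>;
};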

@@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { useMemo } from 'react';

const LOAD_BUCKETS_PER_PAGE = 100;
const UNKNOWN_BUFFER_INTERVAL = {
  start: null,
  end: null,
  bucketSize: 0,
};

export const useLogSummaryBufferInterval = (midpointTime: number | null, intervalSize: number) => {
  return useMemo(() => {
    if (midpointTime === null || intervalSize <= 0) {
      return UNKNOWN_BUFFER_INTERVAL;
    }

    const halfIntervalSize = intervalSize / 2;

    return {
      start: (Math.floor((midpointTime - halfIntervalSize) / intervalSize) - 0.5) * intervalSize,
      end: (Math.ceil((midpointTime + halfIntervalSize) / intervalSize) + 0.5) * intervalSize,
      bucketSize: intervalSize / LOAD_BUCKETS_PER_PAGE,
    };
  }, [midpointTime, intervalSize]);
};
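
For reference, this deleted buffer arithmetic is what produced the 98500/101500 and bucketSize expectations in the old tests above; worked through for midpointTime = 100000 and intervalSize = 1000:

// start      = (Math.floor((100000 - 500) / 1000) - 0.5) * 1000 = (99 - 0.5) * 1000  = 98500
// end        = (Math.ceil((100000 + 500) / 1000) + 0.5) * 1000  = (101 + 0.5) * 1000 = 101500
// bucketSize = intervalSize / LOAD_BUCKETS_PER_PAGE             = 1000 / 100         = 10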

@@ -5,14 +5,16 @@
 */

import { useContext } from 'react';
import { useThrottle } from 'react-use';

import { RendererFunction } from '../../../utils/typed_react';
import { Source } from '../../source';
import { LogViewConfiguration } from '../log_view_configuration';
import { LogSummaryBuckets, useLogSummary } from './log_summary';
import { LogFilterState } from '../log_filter';
import { LogPositionState } from '../log_position';

const FETCH_THROTTLE_INTERVAL = 3000;

export const WithSummary = ({
  children,
}: {

@@ -22,15 +24,18 @@ export const WithSummary = ({
  end: number | null;
}>;
}) => {
  const { intervalSize } = useContext(LogViewConfiguration.Context);
  const { sourceId } = useContext(Source.Context);
  const { filterQuery } = useContext(LogFilterState.Context);
  const { visibleMidpointTime } = useContext(LogPositionState.Context);
  const { startTimestamp, endTimestamp } = useContext(LogPositionState.Context);

  // Keep it reasonably updated for the `now` case, but don't reload all the time when the user scrolls
  const throttledStartTimestamp = useThrottle(startTimestamp, FETCH_THROTTLE_INTERVAL);
  const throttledEndTimestamp = useThrottle(endTimestamp, FETCH_THROTTLE_INTERVAL);

  const { buckets, start, end } = useLogSummary(
    sourceId,
    visibleMidpointTime,
    intervalSize,
    throttledStartTimestamp,
    throttledEndTimestamp,
    filterQuery
  );
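
The throttling keeps a `now`-anchored range from refetching on every scroll tick. A simplified sketch of the behavior this code assumes from react-use's `useThrottle` (not its actual implementation):

import { useEffect, useRef, useState } from 'react';

// The returned value trails the input, updating at most once per `ms` milliseconds.
function useThrottleSketch<T>(value: T, ms: number): T {
  const [throttled, setThrottled] = useState(value);
  const lastUpdate = useRef(Date.now());
  useEffect(() => {
    const remaining = ms - (Date.now() - lastUpdate.current);
    const id = setTimeout(() => {
      lastUpdate.current = Date.now();
      setThrottled(value);
    }, Math.max(remaining, 0));
    return () => clearTimeout(id);
  }, [value, ms]);
  return throttled;
}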

@@ -45,35 +45,10 @@ describe('useLogViewConfiguration hook', () => {
    });
  });

  describe('intervalSize state', () => {
    it('has a default value', () => {
      const { getLastHookValue } = mountHook(() => useLogViewConfiguration().intervalSize);

      expect(getLastHookValue()).toEqual(86400000);
    });

    it('can be updated', () => {
      const { act, getLastHookValue } = mountHook(() => useLogViewConfiguration());

      act(({ setIntervalSize }) => {
        setIntervalSize(90000000);
      });

      expect(getLastHookValue().intervalSize).toEqual(90000000);
    });
  });

  it('provides the available text scales', () => {
    const { getLastHookValue } = mountHook(() => useLogViewConfiguration().availableTextScales);

    expect(getLastHookValue()).toEqual(expect.any(Array));
    expect(getLastHookValue().length).toBeGreaterThan(0);
  });

  it('provides the available interval sizes', () => {
    const { getLastHookValue } = mountHook(() => useLogViewConfiguration().availableIntervalSizes);

    expect(getLastHookValue()).toEqual(expect.any(Array));
    expect(getLastHookValue().length).toBeGreaterThan(0);
  });
});

@@ -4,7 +4,6 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import { i18n } from '@kbn/i18n';
import createContainer from 'constate';
import { useState } from 'react';

@@ -17,18 +16,12 @@ export const useLogViewConfiguration = () => {
  // text wrap
  const [textWrap, setTextWrap] = useState<boolean>(true);

  // minimap interval
  const [intervalSize, setIntervalSize] = useState<number>(1000 * 60 * 60 * 24);

  return {
    availableIntervalSizes,
    availableTextScales,
    setTextScale,
    setTextWrap,
    textScale,
    textWrap,
    intervalSize,
    setIntervalSize,
  };
};

@@ -39,42 +32,3 @@ export const LogViewConfiguration = createContainer(useLogViewConfiguration);
 */

export const availableTextScales: TextScale[] = ['large', 'medium', 'small'];

export const availableIntervalSizes = [
  {
    label: i18n.translate('xpack.infra.mapLogs.oneYearLabel', {
      defaultMessage: '1 Year',
    }),
    intervalSize: 1000 * 60 * 60 * 24 * 365,
  },
  {
    label: i18n.translate('xpack.infra.mapLogs.oneMonthLabel', {
      defaultMessage: '1 Month',
    }),
    intervalSize: 1000 * 60 * 60 * 24 * 30,
  },
  {
    label: i18n.translate('xpack.infra.mapLogs.oneWeekLabel', {
      defaultMessage: '1 Week',
    }),
    intervalSize: 1000 * 60 * 60 * 24 * 7,
  },
  {
    label: i18n.translate('xpack.infra.mapLogs.oneDayLabel', {
      defaultMessage: '1 Day',
    }),
    intervalSize: 1000 * 60 * 60 * 24,
  },
  {
    label: i18n.translate('xpack.infra.mapLogs.oneHourLabel', {
      defaultMessage: '1 Hour',
    }),
    intervalSize: 1000 * 60 * 60,
  },
  {
    label: i18n.translate('xpack.infra.mapLogs.oneMinuteLabel', {
      defaultMessage: '1 Minute',
    }),
    intervalSize: 1000 * 60,
  },
];

@@ -1,52 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import React, { useContext, useMemo } from 'react';

import { UrlStateContainer } from '../../utils/url_state';
import { LogViewConfiguration } from './log_view_configuration';

/**
 * Url State
 */

interface LogMinimapUrlState {
  intervalSize?: number;
}

export const WithLogMinimapUrlState = () => {
  const { intervalSize, setIntervalSize } = useContext(LogViewConfiguration.Context);

  const urlState = useMemo(() => ({ intervalSize }), [intervalSize]);

  return (
    <UrlStateContainer
      urlState={urlState}
      urlStateKey="logMinimap"
      mapToUrlState={mapToUrlState}
      onChange={newUrlState => {
        if (newUrlState && newUrlState.intervalSize) {
          setIntervalSize(newUrlState.intervalSize);
        }
      }}
      onInitialize={newUrlState => {
        if (newUrlState && newUrlState.intervalSize) {
          setIntervalSize(newUrlState.intervalSize);
        }
      }}
    />
  );
};

const mapToUrlState = (value: any): LogMinimapUrlState | undefined =>
  value
    ? {
        intervalSize: mapToIntervalSizeUrlState(value.intervalSize),
      }
    : undefined;

const mapToIntervalSizeUrlState = (value: any) =>
  value && typeof value === 'number' ? value : undefined;

@@ -6,12 +6,12 @@
import { useContext, useMemo } from 'react';
import { StreamItem, LogEntryStreamItem } from '../../components/logging/log_text_stream/item';
import { LogEntry, LogEntryHighlight } from '../../utils/log_entry';
import { RendererFunction } from '../../utils/typed_react';
// deep importing to avoid a circular import problem
import { LogHighlightsState } from './log_highlights/log_highlights';
import { LogEntriesState, LogEntriesStateParams, LogEntriesCallbacks } from './log_entries';
import { UniqueTimeKey } from '../../../common/time';
import { LogEntry } from '../../../common/http_api';

export const WithStreamItems: React.FunctionComponent<{
  children: RendererFunction<

@@ -30,7 +30,7 @@ export const WithStreamItems: React.FunctionComponent<{
      logEntries.isReloading
        ? []
        : logEntries.entries.map(logEntry =>
            createLogEntryStreamItem(logEntry, logEntryHighlightsById[logEntry.gid] || [])
            createLogEntryStreamItem(logEntry, logEntryHighlightsById[logEntry.id] || [])
          ),

    [logEntries.entries, logEntries.isReloading, logEntryHighlightsById]

@@ -46,7 +46,7 @@ export const WithStreamItems: React.FunctionComponent<{

const createLogEntryStreamItem = (
  logEntry: LogEntry,
  highlights: LogEntryHighlight[]
  highlights: LogEntry[]
): LogEntryStreamItem => ({
  kind: 'logEntry' as 'logEntry',
  logEntry,

@@ -44,11 +44,8 @@ export const CategoryExampleMessage: React.FunctionComponent<{
      <LogEntryColumn {...columnWidths[messageColumnId]}>
        <LogEntryMessageColumn
          columnValue={{
            __typename: 'InfraLogEntryMessageColumn' as const,
            columnId: messageColumnId,
            message: [
              { __typename: 'InfraLogMessageFieldSegment', field: 'message', value: message },
            ],
            message: [{ field: 'message', value: message, highlights: [] }],
          }}
          highlights={noHighlights}
          isHovered={false}

@@ -60,10 +57,10 @@ export const CategoryExampleMessage: React.FunctionComponent<{
      <LogEntryColumn {...columnWidths[datasetColumnId]}>
        <LogEntryFieldColumn
          columnValue={{
            __typename: 'InfraLogEntryFieldColumn' as const,
            columnId: datasetColumnId,
            field: 'event.dataset',
            value: encodedDatasetFieldValue,
            highlights: [],
          }}
          highlights={noHighlights}
          isHovered={false}

@@ -103,19 +100,16 @@ const columnWidths = {

export const exampleMessageColumnConfigurations: LogColumnConfiguration[] = [
  {
    __typename: 'InfraSourceTimestampLogColumn',
    timestampColumn: {
      id: timestampColumnId,
    },
  },
  {
    __typename: 'InfraSourceMessageLogColumn',
    messageColumn: {
      id: messageColumnId,
    },
  },
  {
    __typename: 'InfraSourceFieldLogColumn',
    fieldColumn: {
      field: 'event.dataset',
      id: datasetColumnId,

@@ -20,7 +20,6 @@ import {
  LogFlyout as LogFlyoutState,
  WithFlyoutOptionsUrlState,
} from '../../../containers/logs/log_flyout';
import { WithLogMinimapUrlState } from '../../../containers/logs/with_log_minimap';
import { LogPositionState } from '../../../containers/logs/log_position';
import { WithLogTextviewUrlState } from '../../../containers/logs/with_log_textview';
import { WithStreamItems } from '../../../containers/logs/with_stream_items';

@@ -31,7 +30,7 @@ import { LogHighlightsState } from '../../../containers/logs/log_highlights';

export const LogsPageLogsContent: React.FunctionComponent = () => {
  const { source, sourceId, version } = useContext(Source.Context);
  const { intervalSize, textScale, textWrap } = useContext(LogViewConfiguration.Context);
  const { textScale, textWrap } = useContext(LogViewConfiguration.Context);
  const {
    setFlyoutVisibility,
    flyoutVisible,

@@ -44,17 +43,20 @@ export const LogsPageLogsContent: React.FunctionComponent = () => {
  const { logSummaryHighlights } = useContext(LogHighlightsState.Context);
  const { applyLogFilterQuery } = useContext(LogFilterState.Context);
  const {
    isAutoReloading,
    isStreaming,
    targetPosition,
    visibleMidpointTime,
    visibleTimeInterval,
    reportVisiblePositions,
    jumpToTargetPosition,
    startLiveStreaming,
    stopLiveStreaming,
    startDateExpression,
    endDateExpression,
    updateDateRange,
  } = useContext(LogPositionState.Context);
  return (
    <>
      <WithLogMinimapUrlState />
      <WithLogTextviewUrlState />
      <WithFlyoutOptionsUrlState />
      <LogsToolbar />

@@ -90,7 +92,7 @@ export const LogsPageLogsContent: React.FunctionComponent = () => {
              hasMoreBeforeStart={hasMoreBeforeStart}
              isLoadingMore={isLoadingMore}
              isReloading={isReloading}
              isStreaming={isAutoReloading}
              isStreaming={isStreaming}
              items={items}
              jumpToTarget={jumpToTargetPosition}
              lastLoadedTime={lastLoadedTime}

@@ -104,6 +106,10 @@ export const LogsPageLogsContent: React.FunctionComponent = () => {
              setFlyoutVisibility={setFlyoutVisibility}
              highlightedItem={surroundingLogsId ? surroundingLogsId : null}
              currentHighlightKey={currentHighlightKey}
              startDateExpression={startDateExpression}
              endDateExpression={endDateExpression}
              updateDateRange={updateDateRange}
              startLiveStreaming={startLiveStreaming}
            />
          )}
        </WithStreamItems>

@@ -113,14 +119,15 @@ export const LogsPageLogsContent: React.FunctionComponent = () => {
          return (
            <LogPageMinimapColumn ref={measureRef}>
              <WithSummary>
                {({ buckets }) => (
                {({ buckets, start, end }) => (
                  <WithStreamItems>
                    {({ isReloading }) => (
                      <LogMinimap
                        start={start}
                        end={end}
                        height={height}
                        width={width}
                        highlightedInterval={isReloading ? null : visibleTimeInterval}
                        intervalSize={intervalSize}
                        jumpToTarget={jumpToTargetPosition}
                        summaryBuckets={buckets}
                        summaryHighlightBuckets={

@@ -29,35 +29,57 @@ const LogFilterStateProvider: React.FC = ({ children }) => {
const LogEntriesStateProvider: React.FC = ({ children }) => {
  const { sourceId } = useContext(Source.Context);
  const {
    startTimestamp,
    endTimestamp,
    timestampsLastUpdate,
    targetPosition,
    pagesBeforeStart,
    pagesAfterEnd,
    isAutoReloading,
    isStreaming,
    jumpToTargetPosition,
    isInitialized,
  } = useContext(LogPositionState.Context);
  const { filterQuery } = useContext(LogFilterState.Context);

  // Don't render anything if the date range is incorrect.
  if (!startTimestamp || !endTimestamp) {
    return null;
  }

  const entriesProps = {
    startTimestamp,
    endTimestamp,
    timestampsLastUpdate,
    timeKey: targetPosition,
    pagesBeforeStart,
    pagesAfterEnd,
    filterQuery,
    sourceId,
    isAutoReloading,
    isStreaming,
    jumpToTargetPosition,
  };

  // Don't initialize the entries until the position has been fully initialized.
  // See `<WithLogPositionUrlState />`
  if (!isInitialized) {
    return null;
  }

  return <LogEntriesState.Provider {...entriesProps}>{children}</LogEntriesState.Provider>;
};

const LogHighlightsStateProvider: React.FC = ({ children }) => {
  const { sourceId, version } = useContext(Source.Context);
  const [{ entriesStart, entriesEnd }] = useContext(LogEntriesState.Context);
  const [{ topCursor, bottomCursor, centerCursor, entries }] = useContext(LogEntriesState.Context);
  const { filterQuery } = useContext(LogFilterState.Context);

  const highlightsProps = {
    sourceId,
    sourceVersion: version,
    entriesStart,
    entriesEnd,
    entriesStart: topCursor,
    entriesEnd: bottomCursor,
    centerCursor,
    size: entries.length,
    filterQuery,
  };
  return <LogHighlightsState.Provider {...highlightsProps}>{children}</LogHighlightsState.Provider>;

@@ -13,30 +13,22 @@ import { Toolbar } from '../../../components/eui';
import { LogCustomizationMenu } from '../../../components/logging/log_customization_menu';
import { LogHighlightsMenu } from '../../../components/logging/log_highlights_menu';
import { LogHighlightsState } from '../../../containers/logs/log_highlights/log_highlights';
import { LogMinimapScaleControls } from '../../../components/logging/log_minimap_scale_controls';
import { LogTextScaleControls } from '../../../components/logging/log_text_scale_controls';
import { LogTextWrapControls } from '../../../components/logging/log_text_wrap_controls';
import { LogTimeControls } from '../../../components/logging/log_time_controls';
import { LogFlyout } from '../../../containers/logs/log_flyout';
import { LogViewConfiguration } from '../../../containers/logs/log_view_configuration';
import { LogFilterState } from '../../../containers/logs/log_filter';
import { LogPositionState } from '../../../containers/logs/log_position';
import { Source } from '../../../containers/source';
import { WithKueryAutocompletion } from '../../../containers/with_kuery_autocompletion';
import { LogDatepicker } from '../../../components/logging/log_datepicker';

export const LogsToolbar = () => {
  const { createDerivedIndexPattern } = useContext(Source.Context);
  const derivedIndexPattern = createDerivedIndexPattern('logs');
  const {
    availableIntervalSizes,
    availableTextScales,
    intervalSize,
    setIntervalSize,
    setTextScale,
    setTextWrap,
    textScale,
    textWrap,
  } = useContext(LogViewConfiguration.Context);
  const { availableTextScales, setTextScale, setTextWrap, textScale, textWrap } = useContext(
    LogViewConfiguration.Context
  );
  const {
    filterQueryDraft,
    isFilterQueryDraftValid,

@@ -55,12 +47,14 @@ export const LogsToolbar = () => {
    goToNextHighlight,
  } = useContext(LogHighlightsState.Context);
  const {
    visibleMidpointTime,
    isAutoReloading,
    jumpToTargetPositionTime,
    isStreaming,
    startLiveStreaming,
    stopLiveStreaming,
    startDateExpression,
    endDateExpression,
    updateDateRange,
  } = useContext(LogPositionState.Context);

  return (
    <Toolbar>
      <EuiFlexGroup alignItems="center" justifyContent="spaceBetween" gutterSize="s">

@@ -94,11 +88,6 @@ export const LogsToolbar = () => {
        </EuiFlexItem>
        <EuiFlexItem grow={false}>
          <LogCustomizationMenu>
            <LogMinimapScaleControls
              availableIntervalSizes={availableIntervalSizes}
              setIntervalSize={setIntervalSize}
              intervalSize={intervalSize}
            />
            <LogTextWrapControls wrap={textWrap} setTextWrap={setTextWrap} />
            <LogTextScaleControls
              availableTextScales={availableTextScales}

@@ -121,15 +110,13 @@ export const LogsToolbar = () => {
          />
        </EuiFlexItem>
        <EuiFlexItem grow={false}>
          <LogTimeControls
            currentTime={visibleMidpointTime}
            isLiveStreaming={isAutoReloading}
            jumpToTime={jumpToTargetPositionTime}
            startLiveStreaming={() => {
              startLiveStreaming();
              setSurroundingLogsId(null);
            }}
            stopLiveStreaming={stopLiveStreaming}
          <LogDatepicker
            startDateExpression={startDateExpression}
            endDateExpression={endDateExpression}
            onStartStreaming={startLiveStreaming}
            onStopStreaming={stopLiveStreaming}
            isStreaming={isStreaming}
            onUpdateDateRange={updateDateRange}
          />
        </EuiFlexItem>
      </EuiFlexGroup>

x-pack/plugins/infra/public/utils/datemath.test.ts (new file, 401 lines)

@@ -0,0 +1,401 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import {
  isValidDatemath,
  datemathToEpochMillis,
  extendDatemath,
  convertDate,
  normalizeDate,
} from './datemath';
import sinon from 'sinon';

describe('isValidDatemath()', () => {
  it('Returns `false` for empty strings', () => {
    expect(isValidDatemath('')).toBe(false);
  });

  it('Returns `false` for invalid strings', () => {
    expect(isValidDatemath('wadus')).toBe(false);
    expect(isValidDatemath('nowww-')).toBe(false);
    expect(isValidDatemath('now-')).toBe(false);
    expect(isValidDatemath('now-1')).toBe(false);
    expect(isValidDatemath('now-1d/')).toBe(false);
  });

  it('Returns `true` for valid strings', () => {
    expect(isValidDatemath('now')).toBe(true);
    expect(isValidDatemath('now-1d')).toBe(true);
    expect(isValidDatemath('now-1d/d')).toBe(true);
  });
});

describe('datemathToEpochMillis()', () => {
  let clock: sinon.SinonFakeTimers;

  beforeEach(() => {
    clock = sinon.useFakeTimers(Date.now());
  });

  afterEach(() => {
    clock.restore();
  });

  it('Returns `0` for the dawn of time', () => {
    expect(datemathToEpochMillis('1970-01-01T00:00:00+00:00')).toEqual(0);
  });

  it('Returns the current timestamp when `now`', () => {
    expect(datemathToEpochMillis('now')).toEqual(Date.now());
  });
});

describe('extendDatemath()', () => {
  it('Returns `undefined` for invalid values', () => {
    expect(extendDatemath('')).toBeUndefined();
  });

  it('Keeps `"now"` stable', () => {
    expect(extendDatemath('now')).toEqual({ value: 'now' });
    expect(extendDatemath('now', 'before')).toEqual({ value: 'now' });
    expect(extendDatemath('now', 'after')).toEqual({ value: 'now' });
  });

  describe('moving before', () => {
    describe('with a negative operator', () => {
      it('doubles milliseconds', () => {
        expect(extendDatemath('now-250ms')).toEqual({
          value: 'now-500ms',
          diffAmount: 250,
          diffUnit: 'ms',
        });
      });

      it('normalizes milliseconds', () => {
        expect(extendDatemath('now-500ms')).toEqual({
          value: 'now-1s',
          diffAmount: 500,
          diffUnit: 'ms',
        });
      });

      it('doubles seconds', () => {
        expect(extendDatemath('now-10s')).toEqual({
          value: 'now-20s',
          diffAmount: 10,
          diffUnit: 's',
        });
      });

      it('normalizes seconds', () => {
        expect(extendDatemath('now-30s')).toEqual({
          value: 'now-1m',
          diffAmount: 30,
          diffUnit: 's',
        });
      });

      it('doubles minutes when amount is low', () => {
        expect(extendDatemath('now-1m')).toEqual({ value: 'now-2m', diffAmount: 1, diffUnit: 'm' });
        expect(extendDatemath('now-2m')).toEqual({ value: 'now-4m', diffAmount: 2, diffUnit: 'm' });
        expect(extendDatemath('now-3m')).toEqual({ value: 'now-6m', diffAmount: 3, diffUnit: 'm' });
      });

      it('adds half the minutes when the amount is high', () => {
        expect(extendDatemath('now-20m')).toEqual({
          value: 'now-30m',
          diffAmount: 10,
          diffUnit: 'm',
        });
      });

      it('Adds half an hour when the amount is one hour', () => {
        expect(extendDatemath('now-1h')).toEqual({
          value: 'now-90m',
          diffAmount: 30,
          diffUnit: 'm',
        });
      });

      it('Adds one hour when the amount is more than one hour', () => {
        expect(extendDatemath('now-2h')).toEqual({
          value: 'now-3h',
          diffAmount: 1,
          diffUnit: 'h',
        });
      });

      it('Adds one hour when the amount is one day', () => {
        expect(extendDatemath('now-1d')).toEqual({
          value: 'now-25h',
          diffAmount: 1,
          diffUnit: 'h',
        });
      });

      it('Adds one day when the amount is more than one day', () => {
        expect(extendDatemath('now-2d')).toEqual({
          value: 'now-3d',
          diffAmount: 1,
          diffUnit: 'd',
        });
        expect(extendDatemath('now-3d')).toEqual({
          value: 'now-4d',
          diffAmount: 1,
          diffUnit: 'd',
        });
      });

      it('Adds one day when the amount is one week', () => {
        expect(extendDatemath('now-1w')).toEqual({
          value: 'now-8d',
          diffAmount: 1,
          diffUnit: 'd',
        });
      });

      it('Adds one week when the amount is more than one week', () => {
        expect(extendDatemath('now-2w')).toEqual({
          value: 'now-3w',
          diffAmount: 1,
          diffUnit: 'w',
        });
      });

      it('Adds one week when the amount is one month', () => {
        expect(extendDatemath('now-1M')).toEqual({
          value: 'now-5w',
          diffAmount: 1,
          diffUnit: 'w',
        });
      });

      it('Adds one month when the amount is more than one month', () => {
        expect(extendDatemath('now-2M')).toEqual({
          value: 'now-3M',
          diffAmount: 1,
          diffUnit: 'M',
        });
      });

      it('Adds one month when the amount is one year', () => {
        expect(extendDatemath('now-1y')).toEqual({
          value: 'now-13M',
          diffAmount: 1,
          diffUnit: 'M',
        });
      });

      it('Adds one year when the amount is in years', () => {
        expect(extendDatemath('now-2y')).toEqual({
          value: 'now-3y',
          diffAmount: 1,
          diffUnit: 'y',
        });
      });
    });

    describe('with a positive operator', () => {
      it('Halves milliseconds', () => {
        expect(extendDatemath('now+250ms')).toEqual({
          value: 'now+125ms',
          diffAmount: 125,
          diffUnit: 'ms',
        });
      });

      it('Halves seconds', () => {
        expect(extendDatemath('now+10s')).toEqual({
          value: 'now+5s',
          diffAmount: 5,
          diffUnit: 's',
        });
      });

      it('Halves minutes when the amount is low', () => {
        expect(extendDatemath('now+2m')).toEqual({ value: 'now+1m', diffAmount: 1, diffUnit: 'm' });
        expect(extendDatemath('now+4m')).toEqual({ value: 'now+2m', diffAmount: 2, diffUnit: 'm' });
        expect(extendDatemath('now+6m')).toEqual({ value: 'now+3m', diffAmount: 3, diffUnit: 'm' });
      });

      it('Decreases minutes in half amounts when the amount is high', () => {
        expect(extendDatemath('now+30m')).toEqual({
          value: 'now+20m',
          diffAmount: 10,
          diffUnit: 'm',
        });
      });

      it('Decreases half an hour when the amount is one hour', () => {
        expect(extendDatemath('now+1h')).toEqual({
          value: 'now+30m',
          diffAmount: 30,
          diffUnit: 'm',
        });
      });

      it('Removes one hour when the amount is one day', () => {
        expect(extendDatemath('now+1d')).toEqual({
          value: 'now+23h',
          diffAmount: 1,
          diffUnit: 'h',
        });
      });

      it('Removes one day when the amount is more than one day', () => {
        expect(extendDatemath('now+2d')).toEqual({
          value: 'now+1d',
          diffAmount: 1,
          diffUnit: 'd',
        });
        expect(extendDatemath('now+3d')).toEqual({
          value: 'now+2d',
          diffAmount: 1,
          diffUnit: 'd',
        });
      });

      it('Removes one day when the amount is one week', () => {
        expect(extendDatemath('now+1w')).toEqual({
          value: 'now+6d',
          diffAmount: 1,
          diffUnit: 'd',
        });
      });

      it('Removes one week when the amount is more than one week', () => {
        expect(extendDatemath('now+2w')).toEqual({
          value: 'now+1w',
          diffAmount: 1,
          diffUnit: 'w',
        });
      });

      it('Removes one week when the amount is one month', () => {
        expect(extendDatemath('now+1M')).toEqual({
          value: 'now+3w',
          diffAmount: 1,
          diffUnit: 'w',
        });
      });

      it('Removes one month when the amount is more than one month', () => {
        expect(extendDatemath('now+2M')).toEqual({
          value: 'now+1M',
          diffAmount: 1,
          diffUnit: 'M',
        });
      });

      it('Removes one month when the amount is one year', () => {
        expect(extendDatemath('now+1y')).toEqual({
          value: 'now+11M',
          diffAmount: 1,
          diffUnit: 'M',
        });
      });

      it('Removes one year when the amount is in years', () => {
        expect(extendDatemath('now+2y')).toEqual({
          value: 'now+1y',
          diffAmount: 1,
          diffUnit: 'y',
        });
      });
    });
  });
});

describe('convertDate()', () => {
  it('returns same value if units are the same', () => {
    expect(convertDate(1, 'h', 'h')).toEqual(1);
  });

  it('converts from big units to small units', () => {
    expect(convertDate(1, 's', 'ms')).toEqual(1000);
    expect(convertDate(1, 'm', 'ms')).toEqual(60000);
    expect(convertDate(1, 'h', 'ms')).toEqual(3600000);
    expect(convertDate(1, 'd', 'ms')).toEqual(86400000);
    expect(convertDate(1, 'M', 'ms')).toEqual(2592000000);
    expect(convertDate(1, 'y', 'ms')).toEqual(31536000000);
  });

  it('converts from small units to big units', () => {
    expect(convertDate(1000, 'ms', 's')).toEqual(1);
    expect(convertDate(60000, 'ms', 'm')).toEqual(1);
    expect(convertDate(3600000, 'ms', 'h')).toEqual(1);
    expect(convertDate(86400000, 'ms', 'd')).toEqual(1);
    expect(convertDate(2592000000, 'ms', 'M')).toEqual(1);
    expect(convertDate(31536000000, 'ms', 'y')).toEqual(1);
  });

  it('Handles days to years', () => {
    expect(convertDate(1, 'y', 'd')).toEqual(365);
    expect(convertDate(365, 'd', 'y')).toEqual(1);
  });

  it('Handles years to months', () => {
    expect(convertDate(1, 'y', 'M')).toEqual(12);
    expect(convertDate(12, 'M', 'y')).toEqual(1);
  });

  it('Handles days to months', () => {
    expect(convertDate(1, 'M', 'd')).toEqual(30);
    expect(convertDate(30, 'd', 'M')).toEqual(1);
  });

  it('Handles days to weeks', () => {
    expect(convertDate(1, 'w', 'd')).toEqual(7);
    expect(convertDate(7, 'd', 'w')).toEqual(1);
  });

  it('Handles weeks to years', () => {
    expect(convertDate(1, 'y', 'w')).toEqual(52);
    expect(convertDate(52, 'w', 'y')).toEqual(1);
  });
});

describe('normalizeDate()', () => {
  it('keeps units under the conversion ratio the same', () => {
    expect(normalizeDate(999, 'ms')).toEqual({ amount: 999, unit: 'ms' });
    expect(normalizeDate(59, 's')).toEqual({ amount: 59, unit: 's' });
    expect(normalizeDate(59, 'm')).toEqual({ amount: 59, unit: 'm' });
    expect(normalizeDate(23, 'h')).toEqual({ amount: 23, unit: 'h' });
    expect(normalizeDate(6, 'd')).toEqual({ amount: 6, unit: 'd' });
    expect(normalizeDate(3, 'w')).toEqual({ amount: 3, unit: 'w' });
    expect(normalizeDate(11, 'M')).toEqual({ amount: 11, unit: 'M' });
  });

  it('Moves to the next unit for values equal to the conversion ratio', () => {
    expect(normalizeDate(1000, 'ms')).toEqual({ amount: 1, unit: 's' });
    expect(normalizeDate(60, 's')).toEqual({ amount: 1, unit: 'm' });
    expect(normalizeDate(60, 'm')).toEqual({ amount: 1, unit: 'h' });
    expect(normalizeDate(24, 'h')).toEqual({ amount: 1, unit: 'd' });
    expect(normalizeDate(7, 'd')).toEqual({ amount: 1, unit: 'w' });
    expect(normalizeDate(4, 'w')).toEqual({ amount: 1, unit: 'M' });
    expect(normalizeDate(12, 'M')).toEqual({ amount: 1, unit: 'y' });
  });

  it('keeps units slightly over the conversion ratio the same', () => {
    expect(normalizeDate(1001, 'ms')).toEqual({ amount: 1001, unit: 'ms' });
    expect(normalizeDate(61, 's')).toEqual({ amount: 61, unit: 's' });
    expect(normalizeDate(61, 'm')).toEqual({ amount: 61, unit: 'm' });
    expect(normalizeDate(25, 'h')).toEqual({ amount: 25, unit: 'h' });
    expect(normalizeDate(8, 'd')).toEqual({ amount: 8, unit: 'd' });
    expect(normalizeDate(5, 'w')).toEqual({ amount: 5, unit: 'w' });
    expect(normalizeDate(13, 'M')).toEqual({ amount: 13, unit: 'M' });
  });

  it('moves to the next unit for any value higher than twice the conversion ratio', () => {
    expect(normalizeDate(2001, 'ms')).toEqual({ amount: 2, unit: 's' });
    expect(normalizeDate(121, 's')).toEqual({ amount: 2, unit: 'm' });
    expect(normalizeDate(121, 'm')).toEqual({ amount: 2, unit: 'h' });
    expect(normalizeDate(49, 'h')).toEqual({ amount: 2, unit: 'd' });
    expect(normalizeDate(15, 'd')).toEqual({ amount: 2, unit: 'w' });
    expect(normalizeDate(9, 'w')).toEqual({ amount: 2, unit: 'M' });
    expect(normalizeDate(25, 'M')).toEqual({ amount: 2, unit: 'y' });
  });
});

x-pack/plugins/infra/public/utils/datemath.ts (new file, 266 lines)

@@ -0,0 +1,266 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import dateMath, { Unit } from '@elastic/datemath';

export function isValidDatemath(value: string): boolean {
  const parsedValue = dateMath.parse(value);
  return !!(parsedValue && parsedValue.isValid());
}

export function datemathToEpochMillis(value: string, round: 'down' | 'up' = 'down'): number | null {
  const parsedValue = dateMath.parse(value, { roundUp: round === 'up' });
  if (!parsedValue || !parsedValue.isValid()) {
    return null;
  }
  return parsedValue.valueOf();
}
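
A quick usage sketch; the concrete epoch values depend on the current time, so the comments only describe the shape of the results:

datemathToEpochMillis('now');          // current timestamp in milliseconds
datemathToEpochMillis('now/d');        // start of the current day (rounds down by default)
datemathToEpochMillis('now/d', 'up');  // end of the current day
datemathToEpochMillis('not-a-date');   // null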

type DatemathExtension =
  | {
      value: string;
      diffUnit: Unit;
      diffAmount: number;
    }
  | { value: 'now' };

const datemathNowExpression = /(\+|\-)(\d+)(ms|s|m|h|d|w|M|y)$/;

/**
 * Extend a datemath value
 * @param value The value to extend
 * @param {'before' | 'after'} direction Should the value move before or after in time
 * @param oppositeEdge For absolute values, the value of the other edge of the range
 */
export function extendDatemath(
  value: string,
  direction: 'before' | 'after' = 'before',
  oppositeEdge?: string
): DatemathExtension | undefined {
  if (!isValidDatemath(value)) {
    return undefined;
  }

  // `now` cannot be extended
  if (value === 'now') {
    return { value: 'now' };
  }

  // The unit is relative
  if (value.startsWith('now')) {
    return extendRelativeDatemath(value, direction);
  } else if (oppositeEdge && isValidDatemath(oppositeEdge)) {
    return extendAbsoluteDatemath(value, direction, oppositeEdge);
  }

  return undefined;
}
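
Two representative calls: the relative case matches the tests above, and the absolute case is what the arithmetic in `extendAbsoluteDatemath` below should yield for these assumed timestamps:

extendDatemath('now-10s'); // { value: 'now-20s', diffAmount: 10, diffUnit: 's' }

extendDatemath('2020-01-01T12:00:00.000Z', 'before', '2020-01-01T13:00:00.000Z');
// -> { value: '2020-01-01T11:00:00.000Z', diffAmount: 1, diffUnit: 'h' }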

function extendRelativeDatemath(
  value: string,
  direction: 'before' | 'after'
): DatemathExtension | undefined {
  const [, operator, amount, unit] = datemathNowExpression.exec(value) || [];
  if (!operator || !amount || !unit) {
    return undefined;
  }

  const mustIncreaseAmount = operator === '-' && direction === 'before';
  const parsedAmount = parseInt(amount, 10);
  let newUnit: Unit = unit as Unit;
  let newAmount: number;

  // Extend the amount
  switch (unit) {
    // For small units, always double or halve the amount
    case 'ms':
    case 's':
      newAmount = mustIncreaseAmount ? parsedAmount * 2 : Math.floor(parsedAmount / 2);
      break;
    // For minutes, increase or decrease in doubles or halves, depending on
    // the amount of minutes
    case 'm':
      let ratio;
      const MINUTES_LARGE = 10;
      if (mustIncreaseAmount) {
        ratio = parsedAmount >= MINUTES_LARGE ? 0.5 : 1;
        newAmount = parsedAmount + Math.floor(parsedAmount * ratio);
      } else {
        newAmount =
          parsedAmount >= MINUTES_LARGE
            ? Math.floor(parsedAmount / 1.5)
            : parsedAmount - Math.floor(parsedAmount * 0.5);
      }
      break;

    // For hours, increase or decrease half an hour for 1 hour. Otherwise
    // increase full hours
    case 'h':
      if (parsedAmount === 1) {
        newAmount = mustIncreaseAmount ? 90 : 30;
        newUnit = 'm';
      } else {
        newAmount = mustIncreaseAmount ? parsedAmount + 1 : parsedAmount - 1;
      }
      break;

    // For the rest of units, increase or decrease one smaller unit for
    // amounts of 1. Otherwise increase or decrease the unit
    case 'd':
    case 'w':
    case 'M':
    case 'y':
      if (parsedAmount === 1) {
        newUnit = dateMath.unitsDesc[dateMath.unitsDesc.indexOf(unit) + 1];
        newAmount = mustIncreaseAmount
          ? convertDate(1, unit, newUnit) + 1
          : convertDate(1, unit, newUnit) - 1;
      } else {
        newAmount = mustIncreaseAmount ? parsedAmount + 1 : parsedAmount - 1;
      }
      break;

    default:
      throw new TypeError('Unhandled datemath unit');
  }

  // normalize amount and unit (i.e. 120s -> 2m)
  const { unit: normalizedUnit, amount: normalizedAmount } = normalizeDate(newAmount, newUnit);

  // How much have we changed the time?
  const diffAmount = Math.abs(normalizedAmount - convertDate(parsedAmount, unit, normalizedUnit));
  // if `diffAmount` is not an integer after normalization, express the difference in the original unit
  const shouldKeepDiffUnit = diffAmount % 1 !== 0;

  return {
    value: `now${operator}${normalizedAmount}${normalizedUnit}`,
    diffUnit: shouldKeepDiffUnit ? unit : newUnit,
    diffAmount: shouldKeepDiffUnit ? Math.abs(newAmount - parsedAmount) : diffAmount,
  };
}

function extendAbsoluteDatemath(
  value: string,
  direction: 'before' | 'after',
  oppositeEdge: string
): DatemathExtension {
  const valueTimestamp = datemathToEpochMillis(value)!;
  const oppositeEdgeTimestamp = datemathToEpochMillis(oppositeEdge)!;
  const actualTimestampDiff = Math.abs(valueTimestamp - oppositeEdgeTimestamp);
  const normalizedDiff = normalizeDate(actualTimestampDiff, 'ms');
  const normalizedTimestampDiff = convertDate(normalizedDiff.amount, normalizedDiff.unit, 'ms');

  const newValue =
    direction === 'before'
      ? valueTimestamp - normalizedTimestampDiff
      : valueTimestamp + normalizedTimestampDiff;

  return {
    value: new Date(newValue).toISOString(),
    diffUnit: normalizedDiff.unit,
    diffAmount: normalizedDiff.amount,
  };
}

const CONVERSION_RATIOS: Record<string, Array<[Unit, number]>> = {
  wy: [
    ['w', 52], // 1 year = 52 weeks
    ['y', 1],
  ],
  w: [
    ['ms', 1000],
    ['s', 60],
    ['m', 60],
    ['h', 24],
    ['d', 7], // 1 week = 7 days
    ['w', 4], // 1 month = 4 weeks = 28 days
    ['M', 12], // 1 year = 12 months = 52 weeks = 364 days
    ['y', 1],
  ],
  M: [
    ['ms', 1000],
    ['s', 60],
    ['m', 60],
    ['h', 24],
    ['d', 30], // 1 month = 30 days
    ['M', 12], // 1 year = 12 months = 360 days
    ['y', 1],
  ],
  default: [
    ['ms', 1000],
    ['s', 60],
    ['m', 60],
    ['h', 24],
    ['d', 365], // 1 year = 365 days
    ['y', 1],
  ],
};

function getRatioScale(from: Unit, to?: Unit) {
  if ((from === 'y' && to === 'w') || (from === 'w' && to === 'y')) {
    return CONVERSION_RATIOS.wy;
  } else if (from === 'w' || to === 'w') {
    return CONVERSION_RATIOS.w;
  } else if (from === 'M' || to === 'M') {
    return CONVERSION_RATIOS.M;
  } else {
    return CONVERSION_RATIOS.default;
  }
}

export function convertDate(value: number, from: Unit, to: Unit): number {
  if (from === to) {
    return value;
  }

  const ratioScale = getRatioScale(from, to);
  const fromIdx = ratioScale.findIndex(ratio => ratio[0] === from);
  const toIdx = ratioScale.findIndex(ratio => ratio[0] === to);

  let convertedValue = value;

  if (fromIdx > toIdx) {
    // `from` is the bigger unit. Multiply the value
    for (let i = toIdx; i < fromIdx; i++) {
      convertedValue *= ratioScale[i][1];
    }
  } else {
    // `from` is the smaller unit. Divide the value
    for (let i = fromIdx; i < toIdx; i++) {
      convertedValue /= ratioScale[i][1];
    }
  }

  return convertedValue;
}
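
Some concrete conversions under these ratio tables:

convertDate(2, 'h', 'm');  // 120
convertDate(90, 'm', 'h'); // 1.5
convertDate(1, 'M', 'd');  // 30: the 'M' scale approximates a month as 30 days
convertDate(1, 'y', 'w');  // 52: the dedicated week/year scale avoids the 4-weeks-per-month drift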

export function normalizeDate(amount: number, unit: Unit): { amount: number; unit: Unit } {
  // There is nothing after years
  if (unit === 'y') {
    return { amount, unit };
  }

  const nextUnit = dateMath.unitsAsc[dateMath.unitsAsc.indexOf(unit) + 1];
  const ratioScale = getRatioScale(unit, nextUnit);
  const ratio = ratioScale.find(r => r[0] === unit)![1];

  const newAmount = amount / ratio;

  // Exact conversion
  if (newAmount === 1) {
    return { amount: newAmount, unit: nextUnit };
  }

  // Might be able to go one unit more, so try again, rounding the value
  // 7200s => 120m => 2h
  // 7249s ~> 120m ~> 2h
  if (newAmount >= 2) {
    return normalizeDate(Math.round(newAmount), nextUnit);
  }

  // Cannot go one unit above. Return as is
  return { amount, unit };
}
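
Tracing the recursion for two inputs:

normalizeDate(7249, 's'); // 7249 / 60 ≈ 120.8 -> normalizeDate(121, 'm') -> { amount: 2, unit: 'h' }
normalizeDate(90, 'm');   // 90 / 60 = 1.5 is neither exactly 1 nor >= 2 -> { amount: 90, unit: 'm' }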

@@ -8,23 +8,26 @@ import { bisector } from 'd3-array';

import { compareToTimeKey, getIndexAtTimeKey, TimeKey, UniqueTimeKey } from '../../../common/time';
import { InfraLogEntryFields } from '../../graphql/types';

export type LogEntry = InfraLogEntryFields.Fragment;

export type LogEntryColumn = InfraLogEntryFields.Columns;
export type LogEntryMessageColumn = InfraLogEntryFields.InfraLogEntryMessageColumnInlineFragment;
export type LogEntryTimestampColumn = InfraLogEntryFields.InfraLogEntryTimestampColumnInlineFragment;
export type LogEntryFieldColumn = InfraLogEntryFields.InfraLogEntryFieldColumnInlineFragment;
import {
  LogEntry,
  LogColumn,
  LogTimestampColumn,
  LogFieldColumn,
  LogMessageColumn,
  LogMessagePart,
  LogMessageFieldPart,
  LogMessageConstantPart,
} from '../../../common/http_api';

export type LogEntryMessageSegment = InfraLogEntryFields.Message;
export type LogEntryConstantMessageSegment = InfraLogEntryFields.InfraLogMessageConstantSegmentInlineFragment;
export type LogEntryFieldMessageSegment = InfraLogEntryFields.InfraLogMessageFieldSegmentInlineFragment;

export const getLogEntryKey = (entry: { key: TimeKey }) => entry.key;
export const getLogEntryKey = (entry: { cursor: TimeKey }) => entry.cursor;

export const getUniqueLogEntryKey = (entry: { gid: string; key: TimeKey }): UniqueTimeKey => ({
  ...entry.key,
  gid: entry.gid,
export const getUniqueLogEntryKey = (entry: { id: string; cursor: TimeKey }): UniqueTimeKey => ({
  ...entry.cursor,
  gid: entry.id,
});

const logEntryTimeBisector = bisector(compareToTimeKey(getLogEntryKey));

@@ -39,19 +42,17 @@ export const getLogEntryAtTime = (entries: LogEntry[], time: TimeKey) => {
  return entryIndex !== null ? entries[entryIndex] : null;
};

export const isTimestampColumn = (column: LogEntryColumn): column is LogEntryTimestampColumn =>
export const isTimestampColumn = (column: LogColumn): column is LogTimestampColumn =>
  column != null && 'timestamp' in column;

export const isMessageColumn = (column: LogEntryColumn): column is LogEntryMessageColumn =>
export const isMessageColumn = (column: LogColumn): column is LogMessageColumn =>
  column != null && 'message' in column;

export const isFieldColumn = (column: LogEntryColumn): column is LogEntryFieldColumn =>
export const isFieldColumn = (column: LogColumn): column is LogFieldColumn =>
  column != null && 'field' in column;

export const isConstantSegment = (
  segment: LogEntryMessageSegment
): segment is LogEntryConstantMessageSegment => 'constant' in segment;
export const isConstantSegment = (segment: LogMessagePart): segment is LogMessageConstantPart =>
  'constant' in segment;

export const isFieldSegment = (
  segment: LogEntryMessageSegment
): segment is LogEntryFieldMessageSegment => 'field' in segment && 'value' in segment;
export const isFieldSegment = (segment: LogMessagePart): segment is LogMessageFieldPart =>
  'field' in segment && 'value' in segment;
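
These guards narrow the column and segment unions; a hypothetical helper showing the narrowing (the `describeColumn` name is illustrative, not from this commit):

const describeColumn = (column: LogColumn): string => {
  if (isTimestampColumn(column)) {
    return `timestamp: ${column.timestamp}`; // narrowed to LogTimestampColumn
  }
  if (isFieldColumn(column)) {
    return `${column.field}=${String(column.value)}`; // narrowed to LogFieldColumn
  }
  return isMessageColumn(column) ? `message with ${column.message.length} parts` : 'unknown';
};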

@@ -5,8 +5,14 @@
 */

import { InfraLogEntryHighlightFields } from '../../graphql/types';

export type LogEntryHighlight = InfraLogEntryHighlightFields.Fragment;
import {
  LogEntry,
  LogColumn,
  LogMessageColumn,
  LogFieldColumn,
  LogMessagePart,
  LogMessageFieldPart,
} from '../../../common/http_api';

export type LogEntryHighlightColumn = InfraLogEntryHighlightFields.Columns;
export type LogEntryHighlightMessageColumn = InfraLogEntryHighlightFields.InfraLogEntryMessageColumnInlineFragment;

@@ -16,18 +22,14 @@ export type LogEntryHighlightMessageSegment = InfraLogEntryHighlightFields.Messa
export type LogEntryHighlightFieldMessageSegment = InfraLogEntryHighlightFields.InfraLogMessageFieldSegmentInlineFragment;

export interface LogEntryHighlightsMap {
  [entryId: string]: LogEntryHighlight[];
  [entryId: string]: LogEntry[];
}

export const isHighlightMessageColumn = (
  column: LogEntryHighlightColumn
): column is LogEntryHighlightMessageColumn => column != null && 'message' in column;
export const isHighlightMessageColumn = (column: LogColumn): column is LogMessageColumn =>
  column != null && 'message' in column;

export const isHighlightFieldColumn = (
  column: LogEntryHighlightColumn
): column is LogEntryHighlightFieldColumn => column != null && 'field' in column;
export const isHighlightFieldColumn = (column: LogColumn): column is LogFieldColumn =>
  column != null && 'field' in column;

export const isHighlightFieldSegment = (
  segment: LogEntryHighlightMessageSegment
): segment is LogEntryHighlightFieldMessageSegment =>
export const isHighlightFieldSegment = (segment: LogMessagePart): segment is LogMessageFieldPart =>
  segment && 'field' in segment && 'highlights' in segment;

@@ -6,14 +6,7 @@

import { rootSchema } from '../../common/graphql/root/schema.gql';
import { sharedSchema } from '../../common/graphql/shared/schema.gql';
import { logEntriesSchema } from './log_entries/schema.gql';
import { sourceStatusSchema } from './source_status/schema.gql';
import { sourcesSchema } from './sources/schema.gql';

export const schemas = [
  rootSchema,
  sharedSchema,
  logEntriesSchema,
  sourcesSchema,
  sourceStatusSchema,
];
export const schemas = [rootSchema, sharedSchema, sourcesSchema, sourceStatusSchema];

@@ -1,7 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export { createLogEntriesResolvers } from './resolvers';

@@ -1,175 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import {
  InfraLogEntryColumn,
  InfraLogEntryFieldColumn,
  InfraLogEntryMessageColumn,
  InfraLogEntryTimestampColumn,
  InfraLogMessageConstantSegment,
  InfraLogMessageFieldSegment,
  InfraLogMessageSegment,
  InfraSourceResolvers,
} from '../../graphql/types';
import { InfraLogEntriesDomain } from '../../lib/domains/log_entries_domain';
import { parseFilterQuery } from '../../utils/serialized_query';
import { ChildResolverOf, InfraResolverOf } from '../../utils/typed_resolvers';
import { QuerySourceResolver } from '../sources/resolvers';

export type InfraSourceLogEntriesAroundResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.LogEntriesAroundResolver>,
  QuerySourceResolver
>;

export type InfraSourceLogEntriesBetweenResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.LogEntriesBetweenResolver>,
  QuerySourceResolver
>;

export type InfraSourceLogEntryHighlightsResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.LogEntryHighlightsResolver>,
  QuerySourceResolver
>;

export const createLogEntriesResolvers = (libs: {
  logEntries: InfraLogEntriesDomain;
}): {
  InfraSource: {
    logEntriesAround: InfraSourceLogEntriesAroundResolver;
    logEntriesBetween: InfraSourceLogEntriesBetweenResolver;
    logEntryHighlights: InfraSourceLogEntryHighlightsResolver;
  };
  InfraLogEntryColumn: {
    __resolveType(
      logEntryColumn: InfraLogEntryColumn
    ):
      | 'InfraLogEntryTimestampColumn'
      | 'InfraLogEntryMessageColumn'
      | 'InfraLogEntryFieldColumn'
      | null;
  };
  InfraLogMessageSegment: {
    __resolveType(
      messageSegment: InfraLogMessageSegment
    ): 'InfraLogMessageFieldSegment' | 'InfraLogMessageConstantSegment' | null;
  };
} => ({
  InfraSource: {
    async logEntriesAround(source, args, { req }) {
      const countBefore = args.countBefore || 0;
      const countAfter = args.countAfter || 0;

      const { entriesBefore, entriesAfter } = await libs.logEntries.getLogEntriesAround(
        req,
        source.id,
        args.key,
        countBefore + 1,
        countAfter + 1,
        parseFilterQuery(args.filterQuery)
      );

      const hasMoreBefore = entriesBefore.length > countBefore;
      const hasMoreAfter = entriesAfter.length > countAfter;

      const entries = [
        ...(hasMoreBefore ? entriesBefore.slice(1) : entriesBefore),
        ...(hasMoreAfter ? entriesAfter.slice(0, -1) : entriesAfter),
      ];

      return {
        start: entries.length > 0 ? entries[0].key : null,
        end: entries.length > 0 ? entries[entries.length - 1].key : null,
        hasMoreBefore,
        hasMoreAfter,
        filterQuery: args.filterQuery,
        entries,
      };
    },
    async logEntriesBetween(source, args, { req }) {
      const entries = await libs.logEntries.getLogEntriesBetween(
        req,
        source.id,
        args.startKey,
        args.endKey,
        parseFilterQuery(args.filterQuery)
      );

      return {
        start: entries.length > 0 ? entries[0].key : null,
        end: entries.length > 0 ? entries[entries.length - 1].key : null,
        hasMoreBefore: true,
        hasMoreAfter: true,
        filterQuery: args.filterQuery,
        entries,
      };
    },
    async logEntryHighlights(source, args, { req }) {
      const highlightedLogEntrySets = await libs.logEntries.getLogEntryHighlights(
        req,
        source.id,
        args.startKey,
        args.endKey,
        args.highlights.filter(highlightInput => !!highlightInput.query),
        parseFilterQuery(args.filterQuery)
      );

      return highlightedLogEntrySets.map(entries => ({
        start: entries.length > 0 ? entries[0].key : null,
        end: entries.length > 0 ? entries[entries.length - 1].key : null,
        hasMoreBefore: true,
        hasMoreAfter: true,
        filterQuery: args.filterQuery,
        entries,
      }));
    },
  },
  InfraLogEntryColumn: {
    __resolveType(logEntryColumn) {
      if (isTimestampColumn(logEntryColumn)) {
        return 'InfraLogEntryTimestampColumn';
      }

      if (isMessageColumn(logEntryColumn)) {
        return 'InfraLogEntryMessageColumn';
      }

      if (isFieldColumn(logEntryColumn)) {
        return 'InfraLogEntryFieldColumn';
      }

      return null;
    },
  },
  InfraLogMessageSegment: {
    __resolveType(messageSegment) {
      if (isConstantSegment(messageSegment)) {
        return 'InfraLogMessageConstantSegment';
      }

      if (isFieldSegment(messageSegment)) {
        return 'InfraLogMessageFieldSegment';
      }

      return null;
    },
  },
});

const isTimestampColumn = (column: InfraLogEntryColumn): column is InfraLogEntryTimestampColumn =>
  'timestamp' in column;

const isMessageColumn = (column: InfraLogEntryColumn): column is InfraLogEntryMessageColumn =>
  'message' in column;

const isFieldColumn = (column: InfraLogEntryColumn): column is InfraLogEntryFieldColumn =>
  'field' in column && 'value' in column;

const isConstantSegment = (
  segment: InfraLogMessageSegment
): segment is InfraLogMessageConstantSegment => 'constant' in segment;

const isFieldSegment = (segment: InfraLogMessageSegment): segment is InfraLogMessageFieldSegment =>
  'field' in segment && 'value' in segment && 'highlights' in segment;
|
|
@@ -1,136 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import gql from 'graphql-tag';

export const logEntriesSchema = gql`
  "A segment of the log entry message that was derived from a field"
  type InfraLogMessageFieldSegment {
    "The field the segment was derived from"
    field: String!
    "The segment's message"
    value: String!
    "A list of highlighted substrings of the value"
    highlights: [String!]!
  }

  "A segment of the log entry message that was derived from a string literal"
  type InfraLogMessageConstantSegment {
    "The segment's message"
    constant: String!
  }

  "A segment of the log entry message"
  union InfraLogMessageSegment = InfraLogMessageFieldSegment | InfraLogMessageConstantSegment

  "A special built-in column that contains the log entry's timestamp"
  type InfraLogEntryTimestampColumn {
    "The id of the corresponding column configuration"
    columnId: ID!
    "The timestamp"
    timestamp: Float!
  }

  "A special built-in column that contains the log entry's constructed message"
  type InfraLogEntryMessageColumn {
    "The id of the corresponding column configuration"
    columnId: ID!
    "A list of the formatted log entry segments"
    message: [InfraLogMessageSegment!]!
  }

  "A column that contains the value of a field of the log entry"
  type InfraLogEntryFieldColumn {
    "The id of the corresponding column configuration"
    columnId: ID!
    "The field name of the column"
    field: String!
    "The value of the field in the log entry"
    value: String!
    "A list of highlighted substrings of the value"
    highlights: [String!]!
  }

  "A column of a log entry"
  union InfraLogEntryColumn =
      InfraLogEntryTimestampColumn
    | InfraLogEntryMessageColumn
    | InfraLogEntryFieldColumn

  "A log entry"
  type InfraLogEntry {
    "A unique representation of the log entry's position in the event stream"
    key: InfraTimeKey!
    "The log entry's id"
    gid: String!
    "The source id"
    source: String!
    "The columns used for rendering the log entry"
    columns: [InfraLogEntryColumn!]!
  }

  "A highlighting definition"
  input InfraLogEntryHighlightInput {
    "The query to highlight by"
    query: String!
    "The number of highlighted documents to include beyond the beginning of the interval"
    countBefore: Int!
    "The number of highlighted documents to include beyond the end of the interval"
    countAfter: Int!
  }

  "A consecutive sequence of log entries"
  type InfraLogEntryInterval {
    "The key corresponding to the start of the interval covered by the entries"
    start: InfraTimeKey
    "The key corresponding to the end of the interval covered by the entries"
    end: InfraTimeKey
    "Whether there are more log entries available before the start"
    hasMoreBefore: Boolean!
    "Whether there are more log entries available after the end"
    hasMoreAfter: Boolean!
    "The query the log entries were filtered by"
    filterQuery: String
    "The query the log entries were highlighted with"
    highlightQuery: String
    "A list of the log entries"
    entries: [InfraLogEntry!]!
  }

  extend type InfraSource {
    "A consecutive span of log entries surrounding a point in time"
    logEntriesAround(
      "The sort key that corresponds to the point in time"
      key: InfraTimeKeyInput!
      "The maximum number of preceding to return"
      countBefore: Int = 0
      "The maximum number of following to return"
      countAfter: Int = 0
      "The query to filter the log entries by"
      filterQuery: String
    ): InfraLogEntryInterval!
    "A consecutive span of log entries within an interval"
    logEntriesBetween(
      "The sort key that corresponds to the start of the interval"
      startKey: InfraTimeKeyInput!
      "The sort key that corresponds to the end of the interval"
      endKey: InfraTimeKeyInput!
      "The query to filter the log entries by"
      filterQuery: String
    ): InfraLogEntryInterval!
    "Sequences of log entries matching sets of highlighting queries within an interval"
    logEntryHighlights(
      "The sort key that corresponds to the start of the interval"
      startKey: InfraTimeKeyInput!
      "The sort key that corresponds to the end of the interval"
      endKey: InfraTimeKeyInput!
      "The query to filter the log entries by"
      filterQuery: String
      "The highlighting to apply to the log entries"
      highlights: [InfraLogEntryHighlightInput!]!
    ): [InfraLogEntryInterval!]!
  }
`;
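Note: the GraphQL schema deleted above is superseded by the POST /api/log_entries/entries route and its io-ts request codecs. A minimal sketch of the centered request that replaces the logEntriesAround query, assuming the codecs are imported from the plugin's common/http_api module; the import path, timestamps, and cursor values below are hypothetical:

import { LOG_ENTRIES_PATH, logEntriesRequestRT } from './common/http_api'; // import path assumed

// Encode a centered request; the union codec selects the matching variant.
const requestBody = logEntriesRequestRT.encode({
  sourceId: 'default',
  startTimestamp: 1578300000000, // hypothetical epoch-millis interval bounds
  endTimestamp: 1578386400000,
  center: { time: 1578343200000, tiebreaker: 0 }, // hypothetical cursor
});
// POST requestBody to LOG_ENTRIES_PATH ('/api/log_entries/entries').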
@@ -7,7 +7,6 @@
import { IResolvers, makeExecutableSchema } from 'graphql-tools';
import { initIpToHostName } from './routes/ip_to_hostname';
import { schemas } from './graphql';
import { createLogEntriesResolvers } from './graphql/log_entries';
import { createSourceStatusResolvers } from './graphql/source_status';
import { createSourcesResolvers } from './graphql/sources';
import { InfraBackendLibs } from './lib/infra_types';

@@ -34,7 +33,6 @@ import { initInventoryMetaRoute } from './routes/inventory_metadata';
export const initInfraServer = (libs: InfraBackendLibs) => {
  const schema = makeExecutableSchema({
    resolvers: [
      createLogEntriesResolvers(libs) as IResolvers,
      createSourcesResolvers(libs) as IResolvers,
      createSourceStatusResolvers(libs) as IResolvers,
    ],
@@ -8,12 +8,11 @@
import { timeMilliseconds } from 'd3-time';
import * as runtimeTypes from 'io-ts';
import { compact, first, get, has, zip } from 'lodash';
import { compact, first, get, has } from 'lodash';
import { pipe } from 'fp-ts/lib/pipeable';
import { map, fold } from 'fp-ts/lib/Either';
import { identity, constant } from 'fp-ts/lib/function';
import { RequestHandlerContext } from 'src/core/server';
import { compareTimeKeys, isTimeKey, TimeKey } from '../../../../common/time';
import { JsonObject, JsonValue } from '../../../../common/typed_json';
import {
  LogEntriesAdapter,

@@ -27,8 +26,6 @@ import { InfraSourceConfiguration } from '../../sources';
import { SortedSearchHit } from '../framework';
import { KibanaFramework } from '../framework/kibana_framework_adapter';

const DAY_MILLIS = 24 * 60 * 60 * 1000;
const LOOKUP_OFFSETS = [0, 1, 7, 30, 365, 10000, Infinity].map(days => days * DAY_MILLIS);
const TIMESTAMP_FORMAT = 'epoch_millis';

interface LogItemHit {
@@ -41,53 +38,13 @@ interface LogItemHit {
export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
  constructor(private readonly framework: KibanaFramework) {}

  public async getAdjacentLogEntryDocuments(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    fields: string[],
    start: TimeKey,
    direction: 'asc' | 'desc',
    maxCount: number,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<LogEntryDocument[]> {
    if (maxCount <= 0) {
      return [];
    }

    const intervals = getLookupIntervals(start.time, direction);

    let documents: LogEntryDocument[] = [];
    for (const [intervalStart, intervalEnd] of intervals) {
      if (documents.length >= maxCount) {
        break;
      }

      const documentsInInterval = await this.getLogEntryDocumentsBetween(
        requestContext,
        sourceConfiguration,
        fields,
        intervalStart,
        intervalEnd,
        documents.length > 0 ? documents[documents.length - 1].key : start,
        maxCount - documents.length,
        filterQuery,
        highlightQuery
      );

      documents = [...documents, ...documentsInInterval];
    }

    return direction === 'asc' ? documents : documents.reverse();
  }

  public async getLogEntries(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    fields: string[],
    params: LogEntriesParams
  ): Promise<LogEntryDocument[]> {
    const { startDate, endDate, query, cursor, size, highlightTerm } = params;
    const { startTimestamp, endTimestamp, query, cursor, size, highlightTerm } = params;

    const { sortDirection, searchAfterClause } = processCursor(cursor);

@@ -133,8 +90,8 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
          {
            range: {
              [sourceConfiguration.fields.timestamp]: {
                gte: startDate,
                lte: endDate,
                gte: startTimestamp,
                lte: endTimestamp,
                format: TIMESTAMP_FORMAT,
              },
            },
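Note: the rename through this adapter is mechanical; the same epoch-millisecond values feed the Elasticsearch range filter under their new names. A sketch of the filter clause the hunk above builds, assuming the source's timestamp field is '@timestamp'; the bounds are hypothetical:

const rangeFilter = {
  range: {
    '@timestamp': {
      gte: 1578300000000, // startTimestamp (epoch millis)
      lte: 1578386400000, // endTimestamp (epoch millis)
      format: 'epoch_millis', // TIMESTAMP_FORMAT above
    },
  },
};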
@@ -158,40 +115,19 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
    return mapHitsToLogEntryDocuments(hits, sourceConfiguration.fields.timestamp, fields);
  }

  /** @deprecated */
  public async getContainedLogEntryDocuments(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    fields: string[],
    start: TimeKey,
    end: TimeKey,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<LogEntryDocument[]> {
    const documents = await this.getLogEntryDocumentsBetween(
      requestContext,
      sourceConfiguration,
      fields,
      start.time,
      end.time,
      start,
      10000,
      filterQuery,
      highlightQuery
    );

    return documents.filter(document => compareTimeKeys(document.key, end) < 0);
  }

  public async getContainedLogSummaryBuckets(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    start: number,
    end: number,
    startTimestamp: number,
    endTimestamp: number,
    bucketSize: number,
    filterQuery?: LogEntryQuery
  ): Promise<LogSummaryBucket[]> {
    const bucketIntervalStarts = timeMilliseconds(new Date(start), new Date(end), bucketSize);
    const bucketIntervalStarts = timeMilliseconds(
      new Date(startTimestamp),
      new Date(endTimestamp),
      bucketSize
    );

    const query = {
      allowNoIndices: true,
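Note: timeMilliseconds from d3-time produces the evenly spaced bucket start times that the summary buckets are keyed on. A small sketch of the call with hypothetical inputs:

import { timeMilliseconds } from 'd3-time';

// Bucket starts every 60 seconds across a hypothetical ten-minute window.
const bucketStarts = timeMilliseconds(
  new Date(1578300000000), // startTimestamp
  new Date(1578300600000), // endTimestamp
  60 * 1000 // bucketSize in milliseconds
);
// bucketStarts.length === 10; each Date marks the beginning of one bucket.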
@@ -229,8 +165,8 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
          {
            range: {
              [sourceConfiguration.fields.timestamp]: {
                gte: start,
                lte: end,
                gte: startTimestamp,
                lte: endTimestamp,
                format: TIMESTAMP_FORMAT,
              },
            },
@@ -288,112 +224,6 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
    }
    return document;
  }

  private async getLogEntryDocumentsBetween(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    fields: string[],
    start: number,
    end: number,
    after: TimeKey | null,
    maxCount: number,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<LogEntryDocument[]> {
    if (maxCount <= 0) {
      return [];
    }

    const sortDirection: 'asc' | 'desc' = start <= end ? 'asc' : 'desc';

    const startRange = {
      [sortDirection === 'asc' ? 'gte' : 'lte']: start,
    };
    const endRange =
      end === Infinity
        ? {}
        : {
            [sortDirection === 'asc' ? 'lte' : 'gte']: end,
          };

    const highlightClause = highlightQuery
      ? {
          highlight: {
            boundary_scanner: 'word',
            fields: fields.reduce(
              (highlightFieldConfigs, fieldName) => ({
                ...highlightFieldConfigs,
                [fieldName]: {},
              }),
              {}
            ),
            fragment_size: 1,
            number_of_fragments: 100,
            post_tags: [''],
            pre_tags: [''],
            highlight_query: highlightQuery,
          },
        }
      : {};

    const searchAfterClause = isTimeKey(after)
      ? {
          search_after: [after.time, after.tiebreaker],
        }
      : {};

    const query = {
      allowNoIndices: true,
      index: sourceConfiguration.logAlias,
      ignoreUnavailable: true,
      body: {
        query: {
          bool: {
            filter: [
              ...createQueryFilterClauses(filterQuery),
              {
                range: {
                  [sourceConfiguration.fields.timestamp]: {
                    ...startRange,
                    ...endRange,
                    format: TIMESTAMP_FORMAT,
                  },
                },
              },
            ],
          },
        },
        ...highlightClause,
        ...searchAfterClause,
        _source: fields,
        size: maxCount,
        sort: [
          { [sourceConfiguration.fields.timestamp]: sortDirection },
          { [sourceConfiguration.fields.tiebreaker]: sortDirection },
        ],
        track_total_hits: false,
      },
    };

    const response = await this.framework.callWithRequest<SortedSearchHit>(
      requestContext,
      'search',
      query
    );
    const hits = response.hits.hits;
    const documents = hits.map(convertHitToLogEntryDocument(fields));

    return documents;
  }
}

function getLookupIntervals(start: number, direction: 'asc' | 'desc'): Array<[number, number]> {
  const offsetSign = direction === 'asc' ? 1 : -1;
  const translatedOffsets = LOOKUP_OFFSETS.map(offset => start + offset * offsetSign);
  const intervals = zip(translatedOffsets.slice(0, -1), translatedOffsets.slice(1)) as Array<
    [number, number]
  >;
  return intervals;
}

function mapHitsToLogEntryDocuments(

@@ -423,28 +253,6 @@ function mapHitsToLogEntryDocuments(
  });
}

/** @deprecated */
const convertHitToLogEntryDocument = (fields: string[]) => (
  hit: SortedSearchHit
): LogEntryDocument => ({
  gid: hit._id,
  fields: fields.reduce(
    (flattenedFields, fieldName) =>
      has(hit._source, fieldName)
        ? {
            ...flattenedFields,
            [fieldName]: get(hit._source, fieldName),
          }
        : flattenedFields,
    {} as { [fieldName: string]: string | number | object | boolean | null }
  ),
  highlights: hit.highlight || {},
  key: {
    time: hit.sort[0],
    tiebreaker: hit.sort[1],
  },
});

const convertDateRangeBucketToSummaryBucket = (
  bucket: LogSummaryDateRangeBucket
): LogSummaryBucket => ({
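Note: both the deleted helper and the surviving getLogEntries paginate the same way: a sort on (timestamp, tiebreaker) plus an Elasticsearch search_after clause seeded from a cursor. A sketch of that clause with a hypothetical cursor taken from a previous page's last entry:

const cursor = { time: 1578343200000, tiebreaker: 5603910 }; // hypothetical values

// search_after resumes strictly after the (timestamp, tiebreaker) sort values,
// so consecutive pages never overlap even when many entries share a timestamp.
const searchAfterClause = { search_after: [cursor.time, cursor.tiebreaker] };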
@@ -4,7 +4,6 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import stringify from 'json-stable-stringify';
import { sortBy } from 'lodash';

import { RequestHandlerContext } from 'src/core/server';

@@ -18,13 +17,10 @@ import {
  LogEntriesCursor,
  LogColumn,
} from '../../../../common/http_api';
import { InfraLogEntry, InfraLogMessageSegment } from '../../../graphql/types';
import {
  InfraSourceConfiguration,
  InfraSources,
  SavedSourceConfigurationFieldColumnRuntimeType,
  SavedSourceConfigurationMessageColumnRuntimeType,
  SavedSourceConfigurationTimestampColumnRuntimeType,
} from '../../sources';
import { getBuiltinRules } from './builtin_rules';
import { convertDocumentSourceToLogItemFields } from './convert_document_source_to_log_item_fields';

@@ -36,16 +32,16 @@ import {
} from './message';

export interface LogEntriesParams {
  startDate: number;
  endDate: number;
  startTimestamp: number;
  endTimestamp: number;
  size?: number;
  query?: JsonObject;
  cursor?: { before: LogEntriesCursor | 'last' } | { after: LogEntriesCursor | 'first' };
  highlightTerm?: string;
}
export interface LogEntriesAroundParams {
  startDate: number;
  endDate: number;
  startTimestamp: number;
  endTimestamp: number;
  size?: number;
  center: LogEntriesCursor;
  query?: JsonObject;
@@ -67,7 +63,7 @@ export class InfraLogEntriesDomain {
    sourceId: string,
    params: LogEntriesAroundParams
  ) {
    const { startDate, endDate, center, query, size, highlightTerm } = params;
    const { startTimestamp, endTimestamp, center, query, size, highlightTerm } = params;

    /*
     * For odd sizes we will round this value down for the first half, and up

@@ -80,8 +76,8 @@ export class InfraLogEntriesDomain {
    const halfSize = (size || LOG_ENTRIES_PAGE_SIZE) / 2;

    const entriesBefore = await this.getLogEntries(requestContext, sourceId, {
      startDate,
      endDate,
      startTimestamp,
      endTimestamp,
      query,
      cursor: { before: center },
      size: Math.floor(halfSize),

@@ -101,8 +97,8 @@ export class InfraLogEntriesDomain {
      : { time: center.time - 1, tiebreaker: 0 };

    const entriesAfter = await this.getLogEntries(requestContext, sourceId, {
      startDate,
      endDate,
      startTimestamp,
      endTimestamp,
      query,
      cursor: { after: cursorAfter },
      size: Math.ceil(halfSize),
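Note: the centered fetch splits the requested size around the cursor, rounding down for the half before and up for the half after, so an odd size still yields exactly size entries. For a hypothetical size of 5:

const size = 5;
const halfSize = size / 2; // 2.5
const entriesBeforeCount = Math.floor(halfSize); // 2 entries before the center
const entriesAfterCount = Math.ceil(halfSize); // 3 entries from the center onward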
@@ -112,71 +108,6 @@ export class InfraLogEntriesDomain {
    return [...entriesBefore, ...entriesAfter];
  }

  /** @deprecated */
  public async getLogEntriesAround(
    requestContext: RequestHandlerContext,
    sourceId: string,
    key: TimeKey,
    maxCountBefore: number,
    maxCountAfter: number,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<{ entriesBefore: InfraLogEntry[]; entriesAfter: InfraLogEntry[] }> {
    if (maxCountBefore <= 0 && maxCountAfter <= 0) {
      return {
        entriesBefore: [],
        entriesAfter: [],
      };
    }

    const { configuration } = await this.libs.sources.getSourceConfiguration(
      requestContext,
      sourceId
    );
    const messageFormattingRules = compileFormattingRules(
      getBuiltinRules(configuration.fields.message)
    );
    const requiredFields = getRequiredFields(configuration, messageFormattingRules);

    const documentsBefore = await this.adapter.getAdjacentLogEntryDocuments(
      requestContext,
      configuration,
      requiredFields,
      key,
      'desc',
      Math.max(maxCountBefore, 1),
      filterQuery,
      highlightQuery
    );
    const lastKeyBefore =
      documentsBefore.length > 0
        ? documentsBefore[documentsBefore.length - 1].key
        : {
            time: key.time - 1,
            tiebreaker: 0,
          };

    const documentsAfter = await this.adapter.getAdjacentLogEntryDocuments(
      requestContext,
      configuration,
      requiredFields,
      lastKeyBefore,
      'asc',
      maxCountAfter,
      filterQuery,
      highlightQuery
    );

    return {
      entriesBefore: (maxCountBefore > 0 ? documentsBefore : []).map(
        convertLogDocumentToEntry(sourceId, configuration.logColumns, messageFormattingRules.format)
      ),
      entriesAfter: documentsAfter.map(
        convertLogDocumentToEntry(sourceId, configuration.logColumns, messageFormattingRules.format)
      ),
    };
  }

  public async getLogEntries(
    requestContext: RequestHandlerContext,
    sourceId: string,
@@ -220,7 +151,7 @@ export class InfraLogEntriesDomain {
          return {
            columnId: column.fieldColumn.id,
            field: column.fieldColumn.field,
            value: stringify(doc.fields[column.fieldColumn.field]),
            value: doc.fields[column.fieldColumn.field],
            highlights: doc.highlights[column.fieldColumn.field] || [],
          };
        }
@@ -232,116 +163,6 @@ export class InfraLogEntriesDomain {
    return entries;
  }

  /** @deprecated */
  public async getLogEntriesBetween(
    requestContext: RequestHandlerContext,
    sourceId: string,
    startKey: TimeKey,
    endKey: TimeKey,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<InfraLogEntry[]> {
    const { configuration } = await this.libs.sources.getSourceConfiguration(
      requestContext,
      sourceId
    );
    const messageFormattingRules = compileFormattingRules(
      getBuiltinRules(configuration.fields.message)
    );
    const requiredFields = getRequiredFields(configuration, messageFormattingRules);
    const documents = await this.adapter.getContainedLogEntryDocuments(
      requestContext,
      configuration,
      requiredFields,
      startKey,
      endKey,
      filterQuery,
      highlightQuery
    );
    const entries = documents.map(
      convertLogDocumentToEntry(sourceId, configuration.logColumns, messageFormattingRules.format)
    );
    return entries;
  }

  /** @deprecated */
  public async getLogEntryHighlights(
    requestContext: RequestHandlerContext,
    sourceId: string,
    startKey: TimeKey,
    endKey: TimeKey,
    highlights: Array<{
      query: string;
      countBefore: number;
      countAfter: number;
    }>,
    filterQuery?: LogEntryQuery
  ): Promise<InfraLogEntry[][]> {
    const { configuration } = await this.libs.sources.getSourceConfiguration(
      requestContext,
      sourceId
    );
    const messageFormattingRules = compileFormattingRules(
      getBuiltinRules(configuration.fields.message)
    );
    const requiredFields = getRequiredFields(configuration, messageFormattingRules);

    const documentSets = await Promise.all(
      highlights.map(async highlight => {
        const highlightQuery = createHighlightQueryDsl(highlight.query, requiredFields);
        const query = filterQuery
          ? {
              bool: {
                filter: [filterQuery, highlightQuery],
              },
            }
          : highlightQuery;
        const [documentsBefore, documents, documentsAfter] = await Promise.all([
          this.adapter.getAdjacentLogEntryDocuments(
            requestContext,
            configuration,
            requiredFields,
            startKey,
            'desc',
            highlight.countBefore,
            query,
            highlightQuery
          ),
          this.adapter.getContainedLogEntryDocuments(
            requestContext,
            configuration,
            requiredFields,
            startKey,
            endKey,
            query,
            highlightQuery
          ),
          this.adapter.getAdjacentLogEntryDocuments(
            requestContext,
            configuration,
            requiredFields,
            endKey,
            'asc',
            highlight.countAfter,
            query,
            highlightQuery
          ),
        ]);
        const entries = [...documentsBefore, ...documents, ...documentsAfter].map(
          convertLogDocumentToEntry(
            sourceId,
            configuration.logColumns,
            messageFormattingRules.format
          )
        );

        return entries;
      })
    );

    return documentSets;
  }

  public async getLogSummaryBucketsBetween(
    requestContext: RequestHandlerContext,
    sourceId: string,
@@ -368,8 +189,8 @@ export class InfraLogEntriesDomain {
  public async getLogSummaryHighlightBucketsBetween(
    requestContext: RequestHandlerContext,
    sourceId: string,
    start: number,
    end: number,
    startTimestamp: number,
    endTimestamp: number,
    bucketSize: number,
    highlightQueries: string[],
    filterQuery?: LogEntryQuery

@@ -396,8 +217,8 @@ export class InfraLogEntriesDomain {
    const summaryBuckets = await this.adapter.getContainedLogSummaryBuckets(
      requestContext,
      configuration,
      start,
      end,
      startTimestamp,
      endTimestamp,
      bucketSize,
      query
    );
@@ -445,17 +266,6 @@ interface LogItemHit {
}

export interface LogEntriesAdapter {
  getAdjacentLogEntryDocuments(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    fields: string[],
    start: TimeKey,
    direction: 'asc' | 'desc',
    maxCount: number,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<LogEntryDocument[]>;

  getLogEntries(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,

@@ -463,21 +273,11 @@ export interface LogEntriesAdapter {
    params: LogEntriesParams
  ): Promise<LogEntryDocument[]>;

  getContainedLogEntryDocuments(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    fields: string[],
    start: TimeKey,
    end: TimeKey,
    filterQuery?: LogEntryQuery,
    highlightQuery?: LogEntryQuery
  ): Promise<LogEntryDocument[]>;

  getContainedLogSummaryBuckets(
    requestContext: RequestHandlerContext,
    sourceConfiguration: InfraSourceConfiguration,
    start: number,
    end: number,
    startTimestamp: number,
    endTimestamp: number,
    bucketSize: number,
    filterQuery?: LogEntryQuery
  ): Promise<LogSummaryBucket[]>;
@@ -505,37 +305,6 @@ export interface LogSummaryBucket {
  topEntryKeys: TimeKey[];
}

/** @deprecated */
const convertLogDocumentToEntry = (
  sourceId: string,
  logColumns: InfraSourceConfiguration['logColumns'],
  formatLogMessage: (fields: Fields, highlights: Highlights) => InfraLogMessageSegment[]
) => (document: LogEntryDocument): InfraLogEntry => ({
  key: document.key,
  gid: document.gid,
  source: sourceId,
  columns: logColumns.map(logColumn => {
    if (SavedSourceConfigurationTimestampColumnRuntimeType.is(logColumn)) {
      return {
        columnId: logColumn.timestampColumn.id,
        timestamp: document.key.time,
      };
    } else if (SavedSourceConfigurationMessageColumnRuntimeType.is(logColumn)) {
      return {
        columnId: logColumn.messageColumn.id,
        message: formatLogMessage(document.fields, document.highlights),
      };
    } else {
      return {
        columnId: logColumn.fieldColumn.id,
        field: logColumn.fieldColumn.field,
        highlights: document.highlights[logColumn.fieldColumn.field] || [],
        value: stringify(document.fields[logColumn.fieldColumn.field] || null),
      };
    }
  }),
});

const logSummaryBucketHasEntries = (bucket: LogSummaryBucket) =>
  bucket.entriesCount > 0 && bucket.topEntryKeys.length > 0;
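Note: with the deprecated converters gone, the domain emits columns typed by the shared union codecs rather than the GraphQL types. A sketch of narrowing that union on the consuming side, assuming LogColumn and LogTimestampColumn are imported from the plugin's common/http_api module; the import path is an assumption:

import { LogColumn, LogTimestampColumn } from './common/http_api'; // import path assumed

// Mirrors the deleted isTimestampColumn type guard, but over the HTTP API types.
const isTimestampColumn = (column: LogColumn): column is LogTimestampColumn =>
  'timestamp' in column;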
@@ -38,13 +38,19 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
        fold(throwErrors(Boom.badRequest), identity)
      );

      const { startDate, endDate, sourceId, query, size } = payload;
      const {
        startTimestamp: startTimestamp,
        endTimestamp: endTimestamp,
        sourceId,
        query,
        size,
      } = payload;

      let entries;
      if ('center' in payload) {
        entries = await logEntries.getLogEntriesAround__new(requestContext, sourceId, {
          startDate,
          endDate,
          startTimestamp,
          endTimestamp,
          query: parseFilterQuery(query),
          center: payload.center,
          size,
@@ -58,20 +64,22 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
        }

        entries = await logEntries.getLogEntries(requestContext, sourceId, {
          startDate,
          endDate,
          startTimestamp,
          endTimestamp,
          query: parseFilterQuery(query),
          cursor,
          size,
        });
      }

      const hasEntries = entries.length > 0;

      return response.ok({
        body: logEntriesResponseRT.encode({
          data: {
            entries,
            topCursor: entries[0].cursor,
            bottomCursor: entries[entries.length - 1].cursor,
            topCursor: hasEntries ? entries[0].cursor : null,
            bottomCursor: hasEntries ? entries[entries.length - 1].cursor : null,
          },
        }),
      });
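Note: with the hasEntries guard above, an empty interval now encodes null cursors instead of failing on entries[0]. A caller can use that to stop paginating; a sketch over a hypothetical decoded response:

// data is a hypothetical decoded response from /api/log_entries/entries.
declare const data: {
  entries: unknown[];
  topCursor: { time: number; tiebreaker: number } | null;
  bottomCursor: { time: number; tiebreaker: number } | null;
};

if (data.bottomCursor === null) {
  // The interval matched nothing; there is no next page to request.
} else {
  // Request the next page with `after: data.bottomCursor`.
}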
@@ -38,7 +38,7 @@ export const initLogEntriesHighlightsRoute = ({ framework, logEntries }: InfraBa
        fold(throwErrors(Boom.badRequest), identity)
      );

      const { startDate, endDate, sourceId, query, size, highlightTerms } = payload;
      const { startTimestamp, endTimestamp, sourceId, query, size, highlightTerms } = payload;

      let entriesPerHighlightTerm;

@@ -46,8 +46,8 @@ export const initLogEntriesHighlightsRoute = ({ framework, logEntries }: InfraBa
        entriesPerHighlightTerm = await Promise.all(
          highlightTerms.map(highlightTerm =>
            logEntries.getLogEntriesAround__new(requestContext, sourceId, {
              startDate,
              endDate,
              startTimestamp,
              endTimestamp,
              query: parseFilterQuery(query),
              center: payload.center,
              size,

@@ -66,8 +66,8 @@ export const initLogEntriesHighlightsRoute = ({ framework, logEntries }: InfraBa
        entriesPerHighlightTerm = await Promise.all(
          highlightTerms.map(highlightTerm =>
            logEntries.getLogEntries(requestContext, sourceId, {
              startDate,
              endDate,
              startTimestamp,
              endTimestamp,
              query: parseFilterQuery(query),
              cursor,
              size,
@@ -36,13 +36,13 @@ export const initLogEntriesSummaryRoute = ({ framework, logEntries }: InfraBacke
        logEntriesSummaryRequestRT.decode(request.body),
        fold(throwErrors(Boom.badRequest), identity)
      );
      const { sourceId, startDate, endDate, bucketSize, query } = payload;
      const { sourceId, startTimestamp, endTimestamp, bucketSize, query } = payload;

      const buckets = await logEntries.getLogSummaryBucketsBetween(
        requestContext,
        sourceId,
        startDate,
        endDate,
        startTimestamp,
        endTimestamp,
        bucketSize,
        parseFilterQuery(query)
      );

@@ -50,8 +50,8 @@ export const initLogEntriesSummaryRoute = ({ framework, logEntries }: InfraBacke
      return response.ok({
        body: logEntriesSummaryResponseRT.encode({
          data: {
            start: startDate,
            end: endDate,
            start: startTimestamp,
            end: endTimestamp,
            buckets,
          },
        }),
@@ -39,13 +39,20 @@ export const initLogEntriesSummaryHighlightsRoute = ({
        logEntriesSummaryHighlightsRequestRT.decode(request.body),
        fold(throwErrors(Boom.badRequest), identity)
      );
      const { sourceId, startDate, endDate, bucketSize, query, highlightTerms } = payload;
      const {
        sourceId,
        startTimestamp,
        endTimestamp,
        bucketSize,
        query,
        highlightTerms,
      } = payload;

      const bucketsPerHighlightTerm = await logEntries.getLogSummaryHighlightBucketsBetween(
        requestContext,
        sourceId,
        startDate,
        endDate,
        startTimestamp,
        endTimestamp,
        bucketSize,
        highlightTerms,
        parseFilterQuery(query)

@@ -54,8 +61,8 @@ export const initLogEntriesSummaryHighlightsRoute = ({
      return response.ok({
        body: logEntriesSummaryHighlightsResponseRT.encode({
          data: bucketsPerHighlightTerm.map(buckets => ({
            start: startDate,
            end: endDate,
            start: startTimestamp,
            end: endTimestamp,
            buckets,
          })),
        }),
@@ -6409,7 +6409,6 @@
    "xpack.infra.logs.analysisPage.unavailable.mlAppLink": "機械学習アプリ",
    "xpack.infra.logs.customizeLogs.customizeButtonLabel": "カスタマイズ",
    "xpack.infra.logs.customizeLogs.lineWrappingFormRowLabel": "改行",
    "xpack.infra.logs.customizeLogs.minimapScaleFormRowLabel": "ミニマップスケール",
    "xpack.infra.logs.customizeLogs.textSizeFormRowLabel": "テキストサイズ",
    "xpack.infra.logs.customizeLogs.textSizeRadioGroup": "{textScale, select, small {小さい} 中くらい {Medium} 大きい {Large} その他の {{textScale}} }",
    "xpack.infra.logs.customizeLogs.wrapLongLinesSwitchLabel": "長い行を改行",

@@ -6424,19 +6423,12 @@
    "xpack.infra.logs.index.settingsTabTitle": "設定",
    "xpack.infra.logs.index.streamTabTitle": "ストリーム",
    "xpack.infra.logs.jumpToTailText": "最も新しいエントリーに移動",
    "xpack.infra.logs.lastStreamingUpdateText": " 最終更新 {lastUpdateTime}",
    "xpack.infra.logs.loadAgainButtonLabel": "再読み込み",
    "xpack.infra.logs.loadingAdditionalEntriesText": "追加エントリーを読み込み中",
    "xpack.infra.logs.noAdditionalEntriesFoundText": "追加エントリーが見つかりません",
    "xpack.infra.logs.scrollableLogTextStreamView.loadingEntriesLabel": "エントリーを読み込み中",
    "xpack.infra.logs.search.nextButtonLabel": "次へ",
    "xpack.infra.logs.search.previousButtonLabel": "前へ",
    "xpack.infra.logs.search.searchInLogsAriaLabel": "検索",
    "xpack.infra.logs.search.searchInLogsPlaceholder": "検索",
    "xpack.infra.logs.searchResultTooltip": "{bucketCount, plural, one {# 件のハイライトされたエントリー} other {# 件のハイライトされたエントリー}}",
    "xpack.infra.logs.startStreamingButtonLabel": "ライブストリーム",
    "xpack.infra.logs.stopStreamingButtonLabel": "ストリーム停止",
    "xpack.infra.logs.streamingDescription": "新しいエントリーをストリーム中...",
    "xpack.infra.logs.streamingNewEntriesText": "新しいエントリーをストリーム中",
    "xpack.infra.logs.streamPage.documentTitle": "{previousTitle} | ストリーム",
    "xpack.infra.logsPage.noLoggingIndicesDescription": "追加しましょう!",

@@ -6444,12 +6436,6 @@
    "xpack.infra.logsPage.noLoggingIndicesTitle": "ログインデックスがないようです。",
    "xpack.infra.logsPage.toolbar.kqlSearchFieldAriaLabel": "ログエントリーを検索",
    "xpack.infra.logsPage.toolbar.kqlSearchFieldPlaceholder": "ログエントリーを検索中… (例: host.name:host-1)",
    "xpack.infra.mapLogs.oneDayLabel": "1 日",
    "xpack.infra.mapLogs.oneHourLabel": "1 時間",
    "xpack.infra.mapLogs.oneMinuteLabel": "1 分",
    "xpack.infra.mapLogs.oneMonthLabel": "1 か月",
    "xpack.infra.mapLogs.oneWeekLabel": "1 週間",
    "xpack.infra.mapLogs.oneYearLabel": "1 年",
    "xpack.infra.metricDetailPage.awsMetricsLayout.cpuUtilSection.percentSeriesLabel": "パーセント",
    "xpack.infra.metricDetailPage.awsMetricsLayout.cpuUtilSection.sectionLabel": "CPU 使用状況",
    "xpack.infra.metricDetailPage.awsMetricsLayout.diskioBytesSection.readsSeriesLabel": "読み取り",
@@ -6409,7 +6409,6 @@
    "xpack.infra.logs.analysisPage.unavailable.mlAppLink": "Machine Learning 应用",
    "xpack.infra.logs.customizeLogs.customizeButtonLabel": "定制",
    "xpack.infra.logs.customizeLogs.lineWrappingFormRowLabel": "换行",
    "xpack.infra.logs.customizeLogs.minimapScaleFormRowLabel": "迷你地图比例",
    "xpack.infra.logs.customizeLogs.textSizeFormRowLabel": "文本大小",
    "xpack.infra.logs.customizeLogs.textSizeRadioGroup": "{textScale, select, small {小} medium {Medium} large {Large} other {{textScale}} }",
    "xpack.infra.logs.customizeLogs.wrapLongLinesSwitchLabel": "长行换行",

@@ -6424,19 +6423,12 @@
    "xpack.infra.logs.index.settingsTabTitle": "设置",
    "xpack.infra.logs.index.streamTabTitle": "流式传输",
    "xpack.infra.logs.jumpToTailText": "跳到最近的条目",
    "xpack.infra.logs.lastStreamingUpdateText": " 最后更新时间:{lastUpdateTime}",
    "xpack.infra.logs.loadAgainButtonLabel": "重新加载",
    "xpack.infra.logs.loadingAdditionalEntriesText": "正在加载其他条目",
    "xpack.infra.logs.noAdditionalEntriesFoundText": "找不到其他条目",
    "xpack.infra.logs.scrollableLogTextStreamView.loadingEntriesLabel": "正在加载条目",
    "xpack.infra.logs.search.nextButtonLabel": "下一个",
    "xpack.infra.logs.search.previousButtonLabel": "上一页",
    "xpack.infra.logs.search.searchInLogsAriaLabel": "搜索",
    "xpack.infra.logs.search.searchInLogsPlaceholder": "搜索",
    "xpack.infra.logs.searchResultTooltip": "{bucketCount, plural, one {# 个高亮条目} other {# 个高亮条目}}",
    "xpack.infra.logs.startStreamingButtonLabel": "实时流式传输",
    "xpack.infra.logs.stopStreamingButtonLabel": "停止流式传输",
    "xpack.infra.logs.streamingDescription": "正在流式传输新条目……",
    "xpack.infra.logs.streamingNewEntriesText": "正在流式传输新条目",
    "xpack.infra.logs.streamPage.documentTitle": "{previousTitle} | 流式传输",
    "xpack.infra.logsPage.noLoggingIndicesDescription": "让我们添加一些!",

@@ -6444,12 +6436,6 @@
    "xpack.infra.logsPage.noLoggingIndicesTitle": "似乎您没有任何日志索引。",
    "xpack.infra.logsPage.toolbar.kqlSearchFieldAriaLabel": "搜索日志条目",
    "xpack.infra.logsPage.toolbar.kqlSearchFieldPlaceholder": "搜索日志条目……(例如 host.name:host-1)",
    "xpack.infra.mapLogs.oneDayLabel": "1 日",
    "xpack.infra.mapLogs.oneHourLabel": "1 小时",
    "xpack.infra.mapLogs.oneMinuteLabel": "1 分钟",
    "xpack.infra.mapLogs.oneMonthLabel": "1 个月",
    "xpack.infra.mapLogs.oneWeekLabel": "1 周",
    "xpack.infra.mapLogs.oneYearLabel": "1 年",
    "xpack.infra.metricDetailPage.awsMetricsLayout.cpuUtilSection.percentSeriesLabel": "百分比",
    "xpack.infra.metricDetailPage.awsMetricsLayout.cpuUtilSection.sectionLabel": "CPU 使用率",
    "xpack.infra.metricDetailPage.awsMetricsLayout.diskioBytesSection.readsSeriesLabel": "读取数",
@@ -10,8 +10,8 @@ export default function({ loadTestFile }) {
  loadTestFile(require.resolve('./log_analysis'));
  loadTestFile(require.resolve('./log_entries'));
  loadTestFile(require.resolve('./log_entry_highlights'));
  loadTestFile(require.resolve('./log_summary'));
  loadTestFile(require.resolve('./logs_without_millis'));
  loadTestFile(require.resolve('./log_summary'));
  loadTestFile(require.resolve('./metrics'));
  loadTestFile(require.resolve('./sources'));
  loadTestFile(require.resolve('./waffle'));
@@ -5,8 +5,6 @@
 */

import expect from '@kbn/expect';
import { ascending, pairs } from 'd3-array';
import gql from 'graphql-tag';
import { v4 as uuidv4 } from 'uuid';

import { pipe } from 'fp-ts/lib/pipeable';

@@ -19,10 +17,11 @@ import {
  LOG_ENTRIES_PATH,
  logEntriesRequestRT,
  logEntriesResponseRT,
  LogTimestampColumn,
  LogFieldColumn,
  LogMessageColumn,
} from '../../../../plugins/infra/common/http_api';

import { sharedFragments } from '../../../../plugins/infra/common/graphql/shared';
import { InfraTimeKey } from '../../../../plugins/infra/public/graphql/types';
import { FtrProviderContext } from '../../ftr_provider_context';

const KEY_WITHIN_DATA_RANGE = {

@@ -38,75 +37,12 @@ const LATEST_KEY_WITH_DATA = {
  tiebreaker: 5603910,
};

const logEntriesAroundQuery = gql`
  query LogEntriesAroundQuery(
    $timeKey: InfraTimeKeyInput!
    $countBefore: Int = 0
    $countAfter: Int = 0
    $filterQuery: String
  ) {
    source(id: "default") {
      id
      logEntriesAround(
        key: $timeKey
        countBefore: $countBefore
        countAfter: $countAfter
        filterQuery: $filterQuery
      ) {
        start {
          ...InfraTimeKeyFields
        }
        end {
          ...InfraTimeKeyFields
        }
        hasMoreBefore
        hasMoreAfter
        entries {
          ...InfraLogEntryFields
        }
      }
    }
  }

  ${sharedFragments.InfraTimeKey}
  ${sharedFragments.InfraLogEntryFields}
`;

const logEntriesBetweenQuery = gql`
  query LogEntriesBetweenQuery(
    $startKey: InfraTimeKeyInput!
    $endKey: InfraTimeKeyInput!
    $filterQuery: String
  ) {
    source(id: "default") {
      id
      logEntriesBetween(startKey: $startKey, endKey: $endKey, filterQuery: $filterQuery) {
        start {
          ...InfraTimeKeyFields
        }
        end {
          ...InfraTimeKeyFields
        }
        hasMoreBefore
        hasMoreAfter
        entries {
          ...InfraLogEntryFields
        }
      }
    }
  }

  ${sharedFragments.InfraTimeKey}
  ${sharedFragments.InfraLogEntryFields}
`;

const COMMON_HEADERS = {
  'kbn-xsrf': 'some-xsrf-token',
};

export default function({ getService }: FtrProviderContext) {
  const esArchiver = getService('esArchiver');
  const client = getService('infraOpsGraphQLClient');
  const supertest = getService('supertest');
  const sourceConfigurationService = getService('infraOpsSourceConfiguration');

@@ -126,8 +62,8 @@ export default function({ getService }: FtrProviderContext) {
        .send(
          logEntriesRequestRT.encode({
            sourceId: 'default',
            startDate: EARLIEST_KEY_WITH_DATA.time,
            endDate: KEY_WITHIN_DATA_RANGE.time,
            startTimestamp: EARLIEST_KEY_WITH_DATA.time,
            endTimestamp: KEY_WITHIN_DATA_RANGE.time,
          })
        )
        .expect(200);
@@ -154,6 +90,42 @@ export default function({ getService }: FtrProviderContext) {
        expect(lastEntry.cursor.time <= KEY_WITHIN_DATA_RANGE.time).to.be(true);
      });

      it('Returns the default columns', async () => {
        const { body } = await supertest
          .post(LOG_ENTRIES_PATH)
          .set(COMMON_HEADERS)
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startTimestamp: EARLIEST_KEY_WITH_DATA.time,
              endTimestamp: LATEST_KEY_WITH_DATA.time,
              center: KEY_WITHIN_DATA_RANGE,
            })
          )
          .expect(200);

        const logEntriesResponse = pipe(
          logEntriesResponseRT.decode(body),
          fold(throwErrors(createPlainError), identity)
        );

        const entries = logEntriesResponse.data.entries;
        const entry = entries[0];
        expect(entry.columns).to.have.length(3);

        const timestampColumn = entry.columns[0] as LogTimestampColumn;
        expect(timestampColumn).to.have.property('timestamp');

        const eventDatasetColumn = entry.columns[1] as LogFieldColumn;
        expect(eventDatasetColumn).to.have.property('field');
        expect(eventDatasetColumn.field).to.be('event.dataset');
        expect(eventDatasetColumn).to.have.property('value');

        const messageColumn = entry.columns[2] as LogMessageColumn;
        expect(messageColumn).to.have.property('message');
        expect(messageColumn.message.length).to.be.greaterThan(0);
      });

      it('Paginates correctly with `after`', async () => {
        const { body: firstPageBody } = await supertest
          .post(LOG_ENTRIES_PATH)
@@ -161,8 +133,8 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: EARLIEST_KEY_WITH_DATA.time,
              endDate: KEY_WITHIN_DATA_RANGE.time,
              startTimestamp: EARLIEST_KEY_WITH_DATA.time,
              endTimestamp: KEY_WITHIN_DATA_RANGE.time,
              size: 10,
            })
          );

@@ -177,9 +149,9 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: EARLIEST_KEY_WITH_DATA.time,
              endDate: KEY_WITHIN_DATA_RANGE.time,
              after: firstPage.data.bottomCursor,
              startTimestamp: EARLIEST_KEY_WITH_DATA.time,
              endTimestamp: KEY_WITHIN_DATA_RANGE.time,
              after: firstPage.data.bottomCursor!,
              size: 10,
            })
          );

@@ -194,8 +166,8 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: EARLIEST_KEY_WITH_DATA.time,
              endDate: KEY_WITHIN_DATA_RANGE.time,
              startTimestamp: EARLIEST_KEY_WITH_DATA.time,
              endTimestamp: KEY_WITHIN_DATA_RANGE.time,
              size: 20,
            })
          );

@@ -220,8 +192,8 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: KEY_WITHIN_DATA_RANGE.time,
              endDate: LATEST_KEY_WITH_DATA.time,
              startTimestamp: KEY_WITHIN_DATA_RANGE.time,
              endTimestamp: LATEST_KEY_WITH_DATA.time,
              before: 'last',
              size: 10,
            })

@@ -237,9 +209,9 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: KEY_WITHIN_DATA_RANGE.time,
              endDate: LATEST_KEY_WITH_DATA.time,
              before: lastPage.data.topCursor,
              startTimestamp: KEY_WITHIN_DATA_RANGE.time,
              endTimestamp: LATEST_KEY_WITH_DATA.time,
              before: lastPage.data.topCursor!,
              size: 10,
            })
          );

@@ -254,8 +226,8 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: KEY_WITHIN_DATA_RANGE.time,
              endDate: LATEST_KEY_WITH_DATA.time,
              startTimestamp: KEY_WITHIN_DATA_RANGE.time,
              endTimestamp: LATEST_KEY_WITH_DATA.time,
              before: 'last',
              size: 20,
            })

@@ -281,8 +253,8 @@ export default function({ getService }: FtrProviderContext) {
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startDate: EARLIEST_KEY_WITH_DATA.time,
              endDate: LATEST_KEY_WITH_DATA.time,
              startTimestamp: EARLIEST_KEY_WITH_DATA.time,
              endTimestamp: LATEST_KEY_WITH_DATA.time,
              center: KEY_WITHIN_DATA_RANGE,
            })
          )
@@ -300,101 +272,31 @@ export default function({ getService }: FtrProviderContext) {
        expect(firstEntry.cursor.time >= EARLIEST_KEY_WITH_DATA.time).to.be(true);
        expect(lastEntry.cursor.time <= LATEST_KEY_WITH_DATA.time).to.be(true);
      });
    });
  });

  describe('logEntriesAround', () => {
    describe('with the default source', () => {
      before(() => esArchiver.load('empty_kibana'));
      after(() => esArchiver.unload('empty_kibana'));
      it('Handles empty responses', async () => {
        const startTimestamp = Date.now() + 1000;
        const endTimestamp = Date.now() + 5000;

      it('should return newer and older log entries when present', async () => {
        const {
          data: {
            source: { logEntriesAround },
          },
        } = await client.query<any>({
          query: logEntriesAroundQuery,
          variables: {
            timeKey: KEY_WITHIN_DATA_RANGE,
            countBefore: 100,
            countAfter: 100,
          },
        });
        const { body } = await supertest
          .post(LOG_ENTRIES_PATH)
          .set(COMMON_HEADERS)
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startTimestamp,
              endTimestamp,
            })
          )
          .expect(200);

        expect(logEntriesAround).to.have.property('entries');
        expect(logEntriesAround.entries).to.have.length(200);
        expect(isSorted(ascendingTimeKey)(logEntriesAround.entries)).to.equal(true);
        const logEntriesResponse = pipe(
          logEntriesResponseRT.decode(body),
          fold(throwErrors(createPlainError), identity)
        );

        expect(logEntriesAround.hasMoreBefore).to.equal(true);
        expect(logEntriesAround.hasMoreAfter).to.equal(true);
      });

      it('should indicate if no older entries are present', async () => {
        const {
          data: {
            source: { logEntriesAround },
          },
        } = await client.query<any>({
          query: logEntriesAroundQuery,
          variables: {
            timeKey: EARLIEST_KEY_WITH_DATA,
            countBefore: 100,
            countAfter: 100,
          },
        });

        expect(logEntriesAround.hasMoreBefore).to.equal(false);
        expect(logEntriesAround.hasMoreAfter).to.equal(true);
      });

      it('should indicate if no newer entries are present', async () => {
        const {
          data: {
            source: { logEntriesAround },
          },
        } = await client.query<any>({
          query: logEntriesAroundQuery,
          variables: {
            timeKey: LATEST_KEY_WITH_DATA,
            countBefore: 100,
            countAfter: 100,
          },
        });

        expect(logEntriesAround.hasMoreBefore).to.equal(true);
        expect(logEntriesAround.hasMoreAfter).to.equal(false);
      });

      it('should return the default columns', async () => {
        const {
          data: {
            source: {
              logEntriesAround: {
                entries: [entry],
              },
            },
          },
        } = await client.query<any>({
          query: logEntriesAroundQuery,
          variables: {
            timeKey: KEY_WITHIN_DATA_RANGE,
            countAfter: 1,
          },
        });

        expect(entry.columns).to.have.length(3);
        expect(entry.columns[0]).to.have.property('timestamp');
        expect(entry.columns[0].timestamp).to.be.a('number');
        expect(entry.columns[1]).to.have.property('field');
        expect(entry.columns[1].field).to.be('event.dataset');
        expect(entry.columns[1]).to.have.property('value');
        expect(JSON.parse)
          .withArgs(entry.columns[1].value)
          .to.not.throwException();
        expect(entry.columns[2]).to.have.property('message');
        expect(entry.columns[2].message).to.be.an('array');
        expect(entry.columns[2].message.length).to.be.greaterThan(0);
        expect(logEntriesResponse.data.entries).to.have.length(0);
        expect(logEntriesResponse.data.topCursor).to.be(null);
        expect(logEntriesResponse.data.bottomCursor).to.be(null);
      });
    });
@ -431,120 +333,48 @@ export default function({ getService }: FtrProviderContext) {
|
|||
});
|
||||
after(() => esArchiver.unload('empty_kibana'));
|
||||
|
||||
it('should return the configured columns', async () => {
|
||||
const {
|
||||
data: {
|
||||
source: {
|
||||
logEntriesAround: {
|
||||
entries: [entry],
|
||||
},
|
||||
},
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesAroundQuery,
|
||||
variables: {
|
||||
timeKey: KEY_WITHIN_DATA_RANGE,
|
||||
countAfter: 1,
|
||||
},
|
||||
});
|
||||
it('returns the configured columns', async () => {
|
||||
const { body } = await supertest
|
||||
.post(LOG_ENTRIES_PATH)
|
||||
.set(COMMON_HEADERS)
|
||||
.send(
|
||||
logEntriesRequestRT.encode({
|
||||
sourceId: 'default',
|
||||
startTimestamp: EARLIEST_KEY_WITH_DATA.time,
|
||||
endTimestamp: LATEST_KEY_WITH_DATA.time,
|
||||
center: KEY_WITHIN_DATA_RANGE,
|
||||
})
|
||||
)
|
||||
.expect(200);
|
||||
|
||||
const logEntriesResponse = pipe(
|
||||
logEntriesResponseRT.decode(body),
|
||||
fold(throwErrors(createPlainError), identity)
|
||||
);
|
||||
|
||||
const entries = logEntriesResponse.data.entries;
|
||||
const entry = entries[0];
|
||||
|
||||
expect(entry.columns).to.have.length(4);
|
||||
expect(entry.columns[0]).to.have.property('timestamp');
|
||||
expect(entry.columns[0].timestamp).to.be.a('number');
|
||||
expect(entry.columns[1]).to.have.property('field');
|
||||
expect(entry.columns[1].field).to.be('host.name');
|
||||
expect(entry.columns[1]).to.have.property('value');
|
||||
expect(JSON.parse)
|
||||
.withArgs(entry.columns[1].value)
|
||||
.to.not.throwException();
|
||||
expect(entry.columns[2]).to.have.property('field');
|
||||
expect(entry.columns[2].field).to.be('event.dataset');
|
||||
expect(entry.columns[2]).to.have.property('value');
|
||||
expect(JSON.parse)
|
||||
.withArgs(entry.columns[2].value)
|
||||
.to.not.throwException();
|
||||
expect(entry.columns[3]).to.have.property('message');
|
||||
expect(entry.columns[3].message).to.be.an('array');
|
||||
expect(entry.columns[3].message.length).to.be.greaterThan(0);
|
||||
});
|
||||
});
|
||||
});

  describe('logEntriesBetween', () => {
    describe('with the default source', () => {
      before(() => esArchiver.load('empty_kibana'));
      after(() => esArchiver.unload('empty_kibana'));
      const timestampColumn = entry.columns[0] as LogTimestampColumn;
      expect(timestampColumn).to.have.property('timestamp');

      it('should return log entries between the start and end keys', async () => {
        const {
          data: {
            source: { logEntriesBetween },
          },
        } = await client.query<any>({
          query: logEntriesBetweenQuery,
          variables: {
            startKey: EARLIEST_KEY_WITH_DATA,
            endKey: KEY_WITHIN_DATA_RANGE,
          },
        });
      const hostNameColumn = entry.columns[1] as LogFieldColumn;
      expect(hostNameColumn).to.have.property('field');
      expect(hostNameColumn.field).to.be('host.name');
      expect(hostNameColumn).to.have.property('value');

        expect(logEntriesBetween).to.have.property('entries');
        expect(logEntriesBetween.entries).to.not.be.empty();
        expect(isSorted(ascendingTimeKey)(logEntriesBetween.entries)).to.equal(true);
      const eventDatasetColumn = entry.columns[2] as LogFieldColumn;
      expect(eventDatasetColumn).to.have.property('field');
      expect(eventDatasetColumn.field).to.be('event.dataset');
      expect(eventDatasetColumn).to.have.property('value');

        expect(
          ascendingTimeKey(logEntriesBetween.entries[0], { key: EARLIEST_KEY_WITH_DATA })
        ).to.be.above(-1);
        expect(
          ascendingTimeKey(logEntriesBetween.entries[logEntriesBetween.entries.length - 1], {
            key: KEY_WITHIN_DATA_RANGE,
          })
        ).to.be.below(1);
      });

      it('should return results consistent with logEntriesAround', async () => {
        const {
          data: {
            source: { logEntriesAround },
          },
        } = await client.query<any>({
          query: logEntriesAroundQuery,
          variables: {
            timeKey: KEY_WITHIN_DATA_RANGE,
            countBefore: 100,
            countAfter: 100,
          },
        });

        const {
          data: {
            source: { logEntriesBetween },
          },
        } = await client.query<any>({
          query: logEntriesBetweenQuery,
          variables: {
            startKey: {
              time: logEntriesAround.start.time,
              tiebreaker: logEntriesAround.start.tiebreaker - 1,
            },
            endKey: {
              time: logEntriesAround.end.time,
              tiebreaker: logEntriesAround.end.tiebreaker + 1,
            },
          },
        });

        expect(logEntriesBetween).to.eql(logEntriesAround);
      const messageColumn = entry.columns[3] as LogMessageColumn;
      expect(messageColumn).to.have.property('message');
      expect(messageColumn.message.length).to.be.greaterThan(0);
      });
    });
  });
});
}

const isSorted = <Value>(comparator: (first: Value, second: Value) => number) => (
  values: Value[]
) => pairs(values, comparator).every(order => order <= 0);

const ascendingTimeKey = (first: { key: InfraTimeKey }, second: { key: InfraTimeKey }) =>
  ascending(first.key.time, second.key.time) ||
  ascending(first.key.tiebreaker, second.key.tiebreaker);
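
Note: the two helpers above back every ordering assertion in these tests. A self-contained sketch of how they compose; `TimeKey` stands in for the imported `InfraTimeKey`, and the sample keys are illustrative only.

import { ascending, pairs } from 'd3-array';

// `TimeKey` stands in for the imported `InfraTimeKey` type.
type TimeKey = { time: number; tiebreaker: number };

const isSorted = <Value>(comparator: (first: Value, second: Value) => number) => (
  values: Value[]
) => pairs(values, comparator).every(order => order <= 0);

// Ties on `time` fall back to `tiebreaker`, matching the cursor semantics.
const ascendingTimeKey = (first: { key: TimeKey }, second: { key: TimeKey }) =>
  ascending(first.key.time, second.key.time) ||
  ascending(first.key.tiebreaker, second.key.tiebreaker);

// Adjacent pairs are reduced by the comparator; all results must be <= 0.
const sorted = isSorted(ascendingTimeKey)([
  { key: { time: 1, tiebreaker: 0 } },
  { key: { time: 1, tiebreaker: 1 } },
  { key: { time: 2, tiebreaker: 0 } },
]); // => true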
@@ -5,8 +5,6 @@
 */

import expect from '@kbn/expect';
import { ascending, pairs } from 'd3-array';
import gql from 'graphql-tag';

import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
@@ -21,21 +19,11 @@ import {
} from '../../../../plugins/infra/common/http_api';

import { FtrProviderContext } from '../../ftr_provider_context';
import { sharedFragments } from '../../../../plugins/infra/common/graphql/shared';
import { InfraTimeKey } from '../../../../plugins/infra/public/graphql/types';

const KEY_BEFORE_START = {
  time: new Date('2000-01-01T00:00:00.000Z').valueOf(),
  tiebreaker: -1,
};
const KEY_AFTER_START = {
  time: new Date('2000-01-01T00:00:04.000Z').valueOf(),
  tiebreaker: -1,
};
const KEY_BEFORE_END = {
  time: new Date('2000-01-01T00:00:06.001Z').valueOf(),
  tiebreaker: 0,
};
const KEY_AFTER_END = {
  time: new Date('2000-01-01T00:00:09.001Z').valueOf(),
  tiebreaker: 0,
@@ -48,7 +36,6 @@ const COMMON_HEADERS = {
export default function({ getService }: FtrProviderContext) {
  const esArchiver = getService('esArchiver');
  const supertest = getService('supertest');
  const client = getService('infraOpsGraphQLClient');

  describe('log highlight apis', () => {
    before(() => esArchiver.load('infra/simple_logs'));
@@ -66,8 +53,8 @@ export default function({ getService }: FtrProviderContext) {
        .send(
          logEntriesHighlightsRequestRT.encode({
            sourceId: 'default',
            startDate: KEY_BEFORE_START.time,
            endDate: KEY_AFTER_END.time,
            startTimestamp: KEY_BEFORE_START.time,
            endTimestamp: KEY_AFTER_END.time,
            highlightTerms: ['message of document 0'],
          })
        )
@@ -116,8 +103,8 @@ export default function({ getService }: FtrProviderContext) {
        .send(
          logEntriesHighlightsRequestRT.encode({
            sourceId: 'default',
            startDate: KEY_BEFORE_START.time,
            endDate: KEY_AFTER_END.time,
            startTimestamp: KEY_BEFORE_START.time,
            endTimestamp: KEY_AFTER_END.time,
            highlightTerms: ['generate_test_data/simple_logs'],
          })
        )
@@ -152,8 +139,8 @@ export default function({ getService }: FtrProviderContext) {
        .send(
          logEntriesHighlightsRequestRT.encode({
            sourceId: 'default',
            startDate: KEY_BEFORE_START.time,
            endDate: KEY_AFTER_END.time,
            startTimestamp: KEY_BEFORE_START.time,
            endTimestamp: KEY_AFTER_END.time,
            query: JSON.stringify({
              multi_match: { query: 'host-a', type: 'phrase', lenient: true },
            }),
@@ -185,236 +172,5 @@ export default function({ getService }: FtrProviderContext) {
      });
    });
  });

  describe('logEntryHighlights', () => {
    describe('with the default source', () => {
      before(() => esArchiver.load('empty_kibana'));
      after(() => esArchiver.unload('empty_kibana'));

      it('should return log highlights in the built-in message column', async () => {
        const {
          data: {
            source: { logEntryHighlights },
          },
        } = await client.query<any>({
          query: logEntryHighlightsQuery,
          variables: {
            sourceId: 'default',
            startKey: KEY_BEFORE_START,
            endKey: KEY_AFTER_END,
            highlights: [
              {
                query: 'message of document 0',
                countBefore: 0,
                countAfter: 0,
              },
            ],
          },
        });

        expect(logEntryHighlights).to.have.length(1);

        const [logEntryHighlightSet] = logEntryHighlights;
        expect(logEntryHighlightSet).to.have.property('entries');
        // ten bundles with one highlight each
        expect(logEntryHighlightSet.entries).to.have.length(10);
        expect(isSorted(ascendingTimeKey)(logEntryHighlightSet.entries)).to.equal(true);

        for (const logEntryHighlight of logEntryHighlightSet.entries) {
          expect(logEntryHighlight.columns).to.have.length(3);
          expect(logEntryHighlight.columns[1]).to.have.property('field');
          expect(logEntryHighlight.columns[1]).to.have.property('highlights');
          expect(logEntryHighlight.columns[1].highlights).to.eql([]);
          expect(logEntryHighlight.columns[2]).to.have.property('message');
          expect(logEntryHighlight.columns[2].message).to.be.an('array');
          expect(logEntryHighlight.columns[2].message.length).to.be(1);
          expect(logEntryHighlight.columns[2].message[0].highlights).to.eql([
            'message',
            'of',
            'document',
            '0',
          ]);
        }
      });

      // https://github.com/elastic/kibana/issues/49959
      it.skip('should return log highlights in a field column', async () => {
        const {
          data: {
            source: { logEntryHighlights },
          },
        } = await client.query<any>({
          query: logEntryHighlightsQuery,
          variables: {
            sourceId: 'default',
            startKey: KEY_BEFORE_START,
            endKey: KEY_AFTER_END,
            highlights: [
              {
                query: 'generate_test_data/simple_logs',
                countBefore: 0,
                countAfter: 0,
              },
            ],
          },
        });

        expect(logEntryHighlights).to.have.length(1);

        const [logEntryHighlightSet] = logEntryHighlights;
        expect(logEntryHighlightSet).to.have.property('entries');
        // ten bundles with five highlights each
        expect(logEntryHighlightSet.entries).to.have.length(50);
        expect(isSorted(ascendingTimeKey)(logEntryHighlightSet.entries)).to.equal(true);

        for (const logEntryHighlight of logEntryHighlightSet.entries) {
          expect(logEntryHighlight.columns).to.have.length(3);
          expect(logEntryHighlight.columns[1]).to.have.property('field');
          expect(logEntryHighlight.columns[1]).to.have.property('highlights');
          expect(logEntryHighlight.columns[1].highlights).to.eql([
            'generate_test_data/simple_logs',
          ]);
          expect(logEntryHighlight.columns[2]).to.have.property('message');
          expect(logEntryHighlight.columns[2].message).to.be.an('array');
          expect(logEntryHighlight.columns[2].message.length).to.be(1);
          expect(logEntryHighlight.columns[2].message[0].highlights).to.eql([]);
        }
      });

      it('should apply the filter query in addition to the highlight query', async () => {
        const {
          data: {
            source: { logEntryHighlights },
          },
        } = await client.query<any>({
          query: logEntryHighlightsQuery,
          variables: {
            sourceId: 'default',
            startKey: KEY_BEFORE_START,
            endKey: KEY_AFTER_END,
            filterQuery: JSON.stringify({
              multi_match: { query: 'host-a', type: 'phrase', lenient: true },
            }),
            highlights: [
              {
                query: 'message',
                countBefore: 0,
                countAfter: 0,
              },
            ],
          },
        });

        expect(logEntryHighlights).to.have.length(1);

        const [logEntryHighlightSet] = logEntryHighlights;
        expect(logEntryHighlightSet).to.have.property('entries');
        // half of the documents
        expect(logEntryHighlightSet.entries).to.have.length(25);
        expect(isSorted(ascendingTimeKey)(logEntryHighlightSet.entries)).to.equal(true);

        for (const logEntryHighlight of logEntryHighlightSet.entries) {
          expect(logEntryHighlight.columns).to.have.length(3);
          expect(logEntryHighlight.columns[1]).to.have.property('field');
          expect(logEntryHighlight.columns[1]).to.have.property('highlights');
          expect(logEntryHighlight.columns[1].highlights).to.eql([]);
          expect(logEntryHighlight.columns[2]).to.have.property('message');
          expect(logEntryHighlight.columns[2].message).to.be.an('array');
          expect(logEntryHighlight.columns[2].message.length).to.be(1);
          expect(logEntryHighlight.columns[2].message[0].highlights).to.eql([
            'message',
            'message',
          ]);
        }
      });

      it('should return highlights outside of the interval when requested', async () => {
        const {
          data: {
            source: { logEntryHighlights },
          },
        } = await client.query<any>({
          query: logEntryHighlightsQuery,
          variables: {
            sourceId: 'default',
            startKey: KEY_AFTER_START,
            endKey: KEY_BEFORE_END,
            highlights: [
              {
                query: 'message of document 0',
                countBefore: 2,
                countAfter: 2,
              },
            ],
          },
        });

        expect(logEntryHighlights).to.have.length(1);

        const [logEntryHighlightSet] = logEntryHighlights;
        expect(logEntryHighlightSet).to.have.property('entries');
        // three bundles with one highlight each plus two beyond each interval boundary
        expect(logEntryHighlightSet.entries).to.have.length(3 + 4);
        expect(isSorted(ascendingTimeKey)(logEntryHighlightSet.entries)).to.equal(true);

        for (const logEntryHighlight of logEntryHighlightSet.entries) {
          expect(logEntryHighlight.columns).to.have.length(3);
          expect(logEntryHighlight.columns[1]).to.have.property('field');
          expect(logEntryHighlight.columns[1]).to.have.property('highlights');
          expect(logEntryHighlight.columns[1].highlights).to.eql([]);
          expect(logEntryHighlight.columns[2]).to.have.property('message');
          expect(logEntryHighlight.columns[2].message).to.be.an('array');
          expect(logEntryHighlight.columns[2].message.length).to.be(1);
          expect(logEntryHighlight.columns[2].message[0].highlights).to.eql([
            'message',
            'of',
            'document',
            '0',
          ]);
        }
      });
    });
  });
});
}

const logEntryHighlightsQuery = gql`
  query LogEntryHighlightsQuery(
    $sourceId: ID = "default"
    $startKey: InfraTimeKeyInput!
    $endKey: InfraTimeKeyInput!
    $filterQuery: String
    $highlights: [InfraLogEntryHighlightInput!]!
  ) {
    source(id: $sourceId) {
      id
      logEntryHighlights(
        startKey: $startKey
        endKey: $endKey
        filterQuery: $filterQuery
        highlights: $highlights
      ) {
        start {
          ...InfraTimeKeyFields
        }
        end {
          ...InfraTimeKeyFields
        }
        entries {
          ...InfraLogEntryHighlightFields
        }
      }
    }
  }

  ${sharedFragments.InfraTimeKey}
  ${sharedFragments.InfraLogEntryHighlightFields}
`;

const isSorted = <Value>(comparator: (first: Value, second: Value) => number) => (
  values: Value[]
) => pairs(values, comparator).every(order => order <= 0);

const ascendingTimeKey = (first: { key: InfraTimeKey }, second: { key: InfraTimeKey }) =>
  ascending(first.key.time, second.key.time) ||
  ascending(first.key.tiebreaker, second.key.tiebreaker);
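
Note: condensed from the hunks above, a sketch of a highlights request carrying both a filter query and a highlight term. Combining the two fields in one request is illustrative; each field individually appears in the encodes above, with the same codec and fixtures.

const highlightsRequestBody = logEntriesHighlightsRequestRT.encode({
  sourceId: 'default',
  startTimestamp: KEY_BEFORE_START.time,
  endTimestamp: KEY_AFTER_END.time,
  // optional filter restricting which entries are searched for highlights
  query: JSON.stringify({
    multi_match: { query: 'host-a', type: 'phrase', lenient: true },
  }),
  highlightTerms: ['message of document 0'],
});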
@@ -38,9 +38,10 @@ export default function({ getService }: FtrProviderContext) {
    after(() => esArchiver.unload('infra/metrics_and_logs'));

    it('should return empty and non-empty consecutive buckets', async () => {
      const startDate = EARLIEST_TIME_WITH_DATA;
      const endDate = LATEST_TIME_WITH_DATA + (LATEST_TIME_WITH_DATA - EARLIEST_TIME_WITH_DATA);
      const bucketSize = Math.ceil((endDate - startDate) / 10);
      const startTimestamp = EARLIEST_TIME_WITH_DATA;
      const endTimestamp =
        LATEST_TIME_WITH_DATA + (LATEST_TIME_WITH_DATA - EARLIEST_TIME_WITH_DATA);
      const bucketSize = Math.ceil((endTimestamp - startTimestamp) / 10);

      const { body } = await supertest
        .post(LOG_ENTRIES_SUMMARY_PATH)
@@ -48,8 +49,8 @@ export default function({ getService }: FtrProviderContext) {
        .send(
          logEntriesSummaryRequestRT.encode({
            sourceId: 'default',
            startDate,
            endDate,
            startTimestamp,
            endTimestamp,
            bucketSize,
            query: null,
          })
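
Note: read together, the two hunks above assemble the updated summary request as follows. This is a consolidated sketch of the diffed lines, written as they sit inside the async test; the trailing status assertion matches the sibling tests in this commit.

const startTimestamp = EARLIEST_TIME_WITH_DATA;
const endTimestamp =
  LATEST_TIME_WITH_DATA + (LATEST_TIME_WITH_DATA - EARLIEST_TIME_WITH_DATA);
// roughly ten buckets across the requested interval
const bucketSize = Math.ceil((endTimestamp - startTimestamp) / 10);

const { body } = await supertest
  .post(LOG_ENTRIES_SUMMARY_PATH)
  .set(COMMON_HEADERS)
  .send(
    logEntriesSummaryRequestRT.encode({
      sourceId: 'default',
      startTimestamp,
      endTimestamp,
      bucketSize,
      query: null,
    })
  )
  .expect(200);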
@@ -5,8 +5,6 @@
 */

import expect from '@kbn/expect';
import { ascending, pairs } from 'd3-array';
import gql from 'graphql-tag';

import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
@@ -15,21 +13,18 @@ import { fold } from 'fp-ts/lib/Either';
import { createPlainError, throwErrors } from '../../../../plugins/infra/common/runtime_types';

import { FtrProviderContext } from '../../ftr_provider_context';
import { sharedFragments } from '../../../../plugins/infra/common/graphql/shared';
import { InfraTimeKey } from '../../../../plugins/infra/public/graphql/types';
import {
  LOG_ENTRIES_SUMMARY_PATH,
  logEntriesSummaryRequestRT,
  logEntriesSummaryResponseRT,
  LOG_ENTRIES_PATH,
  logEntriesRequestRT,
  logEntriesResponseRT,
} from '../../../../plugins/infra/common/http_api/log_entries';

const COMMON_HEADERS = {
  'kbn-xsrf': 'some-xsrf-token',
};
const KEY_WITHIN_DATA_RANGE = {
  time: new Date('2019-01-06T00:00:00.000Z').valueOf(),
  tiebreaker: 0,
};
const EARLIEST_KEY_WITH_DATA = {
  time: new Date('2019-01-05T23:59:23.000Z').valueOf(),
  tiebreaker: -1,
@@ -38,153 +33,97 @@ const LATEST_KEY_WITH_DATA = {
  time: new Date('2019-01-06T23:59:23.000Z').valueOf(),
  tiebreaker: 2,
};
const KEY_WITHIN_DATA_RANGE = {
  time: new Date('2019-01-06T00:00:00.000Z').valueOf(),
  tiebreaker: 0,
};

export default function({ getService }: FtrProviderContext) {
  const esArchiver = getService('esArchiver');
  const client = getService('infraOpsGraphQLClient');
  const supertest = getService('supertest');

  describe('logs without epoch_millis format', () => {
    before(() => esArchiver.load('infra/logs_without_epoch_millis'));
    after(() => esArchiver.unload('infra/logs_without_epoch_millis'));

    it('logEntriesAround should return log entries', async () => {
      const {
        data: {
          source: { logEntriesAround },
        },
      } = await client.query<any>({
        query: logEntriesAroundQuery,
        variables: {
          timeKey: KEY_WITHIN_DATA_RANGE,
          countBefore: 1,
          countAfter: 1,
        },
    describe('/log_entries/summary', () => {
      it('returns non-empty buckets', async () => {
        const startTimestamp = EARLIEST_KEY_WITH_DATA.time;
        const endTimestamp = LATEST_KEY_WITH_DATA.time + 1; // the interval end is exclusive
        const bucketSize = Math.ceil((endTimestamp - startTimestamp) / 10);

        const { body } = await supertest
          .post(LOG_ENTRIES_SUMMARY_PATH)
          .set(COMMON_HEADERS)
          .send(
            logEntriesSummaryRequestRT.encode({
              sourceId: 'default',
              startTimestamp,
              endTimestamp,
              bucketSize,
              query: null,
            })
          )
          .expect(200);

        const logSummaryResponse = pipe(
          logEntriesSummaryResponseRT.decode(body),
          fold(throwErrors(createPlainError), identity)
        );

        expect(
          logSummaryResponse.data.buckets.filter((bucket: any) => bucket.entriesCount > 0)
        ).to.have.length(2);
      });
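
Note: the assertion above relies only on each bucket exposing an `entriesCount`. A hedged sketch of the bucket shape it implies; `start` and `end` are assumed fields for illustration, not values this test verifies.

// Assumed shape: only `entriesCount` is exercised by the assertion above.
interface SummaryBucket {
  start: number; // hypothetical bucket boundary (epoch millis)
  end: number; // hypothetical bucket boundary (epoch millis)
  entriesCount: number;
}

const nonEmptyBuckets = (buckets: SummaryBucket[]) =>
  buckets.filter(bucket => bucket.entriesCount > 0);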

      expect(logEntriesAround).to.have.property('entries');
      expect(logEntriesAround.entries).to.have.length(2);
      expect(isSorted(ascendingTimeKey)(logEntriesAround.entries)).to.equal(true);

      expect(logEntriesAround.hasMoreBefore).to.equal(false);
      expect(logEntriesAround.hasMoreAfter).to.equal(false);
    });

    it('logEntriesBetween should return log entries', async () => {
      const {
        data: {
          source: { logEntriesBetween },
        },
      } = await client.query<any>({
        query: logEntriesBetweenQuery,
        variables: {
          startKey: EARLIEST_KEY_WITH_DATA,
          endKey: LATEST_KEY_WITH_DATA,
        },
    describe('/log_entries/entries', () => {
      it('returns log entries', async () => {
        const startTimestamp = EARLIEST_KEY_WITH_DATA.time;
        const endTimestamp = LATEST_KEY_WITH_DATA.time + 1; // the interval end is exclusive

        const { body } = await supertest
          .post(LOG_ENTRIES_PATH)
          .set(COMMON_HEADERS)
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startTimestamp,
              endTimestamp,
            })
          )
          .expect(200);

        const logEntriesResponse = pipe(
          logEntriesResponseRT.decode(body),
          fold(throwErrors(createPlainError), identity)
        );
        expect(logEntriesResponse.data.entries).to.have.length(2);
      });

      expect(logEntriesBetween).to.have.property('entries');
      expect(logEntriesBetween.entries).to.have.length(2);
      expect(isSorted(ascendingTimeKey)(logEntriesBetween.entries)).to.equal(true);
    });
      it('returns log entries when centering around a point', async () => {
        const startTimestamp = EARLIEST_KEY_WITH_DATA.time;
        const endTimestamp = LATEST_KEY_WITH_DATA.time + 1; // the interval end is exclusive

    it('logSummaryBetween should return non-empty buckets', async () => {
      const startDate = EARLIEST_KEY_WITH_DATA.time;
      const endDate = LATEST_KEY_WITH_DATA.time + 1; // the interval end is exclusive
      const bucketSize = Math.ceil((endDate - startDate) / 10);
        const { body } = await supertest
          .post(LOG_ENTRIES_PATH)
          .set(COMMON_HEADERS)
          .send(
            logEntriesRequestRT.encode({
              sourceId: 'default',
              startTimestamp,
              endTimestamp,
              center: KEY_WITHIN_DATA_RANGE,
            })
          )
          .expect(200);

      const { body } = await supertest
        .post(LOG_ENTRIES_SUMMARY_PATH)
        .set(COMMON_HEADERS)
        .send(
          logEntriesSummaryRequestRT.encode({
            sourceId: 'default',
            startDate,
            endDate,
            bucketSize,
            query: null,
          })
        )
        .expect(200);

      const logSummaryResponse = pipe(
        logEntriesSummaryResponseRT.decode(body),
        fold(throwErrors(createPlainError), identity)
      );

      expect(
        logSummaryResponse.data.buckets.filter((bucket: any) => bucket.entriesCount > 0)
      ).to.have.length(2);
        const logEntriesResponse = pipe(
          logEntriesResponseRT.decode(body),
          fold(throwErrors(createPlainError), identity)
        );
        expect(logEntriesResponse.data.entries).to.have.length(2);
      });
    });
  });
}

const logEntriesAroundQuery = gql`
  query LogEntriesAroundQuery(
    $timeKey: InfraTimeKeyInput!
    $countBefore: Int = 0
    $countAfter: Int = 0
    $filterQuery: String
  ) {
    source(id: "default") {
      id
      logEntriesAround(
        key: $timeKey
        countBefore: $countBefore
        countAfter: $countAfter
        filterQuery: $filterQuery
      ) {
        start {
          ...InfraTimeKeyFields
        }
        end {
          ...InfraTimeKeyFields
        }
        hasMoreBefore
        hasMoreAfter
        entries {
          ...InfraLogEntryFields
        }
      }
    }
  }

  ${sharedFragments.InfraTimeKey}
  ${sharedFragments.InfraLogEntryFields}
`;

const logEntriesBetweenQuery = gql`
  query LogEntriesBetweenQuery(
    $startKey: InfraTimeKeyInput!
    $endKey: InfraTimeKeyInput!
    $filterQuery: String
  ) {
    source(id: "default") {
      id
      logEntriesBetween(startKey: $startKey, endKey: $endKey, filterQuery: $filterQuery) {
        start {
          ...InfraTimeKeyFields
        }
        end {
          ...InfraTimeKeyFields
        }
        hasMoreBefore
        hasMoreAfter
        entries {
          ...InfraLogEntryFields
        }
      }
    }
  }

  ${sharedFragments.InfraTimeKey}
  ${sharedFragments.InfraLogEntryFields}
`;

const isSorted = <Value>(comparator: (first: Value, second: Value) => number) => (
  values: Value[]
) => pairs(values, comparator).every(order => order <= 0);

const ascendingTimeKey = (first: { key: InfraTimeKey }, second: { key: InfraTimeKey }) =>
  ascending(first.key.time, second.key.time) ||
  ascending(first.key.tiebreaker, second.key.tiebreaker);

@@ -22,5 +22,9 @@ export const DATES = {
      withData: '10/17/2018 7:58:03 PM',
      withoutData: '10/09/2018 10:00:00 PM',
    },
    stream: {
      startWithData: '2018-10-17T19:42:22.000Z',
      endWithData: '2018-10-17T19:57:21.000Z',
    },
  },
};
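
Note: the new `stream` dates are ISO strings. Where a test needs the epoch-millisecond form the log entry APIs expect, the conversion is the same `Date#valueOf()` one-liner used for the fixture keys elsewhere in this commit:

// Sketch: convert the ISO fixture strings to epoch milliseconds.
const startTimestamp = new Date(DATES.metricsAndLogs.stream.startWithData).valueOf();
const endTimestamp = new Date(DATES.metricsAndLogs.stream.endWithData).valueOf();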
@@ -7,22 +7,29 @@
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../ftr_provider_context';

const ONE_HOUR = 60 * 60 * 1000;

export default ({ getPageObjects, getService }: FtrProviderContext) => {
  const pageObjects = getPageObjects(['common']);
  const retry = getService('retry');
  const browser = getService('browser');

  const timestamp = Date.now();
  const startDate = new Date(timestamp - ONE_HOUR).toISOString();
  const endDate = new Date(timestamp + ONE_HOUR).toISOString();

  const traceId = '433b4651687e18be2c6c8e3b11f53d09';

  describe('Infra link-to', function() {
    this.tags('smoke');
    it('redirects to the logs app and parses URL search params correctly', async () => {
      const location = {
        hash: '',
        pathname: '/link-to/logs',
        search: 'time=1565707203194&filter=trace.id:433b4651687e18be2c6c8e3b11f53d09',
        search: `time=${timestamp}&filter=trace.id:${traceId}`,
        state: undefined,
      };
      const expectedSearchString =
        "logFilter=(expression:'trace.id:433b4651687e18be2c6c8e3b11f53d09',kind:kuery)&logPosition=(position:(tiebreaker:0,time:1565707203194),streamLive:!f)&sourceId=default";
      const expectedSearchString = `logFilter=(expression:'trace.id:${traceId}',kind:kuery)&logPosition=(end:'${endDate}',position:(tiebreaker:0,time:${timestamp}),start:'${startDate}',streamLive:!f)&sourceId=default`;
      const expectedRedirectPath = '/logs/stream?';

      await pageObjects.common.navigateToUrlWithBrowserHistory(

@@ -5,6 +5,7 @@
 */

import expect from '@kbn/expect';
import { DATES } from './constants';

import { FtrProviderContext } from '../../ftr_provider_context';

@@ -74,7 +75,12 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
    });

    it('renders the default log columns with their headers', async () => {
      await logsUi.logStreamPage.navigateTo();
      await logsUi.logStreamPage.navigateTo({
        logPosition: {
          start: DATES.metricsAndLogs.stream.startWithData,
          end: DATES.metricsAndLogs.stream.endWithData,
        },
      });

      await retry.try(async () => {
        const columnHeaderLabels = await logsUi.logStreamPage.getColumnHeaderLabels();
@@ -108,7 +114,12 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
    });

    it('renders the changed log columns with their headers', async () => {
      await logsUi.logStreamPage.navigateTo();
      await logsUi.logStreamPage.navigateTo({
        logPosition: {
          start: DATES.metricsAndLogs.stream.startWithData,
          end: DATES.metricsAndLogs.stream.endWithData,
        },
      });

      await retry.try(async () => {
        const columnHeaderLabels = await logsUi.logStreamPage.getColumnHeaderLabels();

@@ -6,8 +6,21 @@

// import testSubjSelector from '@kbn/test-subj-selector';
// import moment from 'moment';

import querystring from 'querystring';
import { encode, RisonValue } from 'rison-node';
import { FtrProviderContext } from '../ftr_provider_context';
import { LogPositionUrlState } from '../../../../x-pack/plugins/infra/public/containers/logs/log_position/with_log_position_url_state';
import { FlyoutOptionsUrlState } from '../../../../x-pack/plugins/infra/public/containers/logs/log_flyout';

export interface TabsParams {
  stream: {
    logPosition?: Partial<LogPositionUrlState>;
    flyoutOptions?: Partial<FlyoutOptionsUrlState>;
  };
  settings: never;
  'log-categories': any;
  'log-rate': any;
}

export function InfraLogsPageProvider({ getPageObjects, getService }: FtrProviderContext) {
  const testSubjects = getService('testSubjects');
@@ -18,8 +31,26 @@ export function InfraLogsPageProvider({ getPageObjects, getService }: FtrProvide
      await pageObjects.common.navigateToApp('infraLogs');
    },

    async navigateToTab(logsUiTab: LogsUiTab) {
      await pageObjects.common.navigateToUrlWithBrowserHistory('infraLogs', `/${logsUiTab}`);
    async navigateToTab<T extends LogsUiTab>(logsUiTab: T, params?: TabsParams[T]) {
      let qs = '';
      if (params) {
        const parsedParams: Record<string, string> = {};

        for (const key in params) {
          if (params.hasOwnProperty(key)) {
            const value = (params[key] as unknown) as RisonValue;
            parsedParams[key] = encode(value);
          }
        }
        qs = '?' + querystring.stringify(parsedParams);
      }

      await pageObjects.common.navigateToUrlWithBrowserHistory(
        'infraLogs',
        `/${logsUiTab}`,
        qs,
        { ensureCurrentUrl: false } // Test runner struggles with `rison-node` escaped values
      );
    },
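
Note: a standalone illustration of the encoding `navigateToTab` performs above. The `logPosition` value is sample data, and the commented output is approximate, since `querystring.stringify` additionally URI-escapes the rison text.

import querystring from 'querystring';
import { encode, RisonValue } from 'rison-node';

const params: Record<string, RisonValue> = {
  logPosition: { start: '2018-10-17T19:42:22.000Z', end: '2018-10-17T19:57:21.000Z' },
};

const parsedParams: Record<string, string> = {};
for (const key in params) {
  if (params.hasOwnProperty(key)) {
    // rison keeps the URL state compact, e.g. (end:'2018-10-17T19:57:21.000Z',start:'2018-10-17T19:42:22.000Z')
    parsedParams[key] = encode(params[key]);
  }
}

// roughly: ?logPosition=(end:'2018-10-17T19:57:21.000Z',start:'2018-10-17T19:42:22.000Z'), URI-escaped
const qs = '?' + querystring.stringify(parsedParams);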

    async getLogStream() {

@@ -6,6 +6,7 @@

import { FtrProviderContext } from '../../ftr_provider_context';
import { WebElementWrapper } from '../../../../../test/functional/services/lib/web_element_wrapper';
import { TabsParams } from '../../page_objects/infra_logs_page';

export function LogStreamPageProvider({ getPageObjects, getService }: FtrProviderContext) {
  const pageObjects = getPageObjects(['infraLogs']);
@@ -13,8 +14,8 @@ export function LogStreamPageProvider({ getPageObjects, getService }: FtrProvide
  const testSubjects = getService('testSubjects');

  return {
    async navigateTo() {
      pageObjects.infraLogs.navigateToTab('stream');
    async navigateTo(params?: TabsParams['stream']) {
      pageObjects.infraLogs.navigateToTab('stream', params);
    },

    async getColumnHeaderLabels(): Promise<string[]> {