[Logs UI] Add pagination to the log stream shared component (#81193)

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Author: Alejandro Fernández Gómez
Date: 2020-11-12 15:11:43 +01:00 (committed by GitHub)
parent 3a849ff104
commit 0e7bcf6164
8 changed files with 300 additions and 74 deletions
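In short: the log entries API now reports whether more entries exist beyond the returned page (`hasMoreBefore`/`hasMoreAfter`), the Elasticsearch adapter detects this by over-fetching a single document, and the shared log stream component uses the new flags to fetch the previous or next page as the user scrolls near either edge of the loaded range.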


@@ -99,11 +99,17 @@ export type LogEntryContext = rt.TypeOf<typeof logEntryContextRT>;
 export type LogEntry = rt.TypeOf<typeof logEntryRT>;
 
 export const logEntriesResponseRT = rt.type({
-  data: rt.type({
-    entries: rt.array(logEntryRT),
-    topCursor: rt.union([logEntriesCursorRT, rt.null]),
-    bottomCursor: rt.union([logEntriesCursorRT, rt.null]),
-  }),
+  data: rt.intersection([
+    rt.type({
+      entries: rt.array(logEntryRT),
+      topCursor: rt.union([logEntriesCursorRT, rt.null]),
+      bottomCursor: rt.union([logEntriesCursorRT, rt.null]),
+    }),
+    rt.partial({
+      hasMoreBefore: rt.boolean,
+      hasMoreAfter: rt.boolean,
+    }),
+  ]),
 });
 
 export type LogEntriesResponse = rt.TypeOf<typeof logEntriesResponseRT>;
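The response codec now advertises the optional `hasMoreBefore`/`hasMoreAfter` flags via `rt.intersection` of a required `rt.type` and an optional `rt.partial`. A minimal standalone sketch of how such a codec behaves (hypothetical payloads, not the real `logEntriesResponseRT`):

```ts
import * as rt from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';

// Same shape-combining technique as logEntriesResponseRT:
// required fields in rt.type, optional flags in rt.partial.
const pageRT = rt.intersection([
  rt.type({ entries: rt.array(rt.string) }),
  rt.partial({ hasMoreBefore: rt.boolean, hasMoreAfter: rt.boolean }),
]);

// Both payloads decode successfully: the flags may be omitted entirely,
// which keeps older servers compatible with the new client.
console.log(isRight(pageRT.decode({ entries: ['a'] }))); // true
console.log(isRight(pageRT.decode({ entries: ['a'], hasMoreAfter: false }))); // true
```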


@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import React, { useMemo } from 'react';
+import React, { useMemo, useCallback } from 'react';
 import { noop } from 'lodash';
 import useMount from 'react-use/lib/useMount';
 import { euiStyled } from '../../../../observability/public';
@@ -17,6 +17,8 @@ import { useLogStream } from '../../containers/logs/log_stream';
 import { ScrollableLogTextStreamView } from '../logging/log_text_stream';
 
+const PAGE_THRESHOLD = 2;
+
 export interface LogStreamProps {
   sourceId?: string;
   startTimestamp: number;
@@ -58,7 +60,16 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
   });
 
   // Internal state
-  const { loadingState, entries, fetchEntries } = useLogStream({
+  const {
+    loadingState,
+    pageLoadingState,
+    entries,
+    hasMoreBefore,
+    hasMoreAfter,
+    fetchEntries,
+    fetchPreviousEntries,
+    fetchNextEntries,
+  } = useLogStream({
     sourceId,
     startTimestamp,
     endTimestamp,
@@ -70,6 +81,8 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
   const isReloading =
     isLoadingSourceConfiguration || loadingState === 'uninitialized' || loadingState === 'loading';
 
+  const isLoadingMore = pageLoadingState === 'loading';
+
   const columnConfigurations = useMemo(() => {
     return sourceConfiguration ? sourceConfiguration.configuration.logColumns : [];
   }, [sourceConfiguration]);
@@ -84,13 +97,33 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
     [entries]
   );
 
+  const parsedHeight = typeof height === 'number' ? `${height}px` : height;
+
   // Component lifetime
   useMount(() => {
     loadSourceConfiguration();
     fetchEntries();
   });
 
-  const parsedHeight = typeof height === 'number' ? `${height}px` : height;
+  // Pagination handler
+  const handlePagination = useCallback(
+    ({ fromScroll, pagesBeforeStart, pagesAfterEnd }) => {
+      if (!fromScroll) {
+        return;
+      }
+
+      if (isLoadingMore) {
+        return;
+      }
+
+      if (pagesBeforeStart < PAGE_THRESHOLD) {
+        fetchPreviousEntries();
+      } else if (pagesAfterEnd < PAGE_THRESHOLD) {
+        fetchNextEntries();
+      }
+    },
+    [isLoadingMore, fetchPreviousEntries, fetchNextEntries]
+  );
+
   return (
     <LogStreamContent height={parsedHeight}>
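The handler only reacts to user-initiated scrolling (`fromScroll`), ignores events while a page fetch is already in flight, and requests the previous or next page once the visible window comes within `PAGE_THRESHOLD` pages of either edge. A standalone sketch of that trigger logic (the `shouldPaginate` helper is hypothetical, written only to illustrate the decision):

```ts
const PAGE_THRESHOLD = 2;

interface VisibleInterval {
  fromScroll: boolean; // true only for user-initiated scrolls
  pagesBeforeStart: number; // pages of content above the viewport
  pagesAfterEnd: number; // pages of content below the viewport
}

// Mirrors the handlePagination logic above as a pure function.
function shouldPaginate(
  { fromScroll, pagesBeforeStart, pagesAfterEnd }: VisibleInterval,
  isLoadingMore: boolean
): 'previous' | 'next' | null {
  if (!fromScroll || isLoadingMore) return null;
  if (pagesBeforeStart < PAGE_THRESHOLD) return 'previous';
  if (pagesAfterEnd < PAGE_THRESHOLD) return 'next';
  return null;
}
```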
@@ -101,13 +134,13 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
             scale="medium"
             wrap={false}
             isReloading={isReloading}
-            isLoadingMore={false}
-            hasMoreBeforeStart={false}
-            hasMoreAfterEnd={false}
+            isLoadingMore={isLoadingMore}
+            hasMoreBeforeStart={hasMoreBefore}
+            hasMoreAfterEnd={hasMoreAfter}
             isStreaming={false}
             lastLoadedTime={null}
             jumpToTarget={noop}
-            reportVisibleInterval={noop}
+            reportVisibleInterval={handlePagination}
             loadNewerItems={noop}
             reloadItems={fetchEntries}
             highlightedItem={highlight ?? null}
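With these props wired up, `ScrollableLogTextStreamView` renders its loading indicators from real pagination state instead of hard-coded placeholders, and scroll position reports flow back into `handlePagination`.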


@@ -367,16 +367,16 @@ const logEntriesStateReducer = (prevState: LogEntriesStateParams, action: Action
     case Action.ReceiveNewEntries:
       return {
         ...prevState,
-        ...action.payload,
+        entries: action.payload.entries,
+        topCursor: action.payload.topCursor,
+        bottomCursor: action.payload.bottomCursor,
         centerCursor: getCenterCursor(action.payload.entries),
         lastLoadedTime: new Date(),
         isReloading: false,
-        // Be optimistic. If any of the before/after requests comes empty, set
-        // the corresponding flag to `false`
-        hasMoreBeforeStart: true,
-        hasMoreAfterEnd: true,
+        hasMoreBeforeStart: action.payload.hasMoreBefore ?? prevState.hasMoreBeforeStart,
+        hasMoreAfterEnd: action.payload.hasMoreAfter ?? prevState.hasMoreAfterEnd,
       };
     case Action.ReceiveEntriesBefore: {
       const newEntries = action.payload.entries;
       const prevEntries = cleanDuplicateItems(prevState.entries, newEntries);
@@ -385,7 +385,7 @@ const logEntriesStateReducer = (prevState: LogEntriesStateParams, action: Action
       const update = {
         entries,
         isLoadingMore: false,
-        hasMoreBeforeStart: newEntries.length > 0,
+        hasMoreBeforeStart: action.payload.hasMoreBefore ?? prevState.hasMoreBeforeStart,
         // Keep the previous cursor if request comes empty, to easily extend the range.
         topCursor: newEntries.length > 0 ? action.payload.topCursor : prevState.topCursor,
         centerCursor: getCenterCursor(entries),
@@ -402,7 +402,7 @@ const logEntriesStateReducer = (prevState: LogEntriesStateParams, action: Action
       const update = {
         entries,
         isLoadingMore: false,
-        hasMoreAfterEnd: newEntries.length > 0,
+        hasMoreAfterEnd: action.payload.hasMoreAfter ?? prevState.hasMoreAfterEnd,
         // Keep the previous cursor if request comes empty, to easily extend the range.
         bottomCursor: newEntries.length > 0 ? action.payload.bottomCursor : prevState.bottomCursor,
         centerCursor: getCenterCursor(entries),
@@ -419,6 +419,8 @@ const logEntriesStateReducer = (prevState: LogEntriesStateParams, action: Action
         topCursor: null,
         bottomCursor: null,
         centerCursor: null,
+        // Assume there are more pages on both ends unless proven wrong by the
+        // API with an explicit `false` response.
         hasMoreBeforeStart: true,
         hasMoreAfterEnd: true,
       };
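Each reducer branch now trusts the server's explicit flag when present and keeps the previous value when the field is omitted, using the nullish coalescing operator. A minimal sketch of the fallback semantics (the `PagePayload` type and helper are hypothetical):

```ts
interface PagePayload {
  hasMoreBefore?: boolean; // optional: older servers omit it
}

// `??` falls back only on null/undefined, so an explicit `false`
// from the API wins over the optimistic previous value.
function nextHasMoreBefore(prev: boolean, payload: PagePayload): boolean {
  return payload.hasMoreBefore ?? prev;
}

nextHasMoreBefore(true, {}); // true  (flag omitted, keep optimism)
nextHasMoreBefore(true, { hasMoreBefore: false }); // false (server said no more)
```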


@@ -4,7 +4,9 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import { useState, useMemo } from 'react';
+import { useMemo, useEffect } from 'react';
+import useSetState from 'react-use/lib/useSetState';
+import usePrevious from 'react-use/lib/usePrevious';
 import { esKuery } from '../../../../../../../src/plugins/data/public';
 import { fetchLogEntries } from '../log_entries/api/fetch_log_entries';
 import { useTrackedPromise } from '../../../utils/use_tracked_promise';
@@ -21,19 +23,62 @@ interface LogStreamProps {
 interface LogStreamState {
   entries: LogEntry[];
-  fetchEntries: () => void;
-  loadingState: 'uninitialized' | 'loading' | 'success' | 'error';
+  topCursor: LogEntriesCursor | null;
+  bottomCursor: LogEntriesCursor | null;
+  hasMoreBefore: boolean;
+  hasMoreAfter: boolean;
 }
 
+type LoadingState = 'uninitialized' | 'loading' | 'success' | 'error';
+
+interface LogStreamReturn extends LogStreamState {
+  fetchEntries: () => void;
+  fetchPreviousEntries: () => void;
+  fetchNextEntries: () => void;
+  loadingState: LoadingState;
+  pageLoadingState: LoadingState;
+}
+
+const INITIAL_STATE: LogStreamState = {
+  entries: [],
+  topCursor: null,
+  bottomCursor: null,
+  // Assume there are pages available until the API proves us wrong
+  hasMoreBefore: true,
+  hasMoreAfter: true,
+};
+
+const EMPTY_DATA = {
+  entries: [],
+  topCursor: null,
+  bottomCursor: null,
+};
+
 export function useLogStream({
   sourceId,
   startTimestamp,
   endTimestamp,
   query,
   center,
-}: LogStreamProps): LogStreamState {
+}: LogStreamProps): LogStreamReturn {
   const { services } = useKibanaContextForPlugin();
-  const [entries, setEntries] = useState<LogStreamState['entries']>([]);
+  const [state, setState] = useSetState<LogStreamState>(INITIAL_STATE);
+
+  // Ensure the pagination keeps working when the timerange gets extended
+  const prevStartTimestamp = usePrevious(startTimestamp);
+  const prevEndTimestamp = usePrevious(endTimestamp);
+
+  useEffect(() => {
+    if (prevStartTimestamp && prevStartTimestamp > startTimestamp) {
+      setState({ hasMoreBefore: true });
+    }
+  }, [prevStartTimestamp, startTimestamp, setState]);
+
+  useEffect(() => {
+    if (prevEndTimestamp && prevEndTimestamp < endTimestamp) {
+      setState({ hasMoreAfter: true });
+    }
+  }, [prevEndTimestamp, endTimestamp, setState]);
+
   const parsedQuery = useMemo(() => {
     return query
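When the consumer widens the time range, a previously exhausted edge may have entries again, so the hook compares the new bounds against the previous render's values (via `usePrevious`) and re-opens the corresponding flag. A standalone sketch of the comparison as a pure function, outside React (names are illustrative):

```ts
interface RangeFlags {
  hasMoreBefore: boolean;
  hasMoreAfter: boolean;
}

// If the start moved earlier there may be more entries before the
// current top; if the end moved later, more after the current bottom.
function reopenFlagsOnRangeChange(
  flags: RangeFlags,
  prev: { start: number; end: number },
  next: { start: number; end: number }
): RangeFlags {
  return {
    hasMoreBefore: next.start < prev.start ? true : flags.hasMoreBefore,
    hasMoreAfter: next.end > prev.end ? true : flags.hasMoreAfter,
  };
}
```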
@@ -46,7 +91,7 @@
     {
       cancelPreviousOn: 'creation',
       createPromise: () => {
-        setEntries([]);
+        setState(INITIAL_STATE);
         const fetchPosition = center ? { center } : { before: 'last' };
 
         return fetchLogEntries(
@@ -61,26 +106,130 @@
         );
       },
       onResolve: ({ data }) => {
-        setEntries(data.entries);
+        setState((prevState) => ({
+          ...data,
+          hasMoreBefore: data.hasMoreBefore ?? prevState.hasMoreBefore,
+          hasMoreAfter: data.hasMoreAfter ?? prevState.hasMoreAfter,
+        }));
       },
     },
     [sourceId, startTimestamp, endTimestamp, query]
   );
 
-  const loadingState = useMemo(() => convertPromiseStateToLoadingState(entriesPromise.state), [
-    entriesPromise.state,
-  ]);
+  const [previousEntriesPromise, fetchPreviousEntries] = useTrackedPromise(
+    {
+      cancelPreviousOn: 'creation',
+      createPromise: () => {
+        if (state.topCursor === null) {
+          throw new Error(
+            'useLogState: Cannot fetch previous entries. No cursor is set.\nEnsure you have called `fetchEntries` at least once.'
+          );
+        }
+        if (!state.hasMoreBefore) {
+          return Promise.resolve({ data: EMPTY_DATA });
+        }
+        return fetchLogEntries(
+          {
+            sourceId,
+            startTimestamp,
+            endTimestamp,
+            query: parsedQuery,
+            before: state.topCursor,
+          },
+          services.http.fetch
+        );
+      },
+      onResolve: ({ data }) => {
+        if (!data.entries.length) {
+          return;
+        }
+        setState((prevState) => ({
+          entries: [...data.entries, ...prevState.entries],
+          hasMoreBefore: data.hasMoreBefore ?? prevState.hasMoreBefore,
+          topCursor: data.topCursor ?? prevState.topCursor,
+        }));
+      },
+    },
+    [sourceId, startTimestamp, endTimestamp, query, state.topCursor]
+  );
+
+  const [nextEntriesPromise, fetchNextEntries] = useTrackedPromise(
+    {
+      cancelPreviousOn: 'creation',
+      createPromise: () => {
+        if (state.bottomCursor === null) {
+          throw new Error(
+            'useLogState: Cannot fetch next entries. No cursor is set.\nEnsure you have called `fetchEntries` at least once.'
+          );
+        }
+        if (!state.hasMoreAfter) {
+          return Promise.resolve({ data: EMPTY_DATA });
+        }
+        return fetchLogEntries(
+          {
+            sourceId,
+            startTimestamp,
+            endTimestamp,
+            query: parsedQuery,
+            after: state.bottomCursor,
+          },
+          services.http.fetch
+        );
+      },
+      onResolve: ({ data }) => {
+        if (!data.entries.length) {
+          return;
+        }
+        setState((prevState) => ({
+          entries: [...prevState.entries, ...data.entries],
+          hasMoreAfter: data.hasMoreAfter ?? prevState.hasMoreAfter,
+          bottomCursor: data.bottomCursor ?? prevState.bottomCursor,
+        }));
+      },
+    },
+    [sourceId, startTimestamp, endTimestamp, query, state.bottomCursor]
+  );
+
+  const loadingState = useMemo<LoadingState>(
+    () => convertPromiseStateToLoadingState(entriesPromise.state),
+    [entriesPromise.state]
+  );
+
+  const pageLoadingState = useMemo<LoadingState>(() => {
+    const states = [previousEntriesPromise.state, nextEntriesPromise.state];
+
+    if (states.includes('pending')) {
+      return 'loading';
+    }
+
+    if (states.includes('rejected')) {
+      return 'error';
+    }
+
+    if (states.includes('resolved')) {
+      return 'success';
+    }
+
+    return 'uninitialized';
+  }, [previousEntriesPromise.state, nextEntriesPromise.state]);
 
   return {
-    entries,
+    ...state,
     fetchEntries,
+    fetchPreviousEntries,
+    fetchNextEntries,
     loadingState,
+    pageLoadingState,
   };
 }
 
 function convertPromiseStateToLoadingState(
   state: 'uninitialized' | 'pending' | 'resolved' | 'rejected'
-): LogStreamState['loadingState'] {
+): LoadingState {
   switch (state) {
     case 'uninitialized':
       return 'uninitialized';
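The page-level loading state collapses the two directional promises into one value with a fixed precedence: a pending request beats a rejection, which beats a success. A minimal sketch of the same mapping, decoupled from the hook (the `combinePageStates` name is illustrative):

```ts
type PromiseState = 'uninitialized' | 'pending' | 'resolved' | 'rejected';
type LoadingState = 'uninitialized' | 'loading' | 'success' | 'error';

// Precedence: any in-flight request makes the whole pager "loading";
// otherwise any failure makes it "error"; otherwise any success wins.
function combinePageStates(states: PromiseState[]): LoadingState {
  if (states.includes('pending')) return 'loading';
  if (states.includes('rejected')) return 'error';
  if (states.includes('resolved')) return 'success';
  return 'uninitialized';
}

combinePageStates(['pending', 'resolved']); // 'loading'
combinePageStates(['uninitialized', 'rejected']); // 'error'
```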


@@ -35,8 +35,9 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
     sourceConfiguration: InfraSourceConfiguration,
     fields: string[],
     params: LogEntriesParams
-  ): Promise<LogEntryDocument[]> {
-    const { startTimestamp, endTimestamp, query, cursor, size, highlightTerm } = params;
+  ): Promise<{ documents: LogEntryDocument[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
+    const { startTimestamp, endTimestamp, query, cursor, highlightTerm } = params;
+    const size = params.size ?? LOG_ENTRIES_PAGE_SIZE;
 
     const { sortDirection, searchAfterClause } = processCursor(cursor);
@@ -72,7 +73,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
       index: sourceConfiguration.logAlias,
       ignoreUnavailable: true,
       body: {
-        size: typeof size !== 'undefined' ? size : LOG_ENTRIES_PAGE_SIZE,
+        size: size + 1, // Extra one to test if it has more before or after
         track_total_hits: false,
         _source: false,
         fields,
@@ -104,8 +105,22 @@
       esQuery
     );
 
-    const hits = sortDirection === 'asc' ? esResult.hits.hits : esResult.hits.hits.reverse();
-    return mapHitsToLogEntryDocuments(hits, fields);
+    const hits = esResult.hits.hits;
+    const hasMore = hits.length > size;
+
+    if (hasMore) {
+      hits.pop();
+    }
+
+    if (sortDirection === 'desc') {
+      hits.reverse();
+    }
+
+    return {
+      documents: mapHitsToLogEntryDocuments(hits, fields),
+      hasMoreBefore: sortDirection === 'desc' ? hasMore : undefined,
+      hasMoreAfter: sortDirection === 'asc' ? hasMore : undefined,
+    };
   }
 
   public async getContainedLogSummaryBuckets(
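The adapter uses the classic "fetch one extra row" trick: request `size + 1` documents, and if the extra one comes back there is at least one more page in the direction of the sort. A generic, self-contained sketch of the technique with an in-memory stand-in for the Elasticsearch query (all names here are hypothetical):

```ts
// In-memory stand-in for the Elasticsearch query above.
const ALL_ROWS = Array.from({ length: 250 }, (_, i) => `row-${i}`);

async function fetchRows(limit: number): Promise<string[]> {
  return ALL_ROWS.slice(0, limit);
}

// Ask for one row beyond the page size. Receiving it proves another
// page exists; the extra row is dropped before returning the results.
async function fetchPage(size: number): Promise<{ items: string[]; hasMore: boolean }> {
  const rows = await fetchRows(size + 1);
  const hasMore = rows.length > size;
  return { items: hasMore ? rows.slice(0, size) : rows, hasMore };
}

fetchPage(200).then(({ items, hasMore }) => {
  console.log(items.length, hasMore); // 200 true
});
```

Note that the flag is directional: for a descending query (paging backwards) the surplus hit means `hasMoreBefore`, for an ascending one it means `hasMoreAfter`; the other direction is left `undefined` rather than guessed.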


@@ -74,7 +74,7 @@
     requestContext: RequestHandlerContext,
     sourceId: string,
     params: LogEntriesAroundParams
-  ) {
+  ): Promise<{ entries: LogEntry[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
     const { startTimestamp, endTimestamp, center, query, size, highlightTerm } = params;
 
     /*
@@ -87,14 +87,18 @@
      */
     const halfSize = (size || LOG_ENTRIES_PAGE_SIZE) / 2;
 
-    const entriesBefore = await this.getLogEntries(requestContext, sourceId, {
-      startTimestamp,
-      endTimestamp,
-      query,
-      cursor: { before: center },
-      size: Math.floor(halfSize),
-      highlightTerm,
-    });
+    const { entries: entriesBefore, hasMoreBefore } = await this.getLogEntries(
+      requestContext,
+      sourceId,
+      {
+        startTimestamp,
+        endTimestamp,
+        query,
+        cursor: { before: center },
+        size: Math.floor(halfSize),
+        highlightTerm,
+      }
+    );
 
     /*
      * Elasticsearch's `search_after` returns documents after the specified cursor.
@@ -108,23 +112,27 @@
       ? entriesBefore[entriesBefore.length - 1].cursor
       : { time: center.time - 1, tiebreaker: 0 };
 
-    const entriesAfter = await this.getLogEntries(requestContext, sourceId, {
-      startTimestamp,
-      endTimestamp,
-      query,
-      cursor: { after: cursorAfter },
-      size: Math.ceil(halfSize),
-      highlightTerm,
-    });
+    const { entries: entriesAfter, hasMoreAfter } = await this.getLogEntries(
+      requestContext,
+      sourceId,
+      {
+        startTimestamp,
+        endTimestamp,
+        query,
+        cursor: { after: cursorAfter },
+        size: Math.ceil(halfSize),
+        highlightTerm,
+      }
+    );
 
-    return [...entriesBefore, ...entriesAfter];
+    return { entries: [...entriesBefore, ...entriesAfter], hasMoreBefore, hasMoreAfter };
   }
 
   public async getLogEntries(
     requestContext: RequestHandlerContext,
     sourceId: string,
     params: LogEntriesParams
-  ): Promise<LogEntry[]> {
+  ): Promise<{ entries: LogEntry[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
     const { configuration } = await this.libs.sources.getSourceConfiguration(
       requestContext.core.savedObjects.client,
       sourceId
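`getLogEntriesAround` fetches half the requested page before the center cursor and half after it; the `Math.floor`/`Math.ceil` pair guarantees the two halves sum back to the original size even when it is odd. A small arithmetic sketch (helper name is illustrative):

```ts
// Splitting a page of `size` entries around a center point.
// floor + ceil always reconstruct the original size, odd or even.
function splitAroundCenter(size: number): { before: number; after: number } {
  const halfSize = size / 2;
  return { before: Math.floor(halfSize), after: Math.ceil(halfSize) };
}

splitAroundCenter(200); // { before: 100, after: 100 }
splitAroundCenter(201); // { before: 100, after: 101 }
```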
@@ -136,7 +144,7 @@
     const requiredFields = getRequiredFields(configuration, messageFormattingRules);
 
-    const documents = await this.adapter.getLogEntries(
+    const { documents, hasMoreBefore, hasMoreAfter } = await this.adapter.getLogEntries(
       requestContext,
       configuration,
       requiredFields,
@@ -173,7 +181,7 @@
       };
     });
 
-    return entries;
+    return { entries, hasMoreBefore, hasMoreAfter };
   }
 
   public async getLogSummaryBucketsBetween(
@@ -323,7 +331,7 @@
     sourceConfiguration: InfraSourceConfiguration,
     fields: string[],
     params: LogEntriesParams
-  ): Promise<LogEntryDocument[]>;
+  ): Promise<{ documents: LogEntryDocument[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }>;
 
   getContainedLogSummaryBuckets(
     requestContext: RequestHandlerContext,


@@ -34,14 +34,21 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
       } = payload;
 
       let entries;
+      let hasMoreBefore;
+      let hasMoreAfter;
+
       if ('center' in payload) {
-        entries = await logEntries.getLogEntriesAround(requestContext, sourceId, {
-          startTimestamp,
-          endTimestamp,
-          query: parseFilterQuery(query),
-          center: payload.center,
-          size,
-        });
+        ({ entries, hasMoreBefore, hasMoreAfter } = await logEntries.getLogEntriesAround(
+          requestContext,
+          sourceId,
+          {
+            startTimestamp,
+            endTimestamp,
+            query: parseFilterQuery(query),
+            center: payload.center,
+            size,
+          }
+        ));
       } else {
         let cursor: LogEntriesParams['cursor'];
         if ('before' in payload) {
@@ -50,13 +57,17 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
           cursor = { after: payload.after };
         }
 
-        entries = await logEntries.getLogEntries(requestContext, sourceId, {
-          startTimestamp,
-          endTimestamp,
-          query: parseFilterQuery(query),
-          cursor,
-          size,
-        });
+        ({ entries, hasMoreBefore, hasMoreAfter } = await logEntries.getLogEntries(
+          requestContext,
+          sourceId,
+          {
+            startTimestamp,
+            endTimestamp,
+            query: parseFilterQuery(query),
+            cursor,
+            size,
+          }
+        ));
       }
 
       const hasEntries = entries.length > 0;
@@ -67,6 +78,8 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
             entries,
             topCursor: hasEntries ? entries[0].cursor : null,
             bottomCursor: hasEntries ? entries[entries.length - 1].cursor : null,
+            hasMoreBefore,
+            hasMoreAfter,
           },
         }),
       });
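Because `entries`, `hasMoreBefore`, and `hasMoreAfter` are declared once and then assigned in both branches, the route uses destructuring assignment, which must be wrapped in parentheses when it begins a statement. A minimal illustration of that syntax rule (values are made up):

```ts
let entries: string[];
let hasMoreBefore: boolean | undefined;

// Without the wrapping parentheses, a statement starting with `{`
// would be parsed as a block, not as a destructuring assignment.
({ entries, hasMoreBefore } = { entries: ['a'], hasMoreBefore: true });

console.log(entries, hasMoreBefore); // ['a'] true
```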


@@ -79,7 +79,7 @@ export const initLogEntriesHighlightsRoute = ({ framework, logEntries }: InfraBa
       return response.ok({
         body: logEntriesHighlightsResponseRT.encode({
-          data: entriesPerHighlightTerm.map((entries) => {
+          data: entriesPerHighlightTerm.map(({ entries }) => {
             if (entries.length > 0) {
               return {
                 entries,
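This last change is a knock-on effect of the new `getLogEntries` return shape: each element of `entriesPerHighlightTerm` is now an object carrying `entries` (alongside the pagination flags) rather than a bare array, so the highlights route destructures `entries` out of it in the `map` callback.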