[Logs UI] Allow custom columns in the <LogStream /> component (#83802)

Alejandro Fernández Gómez 2020-11-23 17:30:02 +01:00 committed by GitHub
parent 10afcf032f
commit 378d89b5cd
8 changed files with 142 additions and 63 deletions

View file

@@ -7,6 +7,7 @@
import * as rt from 'io-ts';
import { jsonArrayRT } from '../../typed_json';
import { logEntriesCursorRT } from './common';
+import { logSourceColumnConfigurationRT } from '../log_sources';
export const LOG_ENTRIES_PATH = '/api/log_entries/entries';
@@ -19,6 +20,7 @@ export const logEntriesBaseRequestRT = rt.intersection([
  rt.partial({
    query: rt.union([rt.string, rt.null]),
    size: rt.number,
+    columns: rt.array(logSourceColumnConfigurationRT),
  }),
]);

View file

@@ -48,7 +48,7 @@ const logSourceFieldColumnConfigurationRT = rt.strict({
  ]),
});
-const logSourceColumnConfigurationRT = rt.union([
+export const logSourceColumnConfigurationRT = rt.union([
  logSourceTimestampColumnConfigurationRT,
  logSourceMessageColumnConfigurationRT,
  logSourceFieldColumnConfigurationRT,

View file

@@ -68,6 +68,36 @@ By default the `<LogStream />` uses the `"default"` source configuration, but if your plugin uses a different one you can specify it with the `sourceId` prop.
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} sourceId="my_source" />
```
+### Custom columns
+It is possible to change which columns are loaded without creating a whole new source configuration. To do so, the component supports the `columns` prop. The default configuration can be replicated as follows.
+```tsx
+<LogStream
+  startTimestamp={...}
+  endTimestamp={...}
+  columns={[
+    { type: 'timestamp' },
+    { type: 'field', field: 'event.dataset' },
+    { type: 'message' },
+  ]}
+/>
+```
+There are three column types:
+<table>
+  <tr>
+    <td>`type: "timestamp"`</td>
+    <td>The configured timestamp field. Defaults to `@timestamp`.</td>
+  </tr>
+  <tr>
+    <td>`type: "message"`</td>
+    <td>The value of the `message` field, if it exists. If it doesn't, the component will try to recompose the original log line from the values of other fields.</td>
+  </tr>
+  <tr>
+    <td>`type: "field"`</td>
+    <td>A specific field, named by the `field` property.</td>
+  </tr>
+</table>
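
As a further sketch (assuming ECS fields such as `host.name` and `log.level` exist in the source indices; any indexed field name can be used), a custom column set could look like this:

```tsx
<LogStream
  startTimestamp={startTimestamp}
  endTimestamp={endTimestamp}
  columns={[
    { type: 'timestamp' },                  // the source's configured timestamp field
    { type: 'field', field: 'host.name' },  // any indexed field, referenced by name
    { type: 'field', field: 'log.level' },
    { type: 'message' },                    // the raw or recomposed log message
  ]}
/>
```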
### Considerations
As mentioned in the prerequisites, the component relies on `kibana-react` to access Kibana's core services. If they are not available, the component will throw an exception when rendering. We advise using an `<EuiErrorBoundary>` in your component hierarchy to catch this error if necessary.
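
For illustration, a minimal sketch of that advice. The component name `MyLogsPanel` and the import paths are placeholders; adjust them to the consuming plugin's location, and note that the surrounding app is assumed to already provide the `kibana-react` context.

```tsx
import React from 'react';
import { EuiErrorBoundary } from '@elastic/eui';
// Placeholder path: adjust to where the consuming plugin lives in the Kibana repo.
import { LogStream } from '../../../infra/public';

export const MyLogsPanel: React.FC<{ startTimestamp: number; endTimestamp: number }> = ({
  startTimestamp,
  endTimestamp,
}) => (
  // If kibana-react's services (notably core's `http.fetch`) are missing, <LogStream />
  // throws while rendering; the boundary shows an inline EUI error instead of taking
  // down the whole page.
  <EuiErrorBoundary>
    <LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} />
  </EuiErrorBoundary>
);
```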

View file

@@ -11,13 +11,18 @@ import { euiStyled } from '../../../../observability/public';
import { LogEntriesCursor } from '../../../common/http_api';
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
-import { useLogSource } from '../../containers/logs/log_source';
+import { LogSourceConfigurationProperties, useLogSource } from '../../containers/logs/log_source';
import { useLogStream } from '../../containers/logs/log_stream';
import { ScrollableLogTextStreamView } from '../logging/log_text_stream';
const PAGE_THRESHOLD = 2;
+type LogColumnDefinition =
+  | { type: 'timestamp' }
+  | { type: 'message' }
+  | { type: 'field'; field: string };
export interface LogStreamProps {
  sourceId?: string;
  startTimestamp: number;
@@ -26,6 +31,7 @@ export interface LogStreamProps {
  center?: LogEntriesCursor;
  highlight?: string;
  height?: string | number;
+  columns?: LogColumnDefinition[];
}
export const LogStream: React.FC<LogStreamProps> = ({
@@ -36,7 +42,13 @@ export const LogStream: React.FC<LogStreamProps> = ({
  center,
  highlight,
  height = '400px',
+  columns,
}) => {
+  const customColumns = useMemo(
+    () => (columns ? convertLogColumnDefinitionToLogSourceColumnDefinition(columns) : undefined),
+    [columns]
+  );
  // source boilerplate
  const { services } = useKibana();
  if (!services?.http?.fetch) {
@@ -74,6 +86,7 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
    endTimestamp,
    query,
    center,
+    columns: customColumns,
  });
  // Derived state
@@ -83,8 +96,8 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
  const isLoadingMore = pageLoadingState === 'loading';
  const columnConfigurations = useMemo(() => {
-    return sourceConfiguration ? sourceConfiguration.configuration.logColumns : [];
-  }, [sourceConfiguration]);
+    return sourceConfiguration ? customColumns ?? sourceConfiguration.configuration.logColumns : [];
+  }, [sourceConfiguration, customColumns]);
  const streamItems = useMemo(
    () =>
@@ -163,6 +176,21 @@ const LogStreamContent = euiStyled.div<{ height: string }>`
  height: ${(props) => props.height};
`;
+function convertLogColumnDefinitionToLogSourceColumnDefinition(
+  columns: LogColumnDefinition[]
+): LogSourceConfigurationProperties['logColumns'] {
+  return columns.map((column) => {
+    switch (column.type) {
+      case 'timestamp':
+        return { timestampColumn: { id: '___#timestamp' } };
+      case 'message':
+        return { messageColumn: { id: '___#message' } };
+      case 'field':
+        return { fieldColumn: { id: `___#${column.field}`, field: column.field } };
+    }
+  });
+}
// Allow for lazy loading
// eslint-disable-next-line import/no-default-export
export default LogStream;

View file

@@ -12,6 +12,7 @@ import { fetchLogEntries } from '../log_entries/api/fetch_log_entries';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { LogEntry, LogEntriesCursor } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
+import { LogSourceConfigurationProperties } from '../log_source';
interface LogStreamProps {
  sourceId: string;
@@ -19,6 +20,7 @@ interface LogStreamProps {
  endTimestamp: number;
  query?: string;
  center?: LogEntriesCursor;
+  columns?: LogSourceConfigurationProperties['logColumns'];
}
interface LogStreamState {
@@ -60,6 +62,7 @@ export function useLogStream({
  endTimestamp,
  query,
  center,
+  columns,
}: LogStreamProps): LogStreamReturn {
  const { services } = useKibanaContextForPlugin();
  const [state, setState] = useSetState<LogStreamState>(INITIAL_STATE);
@@ -100,6 +103,7 @@ export function useLogStream({
          startTimestamp,
          endTimestamp,
          query: parsedQuery,
+          columns,
          ...fetchPosition,
        },
        services.http.fetch

View file

@@ -15,6 +15,7 @@ import {
  LogEntriesItem,
  LogEntriesCursor,
  LogColumn,
+  LogEntriesRequest,
} from '../../../../common/http_api';
import {
  InfraSourceConfiguration,
@@ -73,7 +74,8 @@ export class InfraLogEntriesDomain {
  public async getLogEntriesAround(
    requestContext: RequestHandlerContext,
    sourceId: string,
-    params: LogEntriesAroundParams
+    params: LogEntriesAroundParams,
+    columnOverrides?: LogEntriesRequest['columns']
  ): Promise<{ entries: LogEntry[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
    const { startTimestamp, endTimestamp, center, query, size, highlightTerm } = params;
@@ -97,7 +99,8 @@
        cursor: { before: center },
        size: Math.floor(halfSize),
        highlightTerm,
-      }
+      },
+      columnOverrides
    );
    /*
@@ -131,13 +134,16 @@
  public async getLogEntries(
    requestContext: RequestHandlerContext,
    sourceId: string,
-    params: LogEntriesParams
+    params: LogEntriesParams,
+    columnOverrides?: LogEntriesRequest['columns']
  ): Promise<{ entries: LogEntry[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
    const { configuration } = await this.libs.sources.getSourceConfiguration(
      requestContext.core.savedObjects.client,
      sourceId
    );
+    const columnDefinitions = columnOverrides ?? configuration.logColumns;
    const messageFormattingRules = compileFormattingRules(
      getBuiltinRules(configuration.fields.message)
    );
@@ -155,7 +161,7 @@
      return {
        id: doc.id,
        cursor: doc.cursor,
-        columns: configuration.logColumns.map(
+        columns: columnDefinitions.map(
          (column): LogColumn => {
            if ('timestampColumn' in column) {
              return {

View file

@@ -31,6 +31,7 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
        sourceId,
        query,
        size,
+        columns,
      } = payload;
      let entries;
@@ -47,7 +48,8 @@
            query: parseFilterQuery(query),
            center: payload.center,
            size,
-          }
+          },
+          columns
        ));
      } else {
        let cursor: LogEntriesParams['cursor'];
@@ -66,7 +68,8 @@
            query: parseFilterQuery(query),
            cursor,
            size,
-          }
+          },
+          columns
        ));
      }
} }

View file

@@ -7,11 +7,7 @@
import expect from '@kbn/expect';
import { v4 as uuidv4 } from 'uuid';
-import { pipe } from 'fp-ts/lib/pipeable';
-import { identity } from 'fp-ts/lib/function';
-import { fold } from 'fp-ts/lib/Either';
-import { createPlainError, throwErrors } from '../../../../plugins/infra/common/runtime_types';
+import { decodeOrThrow } from '../../../../plugins/infra/common/runtime_types';
import {
  LOG_ENTRIES_PATH,
@@ -68,10 +64,7 @@ export default function ({ getService }: FtrProviderContext) {
          )
          .expect(200);
-        const logEntriesResponse = pipe(
-          logEntriesResponseRT.decode(body),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
        const entries = logEntriesResponse.data.entries;
        const firstEntry = entries[0];
@@ -104,10 +97,7 @@ export default function ({ getService }: FtrProviderContext) {
          )
          .expect(200);
-        const logEntriesResponse = pipe(
-          logEntriesResponseRT.decode(body),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
        const entries = logEntriesResponse.data.entries;
        const entry = entries[0];
@@ -126,6 +116,52 @@ export default function ({ getService }: FtrProviderContext) {
        expect(messageColumn.message.length).to.be.greaterThan(0);
      });
+      it('Returns custom column configurations', async () => {
+        const customColumns = [
+          { timestampColumn: { id: uuidv4() } },
+          { fieldColumn: { id: uuidv4(), field: 'host.name' } },
+          { fieldColumn: { id: uuidv4(), field: 'event.dataset' } },
+          { messageColumn: { id: uuidv4() } },
+        ];
+        const { body } = await supertest
+          .post(LOG_ENTRIES_PATH)
+          .set(COMMON_HEADERS)
+          .send(
+            logEntriesRequestRT.encode({
+              sourceId: 'default',
+              startTimestamp: EARLIEST_KEY_WITH_DATA.time,
+              endTimestamp: LATEST_KEY_WITH_DATA.time,
+              center: KEY_WITHIN_DATA_RANGE,
+              columns: customColumns,
+            })
+          )
+          .expect(200);
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
+        const entries = logEntriesResponse.data.entries;
+        const entry = entries[0];
+        expect(entry.columns).to.have.length(4);
+        const timestampColumn = entry.columns[0] as LogTimestampColumn;
+        expect(timestampColumn).to.have.property('timestamp');
+        const hostNameColumn = entry.columns[1] as LogFieldColumn;
+        expect(hostNameColumn).to.have.property('field');
+        expect(hostNameColumn.field).to.be('host.name');
+        expect(hostNameColumn).to.have.property('value');
+        const eventDatasetColumn = entry.columns[2] as LogFieldColumn;
+        expect(eventDatasetColumn).to.have.property('field');
+        expect(eventDatasetColumn.field).to.be('event.dataset');
+        expect(eventDatasetColumn).to.have.property('value');
+        const messageColumn = entry.columns[3] as LogMessageColumn;
+        expect(messageColumn).to.have.property('message');
+        expect(messageColumn.message.length).to.be.greaterThan(0);
+      });
      it('Does not build context if entry does not have all fields', async () => {
        const { body } = await supertest
          .post(LOG_ENTRIES_PATH)
@@ -140,10 +176,7 @@ export default function ({ getService }: FtrProviderContext) {
          )
          .expect(200);
-        const logEntriesResponse = pipe(
-          logEntriesResponseRT.decode(body),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
        const entries = logEntriesResponse.data.entries;
        const entry = entries[0];
@@ -162,10 +195,7 @@ export default function ({ getService }: FtrProviderContext) {
              size: 10,
            })
          );
-        const firstPage = pipe(
-          logEntriesResponseRT.decode(firstPageBody),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const firstPage = decodeOrThrow(logEntriesResponseRT)(firstPageBody);
        const { body: secondPageBody } = await supertest
          .post(LOG_ENTRIES_PATH)
@@ -179,10 +209,7 @@ export default function ({ getService }: FtrProviderContext) {
              size: 10,
            })
          );
-        const secondPage = pipe(
-          logEntriesResponseRT.decode(secondPageBody),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const secondPage = decodeOrThrow(logEntriesResponseRT)(secondPageBody);
        const { body: bothPagesBody } = await supertest
          .post(LOG_ENTRIES_PATH)
@@ -195,10 +222,7 @@ export default function ({ getService }: FtrProviderContext) {
              size: 20,
            })
          );
-        const bothPages = pipe(
-          logEntriesResponseRT.decode(bothPagesBody),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const bothPages = decodeOrThrow(logEntriesResponseRT)(bothPagesBody);
        expect(bothPages.data.entries).to.eql([
          ...firstPage.data.entries,
@@ -222,10 +246,7 @@ export default function ({ getService }: FtrProviderContext) {
              size: 10,
            })
          );
-        const lastPage = pipe(
-          logEntriesResponseRT.decode(lastPageBody),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const lastPage = decodeOrThrow(logEntriesResponseRT)(lastPageBody);
        const { body: secondToLastPageBody } = await supertest
          .post(LOG_ENTRIES_PATH)
@@ -239,10 +260,7 @@ export default function ({ getService }: FtrProviderContext) {
              size: 10,
            })
          );
-        const secondToLastPage = pipe(
-          logEntriesResponseRT.decode(secondToLastPageBody),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const secondToLastPage = decodeOrThrow(logEntriesResponseRT)(secondToLastPageBody);
        const { body: bothPagesBody } = await supertest
          .post(LOG_ENTRIES_PATH)
@@ -256,10 +274,7 @@ export default function ({ getService }: FtrProviderContext) {
              size: 20,
            })
          );
-        const bothPages = pipe(
-          logEntriesResponseRT.decode(bothPagesBody),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const bothPages = decodeOrThrow(logEntriesResponseRT)(bothPagesBody);
        expect(bothPages.data.entries).to.eql([
          ...secondToLastPage.data.entries,
@@ -283,10 +298,7 @@ export default function ({ getService }: FtrProviderContext) {
            })
          )
          .expect(200);
-        const logEntriesResponse = pipe(
-          logEntriesResponseRT.decode(body),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
        const entries = logEntriesResponse.data.entries;
        const firstEntry = entries[0];
@@ -313,10 +325,7 @@ export default function ({ getService }: FtrProviderContext) {
          )
          .expect(200);
-        const logEntriesResponse = pipe(
-          logEntriesResponseRT.decode(body),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
        expect(logEntriesResponse.data.entries).to.have.length(0);
        expect(logEntriesResponse.data.topCursor).to.be(null);
@@ -371,10 +380,7 @@ export default function ({ getService }: FtrProviderContext) {
          )
          .expect(200);
-        const logEntriesResponse = pipe(
-          logEntriesResponseRT.decode(body),
-          fold(throwErrors(createPlainError), identity)
-        );
+        const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
        const entries = logEntriesResponse.data.entries;
        const entry = entries[0];