[Logs UI] Allow custom columns in the <LogStream /> component (#83802)

This commit is contained in:
Alejandro Fernández Gómez 2020-11-23 17:30:02 +01:00 committed by GitHub
parent 10afcf032f
commit 378d89b5cd
8 changed files with 142 additions and 63 deletions


@ -7,6 +7,7 @@
import * as rt from 'io-ts';
import { jsonArrayRT } from '../../typed_json';
import { logEntriesCursorRT } from './common';
import { logSourceColumnConfigurationRT } from '../log_sources';
export const LOG_ENTRIES_PATH = '/api/log_entries/entries';
@ -19,6 +20,7 @@ export const logEntriesBaseRequestRT = rt.intersection([
rt.partial({
query: rt.union([rt.string, rt.null]),
size: rt.number,
columns: rt.array(logSourceColumnConfigurationRT),
}),
]);
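For consumers calling the endpoint directly, here is a sketch of a request body that exercises the new field (the column objects follow `logSourceColumnConfigurationRT`; the IDs and field names below are illustrative):

```ts
// Illustrative payload for POST /api/log_entries/entries.
const requestBody = {
  sourceId: 'default',
  startTimestamp: Date.now() - 86_400_000, // last 24 hours
  endTimestamp: Date.now(),
  columns: [
    { timestampColumn: { id: 'col-timestamp' } },
    { fieldColumn: { id: 'col-host-name', field: 'host.name' } },
    { messageColumn: { id: 'col-message' } },
  ],
};
```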


@ -48,7 +48,7 @@ const logSourceFieldColumnConfigurationRT = rt.strict({
]),
});
const logSourceColumnConfigurationRT = rt.union([
export const logSourceColumnConfigurationRT = rt.union([
logSourceTimestampColumnConfigurationRT,
logSourceMessageColumnConfigurationRT,
logSourceFieldColumnConfigurationRT,


@ -68,6 +68,36 @@ By default the `<LogStream />` uses the `"default"` source configuration, but if
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} sourceId="my_source" />
```
### Custom columns
It is possible to change which columns are loaded without creating a whole new source configuration. To do so, the component supports the `columns` prop. The default configuration can be replicated as follows.
```tsx
<LogStream
startTimestamp={...}
endTimestamp={...}
columns={[
{ type: 'timestamp' },
{ type: 'field', field: 'event.dataset' },
{ type: 'message' },
]}
/>
```
There are three column types:
| Column type | Description |
| --- | --- |
| `type: "timestamp"` | The configured timestamp field. Defaults to `@timestamp`. |
| `type: "message"` | The value of the `message` field, if it exists. If it doesn't, the component will try to recompose the original log line using the values of other fields. |
| `type: "field"` | The field specified in the `field` property. |
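For example, a narrower layout with just the timestamp and a single field might look like this (a sketch; `host.name` is only an illustrative field name):
```tsx
<LogStream
  startTimestamp={Date.now() - 86_400_000}
  endTimestamp={Date.now()}
  columns={[
    { type: 'timestamp' },
    { type: 'field', field: 'host.name' },
  ]}
/>
```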
### Considerations
As mentioned in the prerequisites, the component relies on `kibana-react` to access Kibana's core services. If these are not available, the component will throw an exception when rendering. We advise using an `<EuiErrorBoundary>` in your component hierarchy to catch this error if necessary.
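A minimal sketch of that wiring, assuming the usual `KibanaContextProvider` from `kibana_react` and `EuiErrorBoundary` from EUI (the import paths and prop types below are illustrative and depend on where your plugin lives):
```tsx
import React from 'react';
import type { CoreStart } from 'kibana/public';
import { EuiErrorBoundary } from '@elastic/eui';
// Illustrative relative paths; resolve them from your own plugin.
import { KibanaContextProvider } from '../../../../../../src/plugins/kibana_react/public';
import { LogStream } from '../../../../../../x-pack/plugins/infra/public';

// `core.http` is what <LogStream /> needs from the kibana-react context.
export const LogsSection: React.FC<{ core: CoreStart }> = ({ core }) => (
  <KibanaContextProvider services={{ ...core }}>
    <EuiErrorBoundary>
      <LogStream startTimestamp={Date.now() - 86_400_000} endTimestamp={Date.now()} />
    </EuiErrorBoundary>
  </KibanaContextProvider>
);
```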


@ -11,13 +11,18 @@ import { euiStyled } from '../../../../observability/public';
import { LogEntriesCursor } from '../../../common/http_api';
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
import { useLogSource } from '../../containers/logs/log_source';
import { LogSourceConfigurationProperties, useLogSource } from '../../containers/logs/log_source';
import { useLogStream } from '../../containers/logs/log_stream';
import { ScrollableLogTextStreamView } from '../logging/log_text_stream';
const PAGE_THRESHOLD = 2;
type LogColumnDefinition =
| { type: 'timestamp' }
| { type: 'message' }
| { type: 'field'; field: string };
export interface LogStreamProps {
sourceId?: string;
startTimestamp: number;
@ -26,6 +31,7 @@ export interface LogStreamProps {
center?: LogEntriesCursor;
highlight?: string;
height?: string | number;
columns?: LogColumnDefinition[];
}
export const LogStream: React.FC<LogStreamProps> = ({
@ -36,7 +42,13 @@ export const LogStream: React.FC<LogStreamProps> = ({
center,
highlight,
height = '400px',
columns,
}) => {
const customColumns = useMemo(
() => (columns ? convertLogColumnDefinitionToLogSourceColumnDefinition(columns) : undefined),
[columns]
);
// source boilerplate
const { services } = useKibana();
if (!services?.http?.fetch) {
@ -74,6 +86,7 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
endTimestamp,
query,
center,
columns: customColumns,
});
// Derived state
@ -83,8 +96,8 @@ Read more at https://github.com/elastic/kibana/blob/master/src/plugins/kibana_re
const isLoadingMore = pageLoadingState === 'loading';
const columnConfigurations = useMemo(() => {
return sourceConfiguration ? sourceConfiguration.configuration.logColumns : [];
}, [sourceConfiguration]);
return sourceConfiguration ? customColumns ?? sourceConfiguration.configuration.logColumns : [];
}, [sourceConfiguration, customColumns]);
const streamItems = useMemo(
() =>
@ -163,6 +176,21 @@ const LogStreamContent = euiStyled.div<{ height: string }>`
height: ${(props) => props.height};
`;
function convertLogColumnDefinitionToLogSourceColumnDefinition(
columns: LogColumnDefinition[]
): LogSourceConfigurationProperties['logColumns'] {
return columns.map((column) => {
switch (column.type) {
case 'timestamp':
return { timestampColumn: { id: '___#timestamp' } };
case 'message':
return { messageColumn: { id: '___#message' } };
case 'field':
return { fieldColumn: { id: `___#${column.field}`, field: column.field } };
}
});
}
// Allow for lazy loading
// eslint-disable-next-line import/no-default-export
export default LogStream;
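Since the component stays the default export, it can still be consumed lazily. A sketch with `React.lazy`, assuming an illustrative import path:

```tsx
import React, { Suspense } from 'react';
import type { LogStreamProps } from './log_stream';

// Illustrative path; point it at wherever the component lives relative to your code.
const LazyLogStream = React.lazy(() => import('./log_stream'));

export const LazyLogStreamWrapper: React.FC<LogStreamProps> = (props) => (
  <Suspense fallback={<div />}>
    <LazyLogStream {...props} />
  </Suspense>
);
```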


@ -12,6 +12,7 @@ import { fetchLogEntries } from '../log_entries/api/fetch_log_entries';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { LogEntry, LogEntriesCursor } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { LogSourceConfigurationProperties } from '../log_source';
interface LogStreamProps {
sourceId: string;
@ -19,6 +20,7 @@ interface LogStreamProps {
endTimestamp: number;
query?: string;
center?: LogEntriesCursor;
columns?: LogSourceConfigurationProperties['logColumns'];
}
interface LogStreamState {
@ -60,6 +62,7 @@ export function useLogStream({
endTimestamp,
query,
center,
columns,
}: LogStreamProps): LogStreamReturn {
const { services } = useKibanaContextForPlugin();
const [state, setState] = useSetState<LogStreamState>(INITIAL_STATE);
@ -100,6 +103,7 @@ export function useLogStream({
startTimestamp,
endTimestamp,
query: parsedQuery,
columns,
...fetchPosition,
},
services.http.fetch


@ -15,6 +15,7 @@ import {
LogEntriesItem,
LogEntriesCursor,
LogColumn,
LogEntriesRequest,
} from '../../../../common/http_api';
import {
InfraSourceConfiguration,
@ -73,7 +74,8 @@ export class InfraLogEntriesDomain {
public async getLogEntriesAround(
requestContext: RequestHandlerContext,
sourceId: string,
params: LogEntriesAroundParams
params: LogEntriesAroundParams,
columnOverrides?: LogEntriesRequest['columns']
): Promise<{ entries: LogEntry[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
const { startTimestamp, endTimestamp, center, query, size, highlightTerm } = params;
@ -97,7 +99,8 @@ export class InfraLogEntriesDomain {
cursor: { before: center },
size: Math.floor(halfSize),
highlightTerm,
}
},
columnOverrides
);
/*
@ -131,13 +134,16 @@ export class InfraLogEntriesDomain {
public async getLogEntries(
requestContext: RequestHandlerContext,
sourceId: string,
params: LogEntriesParams
params: LogEntriesParams,
columnOverrides?: LogEntriesRequest['columns']
): Promise<{ entries: LogEntry[]; hasMoreBefore?: boolean; hasMoreAfter?: boolean }> {
const { configuration } = await this.libs.sources.getSourceConfiguration(
requestContext.core.savedObjects.client,
sourceId
);
const columnDefinitions = columnOverrides ?? configuration.logColumns;
const messageFormattingRules = compileFormattingRules(
getBuiltinRules(configuration.fields.message)
);
@ -155,7 +161,7 @@ export class InfraLogEntriesDomain {
return {
id: doc.id,
cursor: doc.cursor,
columns: configuration.logColumns.map(
columns: columnDefinitions.map(
(column): LogColumn => {
if ('timestampColumn' in column) {
return {


@ -31,6 +31,7 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
sourceId,
query,
size,
columns,
} = payload;
let entries;
@ -47,7 +48,8 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
query: parseFilterQuery(query),
center: payload.center,
size,
}
},
columns
));
} else {
let cursor: LogEntriesParams['cursor'];
@ -66,7 +68,8 @@ export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs)
query: parseFilterQuery(query),
cursor,
size,
}
},
columns
));
}


@ -7,11 +7,7 @@
import expect from '@kbn/expect';
import { v4 as uuidv4 } from 'uuid';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { fold } from 'fp-ts/lib/Either';
import { createPlainError, throwErrors } from '../../../../plugins/infra/common/runtime_types';
import { decodeOrThrow } from '../../../../plugins/infra/common/runtime_types';
import {
LOG_ENTRIES_PATH,
@ -68,10 +64,7 @@ export default function ({ getService }: FtrProviderContext) {
)
.expect(200);
const logEntriesResponse = pipe(
logEntriesResponseRT.decode(body),
fold(throwErrors(createPlainError), identity)
);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
const entries = logEntriesResponse.data.entries;
const firstEntry = entries[0];
@ -104,10 +97,7 @@ export default function ({ getService }: FtrProviderContext) {
)
.expect(200);
const logEntriesResponse = pipe(
logEntriesResponseRT.decode(body),
fold(throwErrors(createPlainError), identity)
);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
const entries = logEntriesResponse.data.entries;
const entry = entries[0];
@ -126,6 +116,52 @@ export default function ({ getService }: FtrProviderContext) {
expect(messageColumn.message.length).to.be.greaterThan(0);
});
it('Returns custom column configurations', async () => {
const customColumns = [
{ timestampColumn: { id: uuidv4() } },
{ fieldColumn: { id: uuidv4(), field: 'host.name' } },
{ fieldColumn: { id: uuidv4(), field: 'event.dataset' } },
{ messageColumn: { id: uuidv4() } },
];
const { body } = await supertest
.post(LOG_ENTRIES_PATH)
.set(COMMON_HEADERS)
.send(
logEntriesRequestRT.encode({
sourceId: 'default',
startTimestamp: EARLIEST_KEY_WITH_DATA.time,
endTimestamp: LATEST_KEY_WITH_DATA.time,
center: KEY_WITHIN_DATA_RANGE,
columns: customColumns,
})
)
.expect(200);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
const entries = logEntriesResponse.data.entries;
const entry = entries[0];
expect(entry.columns).to.have.length(4);
const timestampColumn = entry.columns[0] as LogTimestampColumn;
expect(timestampColumn).to.have.property('timestamp');
const hostNameColumn = entry.columns[1] as LogFieldColumn;
expect(hostNameColumn).to.have.property('field');
expect(hostNameColumn.field).to.be('host.name');
expect(hostNameColumn).to.have.property('value');
const eventDatasetColumn = entry.columns[2] as LogFieldColumn;
expect(eventDatasetColumn).to.have.property('field');
expect(eventDatasetColumn.field).to.be('event.dataset');
expect(eventDatasetColumn).to.have.property('value');
const messageColumn = entry.columns[3] as LogMessageColumn;
expect(messageColumn).to.have.property('message');
expect(messageColumn.message.length).to.be.greaterThan(0);
});
it('Does not build context if entry does not have all fields', async () => {
const { body } = await supertest
.post(LOG_ENTRIES_PATH)
@ -140,10 +176,7 @@ export default function ({ getService }: FtrProviderContext) {
)
.expect(200);
const logEntriesResponse = pipe(
logEntriesResponseRT.decode(body),
fold(throwErrors(createPlainError), identity)
);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
const entries = logEntriesResponse.data.entries;
const entry = entries[0];
@ -162,10 +195,7 @@ export default function ({ getService }: FtrProviderContext) {
size: 10,
})
);
const firstPage = pipe(
logEntriesResponseRT.decode(firstPageBody),
fold(throwErrors(createPlainError), identity)
);
const firstPage = decodeOrThrow(logEntriesResponseRT)(firstPageBody);
const { body: secondPageBody } = await supertest
.post(LOG_ENTRIES_PATH)
@ -179,10 +209,7 @@ export default function ({ getService }: FtrProviderContext) {
size: 10,
})
);
const secondPage = pipe(
logEntriesResponseRT.decode(secondPageBody),
fold(throwErrors(createPlainError), identity)
);
const secondPage = decodeOrThrow(logEntriesResponseRT)(secondPageBody);
const { body: bothPagesBody } = await supertest
.post(LOG_ENTRIES_PATH)
@ -195,10 +222,7 @@ export default function ({ getService }: FtrProviderContext) {
size: 20,
})
);
const bothPages = pipe(
logEntriesResponseRT.decode(bothPagesBody),
fold(throwErrors(createPlainError), identity)
);
const bothPages = decodeOrThrow(logEntriesResponseRT)(bothPagesBody);
expect(bothPages.data.entries).to.eql([
...firstPage.data.entries,
@ -222,10 +246,7 @@ export default function ({ getService }: FtrProviderContext) {
size: 10,
})
);
const lastPage = pipe(
logEntriesResponseRT.decode(lastPageBody),
fold(throwErrors(createPlainError), identity)
);
const lastPage = decodeOrThrow(logEntriesResponseRT)(lastPageBody);
const { body: secondToLastPageBody } = await supertest
.post(LOG_ENTRIES_PATH)
@ -239,10 +260,7 @@ export default function ({ getService }: FtrProviderContext) {
size: 10,
})
);
const secondToLastPage = pipe(
logEntriesResponseRT.decode(secondToLastPageBody),
fold(throwErrors(createPlainError), identity)
);
const secondToLastPage = decodeOrThrow(logEntriesResponseRT)(secondToLastPageBody);
const { body: bothPagesBody } = await supertest
.post(LOG_ENTRIES_PATH)
@ -256,10 +274,7 @@ export default function ({ getService }: FtrProviderContext) {
size: 20,
})
);
const bothPages = pipe(
logEntriesResponseRT.decode(bothPagesBody),
fold(throwErrors(createPlainError), identity)
);
const bothPages = decodeOrThrow(logEntriesResponseRT)(bothPagesBody);
expect(bothPages.data.entries).to.eql([
...secondToLastPage.data.entries,
@ -283,10 +298,7 @@ export default function ({ getService }: FtrProviderContext) {
})
)
.expect(200);
const logEntriesResponse = pipe(
logEntriesResponseRT.decode(body),
fold(throwErrors(createPlainError), identity)
);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
const entries = logEntriesResponse.data.entries;
const firstEntry = entries[0];
@ -313,10 +325,7 @@ export default function ({ getService }: FtrProviderContext) {
)
.expect(200);
const logEntriesResponse = pipe(
logEntriesResponseRT.decode(body),
fold(throwErrors(createPlainError), identity)
);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
expect(logEntriesResponse.data.entries).to.have.length(0);
expect(logEntriesResponse.data.topCursor).to.be(null);
@ -371,10 +380,7 @@ export default function ({ getService }: FtrProviderContext) {
)
.expect(200);
const logEntriesResponse = pipe(
logEntriesResponseRT.decode(body),
fold(throwErrors(createPlainError), identity)
);
const logEntriesResponse = decodeOrThrow(logEntriesResponseRT)(body);
const entries = logEntriesResponse.data.entries;
const entry = entries[0];