[7.x] create kbn-legacy-logging package (#77678) (#84034)

* create kbn-legacy-logging package (#77678)

* create kbn-legacy-logging package and start to move things

* fix rotator tests

* fix logging system test mocks

* move logging format to the package

* move logging setup to package

* adapt legacy logging server

* remove usage of legacy config in the legacy logging server

* move legacy logging server to package

* remove `??` syntax from package

* update generated doc

* fix a few things due to month-old merge

* remove typings from project

* move reconfigureLogging to package

* add basic README file

* update generated doc

* remove old typings

* add typing for legacy logging events

* remove `??` from packages

* fix / improve event types usages

* remove suffix from tsconfig
# Conflicts:
#	src/legacy/server/config/schema.js

* fix for 7.x branch
This commit is contained in:
Pierre Gayvallet 2020-11-23 07:31:31 +01:00 committed by GitHub
parent b80f2a381f
commit 1a97e078f2
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
36 changed files with 749 additions and 306 deletions

View file

@ -128,6 +128,7 @@
"@kbn/config-schema": "link:packages/kbn-config-schema",
"@kbn/i18n": "link:packages/kbn-i18n",
"@kbn/interpreter": "link:packages/kbn-interpreter",
"@kbn/legacy-logging": "link:packages/kbn-legacy-logging",
"@kbn/logging": "link:packages/kbn-logging",
"@kbn/monaco": "link:packages/kbn-monaco",
"@kbn/std": "link:packages/kbn-std",

View file

@ -0,0 +1,4 @@
# @kbn/legacy-logging
This package contains the implementation of the legacy logging
system, based on `@hapi/good`.

View file

@ -0,0 +1,15 @@
{
"name": "@kbn/legacy-logging",
"version": "1.0.0",
"private": true,
"license": "Apache-2.0",
"main": "./target/index.js",
"scripts": {
"build": "tsc",
"kbn:bootstrap": "yarn build",
"kbn:watch": "yarn build --watch"
},
"dependencies": {
"@kbn/std": "link:../kbn-std"
}
}

View file

@ -18,20 +18,25 @@
*/
import _ from 'lodash';
import { getLoggerStream } from './log_reporter';
import { getLogReporter } from './log_reporter';
import { LegacyLoggingConfig } from './schema';
export default function loggingConfiguration(config) {
const events = config.get('logging.events');
/**
* Returns the `@hapi/good` plugin configuration to be used for the legacy logging
* @param config
*/
export function getLoggingConfiguration(config: LegacyLoggingConfig, opsInterval: number) {
const events = config.events;
if (config.get('logging.silent')) {
if (config.silent) {
_.defaults(events, {});
} else if (config.get('logging.quiet')) {
} else if (config.quiet) {
_.defaults(events, {
log: ['listening', 'error', 'fatal'],
request: ['error'],
error: '*',
});
} else if (config.get('logging.verbose')) {
} else if (config.verbose) {
_.defaults(events, {
log: '*',
ops: '*',
@ -42,30 +47,30 @@ export default function loggingConfiguration(config) {
} else {
_.defaults(events, {
log: ['info', 'warning', 'error', 'fatal'],
response: config.get('logging.json') ? '*' : '!',
response: config.json ? '*' : '!',
request: ['info', 'warning', 'error', 'fatal'],
error: '*',
});
}
const loggerStream = getLoggerStream({
const loggerStream = getLogReporter({
config: {
json: config.get('logging.json'),
dest: config.get('logging.dest'),
timezone: config.get('logging.timezone'),
json: config.json,
dest: config.dest,
timezone: config.timezone,
// I'm adding the default here because if you add another filter
// using the commandline it will remove authorization. I want users
// to have to explicitly set --logging.filter.authorization=none or
// --logging.filter.cookie=none to have it show up in the logs.
filter: _.defaults(config.get('logging.filter'), {
filter: _.defaults(config.filter, {
authorization: 'remove',
cookie: 'remove',
}),
},
events: _.transform(
events,
function (filtered, val, key) {
function (filtered: Record<string, string>, val: string, key: string) {
// provide a string compatible way to remove events
if (val !== '!') filtered[key] = val;
},
@ -75,7 +80,7 @@ export default function loggingConfiguration(config) {
const options = {
ops: {
interval: config.get('ops.interval'),
interval: opsInterval,
},
includes: {
request: ['headers', 'payload'],

View file

@ -0,0 +1,25 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { LegacyLoggingConfig, legacyLoggingConfigSchema } from './schema';
export { attachMetaData } from './metadata';
export { setupLoggingRotate } from './rotate';
export { setupLogging, reconfigureLogging } from './setup_logging';
export { getLoggingConfiguration } from './get_logging_config';
export { LegacyLoggingServer } from './legacy_logging_server';

View file

@ -17,11 +17,9 @@
* under the License.
*/
jest.mock('../../../../legacy/server/config');
jest.mock('../../../../legacy/server/logging');
jest.mock('./setup_logging');
import { LogLevel } from '../../logging';
import { LegacyLoggingServer } from './legacy_logging_server';
import { LegacyLoggingServer, LogRecord } from './legacy_logging_server';
test('correctly forwards log records.', () => {
const loggingServer = new LegacyLoggingServer({ events: {} });
@ -29,28 +27,37 @@ test('correctly forwards log records.', () => {
loggingServer.events.on('log', onLogMock);
const timestamp = 1554433221100;
const firstLogRecord = {
const firstLogRecord: LogRecord = {
timestamp: new Date(timestamp),
pid: 5355,
level: LogLevel.Info,
level: {
id: 'info',
value: 5,
},
context: 'some-context',
message: 'some-message',
};
const secondLogRecord = {
const secondLogRecord: LogRecord = {
timestamp: new Date(timestamp),
pid: 5355,
level: LogLevel.Error,
level: {
id: 'error',
value: 3,
},
context: 'some-context.sub-context',
message: 'some-message',
meta: { unknown: 2 },
error: new Error('some-error'),
};
const thirdLogRecord = {
const thirdLogRecord: LogRecord = {
timestamp: new Date(timestamp),
pid: 5355,
level: LogLevel.Trace,
level: {
id: 'trace',
value: 7,
},
context: 'some-context.sub-context',
message: 'some-message',
meta: { tags: ['important', 'tags'], unknown: 2 },

View file

@ -17,29 +17,40 @@
* under the License.
*/
import { ServerExtType } from '@hapi/hapi';
import Podium from '@hapi/podium';
// @ts-expect-error: implicit any for JS file
import { Config } from '../../../../legacy/server/config';
// @ts-expect-error: implicit any for JS file
import { setupLogging } from '../../../../legacy/server/logging';
import { LogLevel, LogRecord } from '../../logging';
import { LegacyVars } from '../../types';
import { ServerExtType, Server } from '@hapi/hapi';
import Podium from 'podium';
import { setupLogging } from './setup_logging';
import { attachMetaData } from './metadata';
import { legacyLoggingConfigSchema } from './schema';
export const metadataSymbol = Symbol('log message with metadata');
export function attachMetaData(message: string, metadata: LegacyVars = {}) {
return {
[metadataSymbol]: {
message,
metadata,
},
};
// these LogXXX types are duplicated to avoid a cross dependency with the @kbn/logging package.
// typescript will error if they diverge at some point.
type LogLevelId = 'all' | 'fatal' | 'error' | 'warn' | 'info' | 'debug' | 'trace' | 'off';
interface LogLevel {
id: LogLevelId;
value: number;
}
export interface LogRecord {
timestamp: Date;
level: LogLevel;
context: string;
message: string;
error?: Error;
meta?: { [name: string]: any };
pid: number;
}
/** True when the object has no own enumerable keys. */
const isEmptyObject = (obj: object): boolean => {
  return Object.keys(obj).length === 0;
};
function getDataToLog(error: Error | undefined, metadata: object, message: string) {
if (error) return error;
if (!isEmptyObject(metadata)) return attachMetaData(message, metadata);
if (error) {
return error;
}
if (!isEmptyObject(metadata)) {
return attachMetaData(message, metadata);
}
return message;
}
@ -50,7 +61,7 @@ interface PluginRegisterParams {
options: PluginRegisterParams['options']
) => Promise<void>;
};
options: LegacyVars;
options: Record<string, any>;
}
/**
@ -84,22 +95,19 @@ export class LegacyLoggingServer {
private onPostStopCallback?: () => void;
constructor(legacyLoggingConfig: Readonly<LegacyVars>) {
constructor(legacyLoggingConfig: any) {
// We set `ops.interval` to max allowed number and `ops` filter to value
// that doesn't exist to avoid logging of ops at all, if turned on it will be
// logged by the "legacy" Kibana.
const config = {
logging: {
...legacyLoggingConfig,
events: {
...legacyLoggingConfig.events,
ops: '__no-ops__',
},
const { value: loggingConfig } = legacyLoggingConfigSchema.validate({
...legacyLoggingConfig,
events: {
...legacyLoggingConfig.events,
ops: '__no-ops__',
},
ops: { interval: 2147483647 },
};
});
setupLogging(this, Config.withDefaultSchema(config));
setupLogging((this as unknown) as Server, loggingConfig, 2147483647);
}
public register({ plugin: { register }, options }: PluginRegisterParams): Promise<void> {

View file

@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { EventData, isEventData } from './metadata';
/**
 * Fields common to every legacy logging event.
 */
export interface BaseEvent {
  /** Discriminant naming the event type (e.g. 'response', 'ops', 'error'). */
  event: string;
  /** Event time as a number — presumably epoch milliseconds; TODO confirm against emitters. */
  timestamp: number;
  /** Process id of the emitting process. */
  pid: number;
  /** Optional tags attached to the event. */
  tags?: string[];
}
/**
 * Event describing a completed HTTP response.
 */
export interface ResponseEvent extends BaseEvent {
  event: 'response';
  method: 'GET' | 'POST' | 'PUT' | 'DELETE';
  statusCode: number;
  /** Request path (without query string — the query is carried separately below). */
  path: string;
  headers: Record<string, string | string[]>;
  responsePayload: string;
  /** Response time, kept as a string — presumably pre-formatted ms; TODO confirm. */
  responseTime: string;
  /** Parsed query-string parameters of the request. */
  query: Record<string, any>;
}
/**
 * Periodic process/OS metrics event — presumably emitted by the
 * `@hapi/good` ops reporter; confirm against the plugin configuration.
 */
export interface OpsEvent extends BaseEvent {
  event: 'ops';
  os: {
    /** OS load averages, kept as strings. */
    load: string[];
  };
  /** Process metrics (memory, uptime, …) — shape not fixed here. */
  proc: Record<string, any>;
  load: string;
}
/**
 * Event explicitly tagged as an error.
 */
export interface ErrorEvent extends BaseEvent {
  event: 'error';
  error: Error;
  /** URL associated with the error — presumably the request URL; TODO confirm. */
  url: string;
}
/**
 * Event carrying an Error without the explicit 'error' discriminant;
 * detected structurally via `isUndeclaredErrorEvent`.
 */
export interface UndeclaredErrorEvent extends BaseEvent {
  error: Error;
}
/**
 * Plain log event whose payload matches the metadata-carrying `EventData`
 * shape; detected via `isLogEvent` / `isEventData`.
 */
export interface LogEvent extends BaseEvent {
  data: EventData;
}
/**
 * Fallback shape for events not matched by any other type.
 * NOTE(review): 'Unkown' is a typo for 'Unknown', but the name is exported —
 * renaming would break importers, so it is kept as-is.
 */
export interface UnkownEvent extends BaseEvent {
  data: string | Record<string, any>;
}
/**
 * Union of all legacy logging event shapes.
 * Use the `is*` type guards to narrow to a concrete variant.
 */
export type AnyEvent =
  | ResponseEvent
  | OpsEvent
  | ErrorEvent
  | UndeclaredErrorEvent
  | LogEvent
  | UnkownEvent;
/** Narrows to ResponseEvent via the 'response' discriminant. */
export function isResponseEvent(event: AnyEvent): event is ResponseEvent {
  return event.event === 'response';
}

/** Narrows to OpsEvent via the 'ops' discriminant. */
export function isOpsEvent(event: AnyEvent): event is OpsEvent {
  return event.event === 'ops';
}

/** Narrows to ErrorEvent via the 'error' discriminant. */
export function isErrorEvent(event: AnyEvent): event is ErrorEvent {
  return event.event === 'error';
}

/** Narrows to LogEvent by structurally inspecting its `data` payload. */
export function isLogEvent(event: AnyEvent): event is LogEvent {
  return isEventData((event as LogEvent).data);
}

/** Narrows to UndeclaredErrorEvent: any event carrying an Error instance. */
export function isUndeclaredErrorEvent(event: AnyEvent): event is UndeclaredErrorEvent {
  return (event as any).error instanceof Error;
}

View file

@ -19,16 +19,29 @@
import Stream from 'stream';
import moment from 'moment-timezone';
import { get, _ } from 'lodash';
import _ from 'lodash';
import queryString from 'query-string';
import numeral from '@elastic/numeral';
import chalk from 'chalk';
// @ts-expect-error missing type def
import stringify from 'json-stringify-safe';
import applyFiltersToKeys from './apply_filters_to_keys';
import { inspect } from 'util';
import { logWithMetadata } from './log_with_metadata';
function serializeError(err = {}) {
import { applyFiltersToKeys } from './utils';
import { getLogEventData } from './metadata';
import { LegacyLoggingConfig } from './schema';
import {
AnyEvent,
isResponseEvent,
isOpsEvent,
isErrorEvent,
isLogEvent,
isUndeclaredErrorEvent,
} from './log_events';
export type LogFormatConfig = Pick<LegacyLoggingConfig, 'json' | 'dest' | 'timezone' | 'filter'>;
function serializeError(err: any = {}) {
return {
message: err.message,
name: err.name,
@ -38,34 +51,37 @@ function serializeError(err = {}) {
};
}
const levelColor = function (code) {
if (code < 299) return chalk.green(code);
if (code < 399) return chalk.yellow(code);
if (code < 499) return chalk.magentaBright(code);
return chalk.red(code);
/**
 * Colors an HTTP status code for terminal output by status class:
 * green for 2xx, yellow for 3xx, bright magenta for 4xx, red for 5xx+.
 * Fix: the previous boundaries (`< 299`, `< 399`, `< 499`) misclassified the
 * last code of each class — e.g. 299 (a 2xx success) was rendered yellow.
 */
const levelColor = function (code: number) {
  if (code < 300) return chalk.green(String(code));
  if (code < 400) return chalk.yellow(String(code));
  if (code < 500) return chalk.magentaBright(String(code));
  return chalk.red(String(code));
};
export default class TransformObjStream extends Stream.Transform {
constructor(config) {
export abstract class BaseLogFormat extends Stream.Transform {
constructor(private readonly config: LogFormatConfig) {
super({
readableObjectMode: false,
writableObjectMode: true,
});
this.config = config;
}
filter(data) {
if (!this.config.filter) return data;
abstract format(data: Record<string, any>): string;
filter(data: Record<string, unknown>) {
if (!this.config.filter) {
return data;
}
return applyFiltersToKeys(data, this.config.filter);
}
_transform(event, enc, next) {
_transform(event: AnyEvent, enc: string, next: Stream.TransformCallback) {
const data = this.filter(this.readEvent(event));
this.push(this.format(data) + '\n');
next();
}
extractAndFormatTimestamp(data, format) {
extractAndFormatTimestamp(data: Record<string, any>, format?: string) {
const { timezone } = this.config;
const date = moment(data['@timestamp']);
if (timezone) {
@ -74,18 +90,18 @@ export default class TransformObjStream extends Stream.Transform {
return date.format(format);
}
readEvent(event) {
const data = {
readEvent(event: AnyEvent) {
const data: Record<string, any> = {
type: event.event,
'@timestamp': event.timestamp,
tags: [].concat(event.tags || []),
tags: [...(event.tags || [])],
pid: event.pid,
};
if (data.type === 'response') {
if (isResponseEvent(event)) {
_.defaults(data, _.pick(event, ['method', 'statusCode']));
const source = get(event, 'source', {});
const source = _.get(event, 'source', {});
data.req = {
url: event.path,
method: event.method || '',
@ -95,21 +111,21 @@ export default class TransformObjStream extends Stream.Transform {
referer: source.referer,
};
let contentLength = 0;
if (typeof event.responsePayload === 'object') {
contentLength = stringify(event.responsePayload).length;
} else {
contentLength = String(event.responsePayload).length;
}
const contentLength =
event.responsePayload === 'object'
? stringify(event.responsePayload).length
: String(event.responsePayload).length;
data.res = {
statusCode: event.statusCode,
responseTime: event.responseTime,
contentLength: contentLength,
contentLength,
};
const query = queryString.stringify(event.query, { sort: false });
if (query) data.req.url += '?' + query;
if (query) {
data.req.url += '?' + query;
}
data.message = data.req.method.toUpperCase() + ' ';
data.message += data.req.url;
@ -118,38 +134,38 @@ export default class TransformObjStream extends Stream.Transform {
data.message += ' ';
data.message += chalk.gray(data.res.responseTime + 'ms');
data.message += chalk.gray(' - ' + numeral(contentLength).format('0.0b'));
} else if (data.type === 'ops') {
} else if (isOpsEvent(event)) {
_.defaults(data, _.pick(event, ['pid', 'os', 'proc', 'load']));
data.message = chalk.gray('memory: ');
data.message += numeral(get(data, 'proc.mem.heapUsed')).format('0.0b');
data.message += numeral(_.get(data, 'proc.mem.heapUsed')).format('0.0b');
data.message += ' ';
data.message += chalk.gray('uptime: ');
data.message += numeral(get(data, 'proc.uptime')).format('00:00:00');
data.message += numeral(_.get(data, 'proc.uptime')).format('00:00:00');
data.message += ' ';
data.message += chalk.gray('load: [');
data.message += get(data, 'os.load', [])
.map(function (val) {
data.message += _.get(data, 'os.load', [])
.map((val: number) => {
return numeral(val).format('0.00');
})
.join(' ');
data.message += chalk.gray(']');
data.message += ' ';
data.message += chalk.gray('delay: ');
data.message += numeral(get(data, 'proc.delay')).format('0.000');
} else if (data.type === 'error') {
data.message += numeral(_.get(data, 'proc.delay')).format('0.000');
} else if (isErrorEvent(event)) {
data.level = 'error';
data.error = serializeError(event.error);
data.url = event.url;
const message = get(event, 'error.message');
const message = _.get(event, 'error.message');
data.message = message || 'Unknown error (no message)';
} else if (event.error instanceof Error) {
} else if (isUndeclaredErrorEvent(event)) {
data.type = 'error';
data.level = _.includes(event.tags, 'fatal') ? 'fatal' : 'error';
data.error = serializeError(event.error);
const message = get(event, 'error.message');
const message = _.get(event, 'error.message');
data.message = message || 'Unknown error object (no message)';
} else if (logWithMetadata.isLogEvent(event.data)) {
_.assign(data, logWithMetadata.getLogEventData(event.data));
} else if (isLogEvent(event)) {
_.assign(data, getLogEventData(event.data));
} else {
data.message = _.isString(event.data) ? event.data : inspect(event.data);
}

View file

@ -19,30 +19,31 @@
import moment from 'moment';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { attachMetaData } from '../../../../src/core/server/legacy/logging/legacy_logging_server';
import { createListStream, createPromiseFromStreams } from '../../../core/server/utils';
import KbnLoggerJsonFormat from './log_format_json';
import { attachMetaData } from './metadata';
import { createListStream, createPromiseFromStreams } from './test_utils';
import { KbnLoggerJsonFormat } from './log_format_json';
const time = +moment('2010-01-01T05:15:59Z', moment.ISO_8601);
const makeEvent = (eventType) => ({
const makeEvent = (eventType: string) => ({
event: eventType,
timestamp: time,
});
describe('KbnLoggerJsonFormat', () => {
const config = {};
const config: any = {};
describe('event types and messages', () => {
let format;
let format: KbnLoggerJsonFormat;
beforeEach(() => {
format = new KbnLoggerJsonFormat(config);
});
it('log', async () => {
const result = await createPromiseFromStreams([createListStream([makeEvent('log')]), format]);
const result = await createPromiseFromStreams<string>([
createListStream([makeEvent('log')]),
format,
]);
const { type, message } = JSON.parse(result);
expect(type).toBe('log');
@ -64,7 +65,7 @@ describe('KbnLoggerJsonFormat', () => {
referer: 'elastic.co',
},
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { type, method, statusCode, message, req } = JSON.parse(result);
expect(type).toBe('response');
@ -82,7 +83,7 @@ describe('KbnLoggerJsonFormat', () => {
load: [1, 1, 2],
},
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { type, message } = JSON.parse(result);
expect(type).toBe('ops');
@ -98,7 +99,7 @@ describe('KbnLoggerJsonFormat', () => {
}),
tags: ['tag1', 'tag2'],
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, prop1, prop2, tags } = JSON.parse(result);
expect(level).toBe(undefined);
@ -117,7 +118,7 @@ describe('KbnLoggerJsonFormat', () => {
}),
tags: ['tag1', 'tag2'],
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, prop1, prop2, tags } = JSON.parse(result);
expect(level).toBe(undefined);
@ -132,7 +133,7 @@ describe('KbnLoggerJsonFormat', () => {
data: attachMetaData('message for event'),
tags: ['tag1', 'tag2'],
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, prop1, prop2, tags } = JSON.parse(result);
expect(level).toBe(undefined);
@ -151,7 +152,7 @@ describe('KbnLoggerJsonFormat', () => {
}),
tags: ['tag1', 'tag2'],
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, prop1, prop2, tags } = JSON.parse(result);
expect(level).toBe('error');
@ -170,7 +171,7 @@ describe('KbnLoggerJsonFormat', () => {
message: 'test error 0',
},
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, error } = JSON.parse(result);
expect(level).toBe('error');
@ -183,7 +184,7 @@ describe('KbnLoggerJsonFormat', () => {
event: 'error',
error: {},
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, error } = JSON.parse(result);
expect(level).toBe('error');
@ -193,9 +194,9 @@ describe('KbnLoggerJsonFormat', () => {
it('event error instanceof Error', async () => {
const event = {
error: new Error('test error 2'),
error: new Error('test error 2') as any,
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, error } = JSON.parse(result);
expect(level).toBe('error');
@ -210,10 +211,10 @@ describe('KbnLoggerJsonFormat', () => {
it('event error instanceof Error - fatal', async () => {
const event = {
error: new Error('test error 2'),
error: new Error('test error 2') as any,
tags: ['fatal', 'tag2'],
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { tags, level, message, error } = JSON.parse(result);
expect(tags).toEqual(['fatal', 'tag2']);
@ -229,9 +230,9 @@ describe('KbnLoggerJsonFormat', () => {
it('event error instanceof Error, no message', async () => {
const event = {
error: new Error(''),
error: new Error('') as any,
};
const result = await createPromiseFromStreams([createListStream([event]), format]);
const result = await createPromiseFromStreams<string>([createListStream([event]), format]);
const { level, message, error } = JSON.parse(result);
expect(level).toBe('error');
@ -250,18 +251,24 @@ describe('KbnLoggerJsonFormat', () => {
it('logs in UTC', async () => {
const format = new KbnLoggerJsonFormat({
timezone: 'UTC',
});
} as any);
const result = await createPromiseFromStreams([createListStream([makeEvent('log')]), format]);
const result = await createPromiseFromStreams<string>([
createListStream([makeEvent('log')]),
format,
]);
const { '@timestamp': timestamp } = JSON.parse(result);
expect(timestamp).toBe(moment.utc(time).format());
});
it('logs in local timezone timezone is undefined', async () => {
const format = new KbnLoggerJsonFormat({});
const format = new KbnLoggerJsonFormat({} as any);
const result = await createPromiseFromStreams([createListStream([makeEvent('log')]), format]);
const result = await createPromiseFromStreams<string>([
createListStream([makeEvent('log')]),
format,
]);
const { '@timestamp': timestamp } = JSON.parse(result);
expect(timestamp).toBe(moment(time).format());

View file

@ -17,15 +17,16 @@
* under the License.
*/
import LogFormat from './log_format';
// @ts-expect-error missing type def
import stringify from 'json-stringify-safe';
import { BaseLogFormat } from './log_format';
const stripColors = function (string) {
/** Strips ANSI color escape sequences (ESC … 'm') from a string. */
const stripColors = (input: string): string => input.replace(/\u001b[^m]+m/g, '');
export default class KbnLoggerJsonFormat extends LogFormat {
format(data) {
export class KbnLoggerJsonFormat extends BaseLogFormat {
format(data: Record<string, any>) {
data.message = stripColors(data.message);
data['@timestamp'] = this.extractAndFormatTimestamp(data);
return stringify(data);

View file

@ -18,12 +18,10 @@
*/
import moment from 'moment';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { attachMetaData } from '../../../../src/core/server/legacy/logging/legacy_logging_server';
import { createListStream, createPromiseFromStreams } from '../../../core/server/utils';
import KbnLoggerStringFormat from './log_format_string';
import { attachMetaData } from './metadata';
import { createListStream, createPromiseFromStreams } from './test_utils';
import { KbnLoggerStringFormat } from './log_format_string';
const time = +moment('2010-01-01T05:15:59Z', moment.ISO_8601);
@ -39,7 +37,7 @@ describe('KbnLoggerStringFormat', () => {
it('logs in UTC', async () => {
const format = new KbnLoggerStringFormat({
timezone: 'UTC',
});
} as any);
const result = await createPromiseFromStreams([createListStream([makeEvent()]), format]);
@ -47,7 +45,7 @@ describe('KbnLoggerStringFormat', () => {
});
it('logs in local timezone when timezone is undefined', async () => {
const format = new KbnLoggerStringFormat({});
const format = new KbnLoggerStringFormat({} as any);
const result = await createPromiseFromStreams([createListStream([makeEvent()]), format]);
@ -55,7 +53,7 @@ describe('KbnLoggerStringFormat', () => {
});
describe('with metadata', () => {
it('does not log meta data', async () => {
const format = new KbnLoggerStringFormat({});
const format = new KbnLoggerStringFormat({} as any);
const event = {
data: attachMetaData('message for event', {
prop1: 'value1',

View file

@ -20,11 +20,11 @@
import _ from 'lodash';
import chalk from 'chalk';
import LogFormat from './log_format';
import { BaseLogFormat } from './log_format';
const statuses = ['err', 'info', 'error', 'warning', 'fatal', 'status', 'debug'];
const typeColors = {
const typeColors: Record<string, string> = {
log: 'white',
req: 'green',
res: 'green',
@ -45,18 +45,19 @@ const typeColors = {
scss: 'magentaBright',
};
const color = _.memoize(function (name) {
const color = _.memoize((name: string): ((...text: string[]) => string) => {
// @ts-expect-error couldn't even get rid of the error with an any cast
return chalk[typeColors[name]] || _.identity;
});
const type = _.memoize(function (t) {
const type = _.memoize((t: string) => {
return color(t)(_.pad(t, 7).slice(0, 7));
});
const workerType = process.env.kbnWorkerType ? `${type(process.env.kbnWorkerType)} ` : '';
export default class KbnLoggerStringFormat extends LogFormat {
format(data) {
export class KbnLoggerStringFormat extends BaseLogFormat {
format(data: Record<string, any>) {
const time = color('time')(this.extractAndFormatTimestamp(data, 'HH:mm:ss.SSS'));
const msg = data.error ? color('error')(data.error.stack) : color('message')(data.message);

View file

@ -17,13 +17,15 @@
* under the License.
*/
import { ErrorEvent } from './log_events';
import { LogInterceptor } from './log_interceptor';
function stubClientErrorEvent(errorMeta) {
function stubClientErrorEvent(errorMeta: Record<string, any>): ErrorEvent {
const error = new Error();
Object.assign(error, errorMeta);
return {
event: 'error',
url: '',
pid: 1234,
timestamp: Date.now(),
tags: ['connection', 'client', 'error'],
@ -35,7 +37,7 @@ const stubEconnresetEvent = () => stubClientErrorEvent({ code: 'ECONNRESET' });
const stubEpipeEvent = () => stubClientErrorEvent({ errno: 'EPIPE' });
const stubEcanceledEvent = () => stubClientErrorEvent({ errno: 'ECANCELED' });
function assertDowngraded(transformed) {
function assertDowngraded(transformed: Record<string, any>) {
expect(!!transformed).toBe(true);
expect(transformed).toHaveProperty('event', 'log');
expect(transformed).toHaveProperty('tags');
@ -47,13 +49,13 @@ describe('server logging LogInterceptor', () => {
it('transforms ECONNRESET events', () => {
const interceptor = new LogInterceptor();
const event = stubEconnresetEvent();
assertDowngraded(interceptor.downgradeIfEconnreset(event));
assertDowngraded(interceptor.downgradeIfEconnreset(event)!);
});
it('does not match if the tags are not in order', () => {
const interceptor = new LogInterceptor();
const event = stubEconnresetEvent();
event.tags = [...event.tags.slice(1), event.tags[0]];
event.tags = [...event.tags!.slice(1), event.tags![0]];
expect(interceptor.downgradeIfEconnreset(event)).toBe(null);
});
@ -75,13 +77,13 @@ describe('server logging LogInterceptor', () => {
it('transforms EPIPE events', () => {
const interceptor = new LogInterceptor();
const event = stubEpipeEvent();
assertDowngraded(interceptor.downgradeIfEpipe(event));
assertDowngraded(interceptor.downgradeIfEpipe(event)!);
});
it('does not match if the tags are not in order', () => {
const interceptor = new LogInterceptor();
const event = stubEpipeEvent();
event.tags = [...event.tags.slice(1), event.tags[0]];
event.tags = [...event.tags!.slice(1), event.tags![0]];
expect(interceptor.downgradeIfEpipe(event)).toBe(null);
});
@ -103,13 +105,13 @@ describe('server logging LogInterceptor', () => {
it('transforms ECANCELED events', () => {
const interceptor = new LogInterceptor();
const event = stubEcanceledEvent();
assertDowngraded(interceptor.downgradeIfEcanceled(event));
assertDowngraded(interceptor.downgradeIfEcanceled(event)!);
});
it('does not match if the tags are not in order', () => {
const interceptor = new LogInterceptor();
const event = stubEcanceledEvent();
event.tags = [...event.tags.slice(1), event.tags[0]];
event.tags = [...event.tags!.slice(1), event.tags![0]];
expect(interceptor.downgradeIfEcanceled(event)).toBe(null);
});
@ -131,7 +133,7 @@ describe('server logging LogInterceptor', () => {
it('transforms https requests when serving http errors', () => {
const interceptor = new LogInterceptor();
const event = stubClientErrorEvent({ message: 'Parse Error', code: 'HPE_INVALID_METHOD' });
assertDowngraded(interceptor.downgradeIfHTTPSWhenHTTP(event));
assertDowngraded(interceptor.downgradeIfHTTPSWhenHTTP(event)!);
});
it('ignores non events', () => {
@ -150,7 +152,7 @@ describe('server logging LogInterceptor', () => {
'4584650176:error:1408F09C:SSL routines:ssl3_get_record:http request:../deps/openssl/openssl/ssl/record/ssl3_record.c:322:\n';
const interceptor = new LogInterceptor();
const event = stubClientErrorEvent({ message });
assertDowngraded(interceptor.downgradeIfHTTPWhenHTTPS(event));
assertDowngraded(interceptor.downgradeIfHTTPWhenHTTPS(event)!);
});
it('ignores non events', () => {

View file

@ -19,6 +19,7 @@
import Stream from 'stream';
import { get, isEqual } from 'lodash';
import { AnyEvent } from './log_events';
/**
* Matches error messages when clients connect via HTTP instead of HTTPS; see unit test for full message. Warning: this can change when Node
@ -26,25 +27,32 @@ import { get, isEqual } from 'lodash';
*/
const OPENSSL_GET_RECORD_REGEX = /ssl3_get_record:http/;
function doTagsMatch(event, tags) {
return isEqual(get(event, 'tags'), tags);
/**
 * True when the event's tags deep-equal the expected list (order-sensitive;
 * an event with no tags only matches via isEqual(undefined, …) → false).
 */
function doTagsMatch(event: AnyEvent, expectedTags: string[]) {
  return isEqual(event.tags, expectedTags);
}
function doesMessageMatch(errorMessage, match) {
if (!errorMessage) return false;
const isRegExp = match instanceof RegExp;
if (isRegExp) return match.test(errorMessage);
function doesMessageMatch(errorMessage: string, match: RegExp | string) {
if (!errorMessage) {
return false;
}
if (match instanceof RegExp) {
return match.test(errorMessage);
}
return errorMessage === match;
}
// converts the given event into a debug log if it's an error of the given type
function downgradeIfErrorType(errorType, event) {
function downgradeIfErrorType(errorType: string, event: AnyEvent) {
const isClientError = doTagsMatch(event, ['connection', 'client', 'error']);
if (!isClientError) return null;
if (!isClientError) {
return null;
}
const matchesErrorType =
get(event, 'error.code') === errorType || get(event, 'error.errno') === errorType;
if (!matchesErrorType) return null;
if (!matchesErrorType) {
return null;
}
const errorTypeTag = errorType.toLowerCase();
@ -57,12 +65,14 @@ function downgradeIfErrorType(errorType, event) {
};
}
function downgradeIfErrorMessage(match, event) {
function downgradeIfErrorMessage(match: RegExp | string, event: AnyEvent) {
const isClientError = doTagsMatch(event, ['connection', 'client', 'error']);
const errorMessage = get(event, 'error.message');
const matchesErrorMessage = isClientError && doesMessageMatch(errorMessage, match);
if (!matchesErrorMessage) return null;
if (!matchesErrorMessage) {
return null;
}
return {
event: 'log',
@ -91,7 +101,7 @@ export class LogInterceptor extends Stream.Transform {
*
* @param {object} - log event
*/
downgradeIfEconnreset(event) {
downgradeIfEconnreset(event: AnyEvent) {
return downgradeIfErrorType('ECONNRESET', event);
}
@ -105,7 +115,7 @@ export class LogInterceptor extends Stream.Transform {
*
* @param {object} - log event
*/
downgradeIfEpipe(event) {
downgradeIfEpipe(event: AnyEvent) {
return downgradeIfErrorType('EPIPE', event);
}
@ -119,19 +129,19 @@ export class LogInterceptor extends Stream.Transform {
*
* @param {object} - log event
*/
downgradeIfEcanceled(event) {
downgradeIfEcanceled(event: AnyEvent) {
return downgradeIfErrorType('ECANCELED', event);
}
downgradeIfHTTPSWhenHTTP(event) {
downgradeIfHTTPSWhenHTTP(event: AnyEvent) {
return downgradeIfErrorType('HPE_INVALID_METHOD', event);
}
downgradeIfHTTPWhenHTTPS(event) {
downgradeIfHTTPWhenHTTPS(event: AnyEvent) {
return downgradeIfErrorMessage(OPENSSL_GET_RECORD_REGEX, event);
}
_transform(event, enc, next) {
_transform(event: AnyEvent, enc: string, next: Stream.TransformCallback) {
const downgraded =
this.downgradeIfEconnreset(event) ||
this.downgradeIfEpipe(event) ||

View file

@ -17,27 +17,21 @@
* under the License.
*/
// @ts-expect-error missing type def
import { Squeeze } from '@hapi/good-squeeze';
import { createWriteStream as writeStr } from 'fs';
import { createWriteStream as writeStr, WriteStream } from 'fs';
import LogFormatJson from './log_format_json';
import LogFormatString from './log_format_string';
import { KbnLoggerJsonFormat } from './log_format_json';
import { KbnLoggerStringFormat } from './log_format_string';
import { LogInterceptor } from './log_interceptor';
import { LogFormatConfig } from './log_format';
// NOTE: legacy logger creates a new stream for each new access
// In https://github.com/elastic/kibana/pull/55937 we reach the max listeners
// default limit of 10 for process.stdout which starts a long warning/error
// thrown every time we start the server.
// In order to keep using the legacy logger until we remove it I'm just adding
// a new hard limit here.
process.stdout.setMaxListeners(25);
export function getLoggerStream({ events, config }) {
export function getLogReporter({ events, config }: { events: any; config: LogFormatConfig }) {
const squeeze = new Squeeze(events);
const format = config.json ? new LogFormatJson(config) : new LogFormatString(config);
const format = config.json ? new KbnLoggerJsonFormat(config) : new KbnLoggerStringFormat(config);
const logInterceptor = new LogInterceptor();
let dest;
let dest: WriteStream | NodeJS.WritableStream;
if (config.dest === 'stdout') {
dest = process.stdout;
} else {

View file

@ -16,30 +16,38 @@
* specific language governing permissions and limitations
* under the License.
*/
import { isPlainObject } from 'lodash';
import {
metadataSymbol,
attachMetaData,
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
} from '../../../../src/core/server/legacy/logging/legacy_logging_server';
export const metadataSymbol = Symbol('log message with metadata');
export const logWithMetadata = {
isLogEvent(eventData) {
return Boolean(isPlainObject(eventData) && eventData[metadataSymbol]);
},
export interface EventData {
[metadataSymbol]?: EventMetadata;
[key: string]: any;
}
getLogEventData(eventData) {
const { message, metadata } = eventData[metadataSymbol];
return {
...metadata,
message,
};
},
export interface EventMetadata {
message: string;
metadata: Record<string, any>;
}
decorateServer(server) {
server.decorate('server', 'logWithMetadata', (tags, message, metadata = {}) => {
server.log(tags, attachMetaData(message, metadata));
});
},
export const isEventData = (eventData: EventData) => {
return Boolean(isPlainObject(eventData) && eventData[metadataSymbol]);
};
export const getLogEventData = (eventData: EventData) => {
const { message, metadata } = eventData[metadataSymbol]!;
return {
...metadata,
message,
};
};
export const attachMetaData = (message: string, metadata: Record<string, any> = {}) => {
return {
[metadataSymbol]: {
message,
metadata,
},
};
};

View file

@ -20,13 +20,13 @@
import { isMaster, isWorker } from 'cluster';
import { Server } from '@hapi/hapi';
import { LogRotator } from './log_rotator';
import { KibanaConfig } from '../../kbn_server';
import { LegacyLoggingConfig } from '../schema';
let logRotator: LogRotator;
export async function setupLoggingRotate(server: Server, config: KibanaConfig) {
export async function setupLoggingRotate(server: Server, config: LegacyLoggingConfig) {
// If log rotate is not enabled we skip
if (!config.get('logging.rotate.enabled')) {
if (!config.rotate.enabled) {
return;
}
@ -38,7 +38,7 @@ export async function setupLoggingRotate(server: Server, config: KibanaConfig) {
// We don't want to run logging rotate server if
// we are not logging to a file
if (config.get('logging.dest') === 'stdout') {
if (config.dest === 'stdout') {
server.log(
['warning', 'logging:rotate'],
'Log rotation is enabled but logging.dest is configured for stdout. Set logging.dest to a file for this setting to take effect.'

View file

@ -19,10 +19,10 @@
import del from 'del';
import fs, { existsSync, mkdirSync, statSync, writeFileSync } from 'fs';
import { LogRotator } from './log_rotator';
import { tmpdir } from 'os';
import { dirname, join } from 'path';
import lodash from 'lodash';
import { LogRotator } from './log_rotator';
import { LegacyLoggingConfig } from '../schema';
const mockOn = jest.fn();
jest.mock('chokidar', () => ({
@ -32,19 +32,26 @@ jest.mock('chokidar', () => ({
})),
}));
lodash.throttle = (fn: any) => fn;
jest.mock('lodash', () => ({
...(jest.requireActual('lodash') as any),
throttle: (fn: any) => fn,
}));
const tempDir = join(tmpdir(), 'kbn_log_rotator_test');
const testFilePath = join(tempDir, 'log_rotator_test_log_file.log');
const createLogRotatorConfig: any = (logFilePath: string) => {
return new Map([
['logging.dest', logFilePath],
['logging.rotate.everyBytes', 2],
['logging.rotate.keepFiles', 2],
['logging.rotate.usePolling', false],
['logging.rotate.pollingInterval', 10000],
] as any);
const createLogRotatorConfig = (logFilePath: string): LegacyLoggingConfig => {
return {
dest: logFilePath,
rotate: {
enabled: true,
keepFiles: 2,
everyBytes: 2,
usePolling: false,
pollingInterval: 10000,
pollingPolicyTestTimeout: 4000,
},
} as LegacyLoggingConfig;
};
const mockServer: any = {
@ -62,7 +69,7 @@ describe('LogRotator', () => {
});
afterEach(() => {
del.sync(dirname(testFilePath), { force: true });
del.sync(tempDir, { force: true });
mockOn.mockClear();
});
@ -71,14 +78,14 @@ describe('LogRotator', () => {
const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
await logRotator.start();
expect(logRotator.running).toBe(true);
await logRotator.stop();
const testLogFileDir = dirname(testFilePath);
expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
expect(existsSync(join(tempDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
});
it('rotates log file when equal than set limit over time', async () => {

View file

@ -27,7 +27,7 @@ import { basename, dirname, join, sep } from 'path';
import { Observable } from 'rxjs';
import { first } from 'rxjs/operators';
import { promisify } from 'util';
import { KibanaConfig } from '../../kbn_server';
import { LegacyLoggingConfig } from '../schema';
const mkdirAsync = promisify(fs.mkdir);
const readdirAsync = promisify(fs.readdir);
@ -37,7 +37,7 @@ const unlinkAsync = promisify(fs.unlink);
const writeFileAsync = promisify(fs.writeFile);
export class LogRotator {
private readonly config: KibanaConfig;
private readonly config: LegacyLoggingConfig;
private readonly log: Server['log'];
public logFilePath: string;
public everyBytes: number;
@ -52,19 +52,19 @@ export class LogRotator {
private stalkerUsePollingPolicyTestTimeout: NodeJS.Timeout | null;
public shouldUsePolling: boolean;
constructor(config: KibanaConfig, server: Server) {
constructor(config: LegacyLoggingConfig, server: Server) {
this.config = config;
this.log = server.log.bind(server);
this.logFilePath = config.get('logging.dest');
this.everyBytes = config.get('logging.rotate.everyBytes');
this.keepFiles = config.get('logging.rotate.keepFiles');
this.logFilePath = config.dest;
this.everyBytes = config.rotate.everyBytes;
this.keepFiles = config.rotate.keepFiles;
this.running = false;
this.logFileSize = 0;
this.isRotating = false;
this.throttledRotate = throttle(async () => await this._rotate(), 5000);
this.stalker = null;
this.usePolling = config.get('logging.rotate.usePolling');
this.pollingInterval = config.get('logging.rotate.pollingInterval');
this.usePolling = config.rotate.usePolling;
this.pollingInterval = config.rotate.pollingInterval;
this.shouldUsePolling = false;
this.stalkerUsePollingPolicyTestTimeout = null;
}
@ -128,7 +128,10 @@ export class LogRotator {
};
// setup conditions that would fire the observable
this.stalkerUsePollingPolicyTestTimeout = setTimeout(() => completeFn(true), 15000);
this.stalkerUsePollingPolicyTestTimeout = setTimeout(
() => completeFn(true),
this.config.rotate.pollingPolicyTestTimeout || 15000
);
testWatcher.on('change', () => completeFn(false));
testWatcher.on('error', () => completeFn(true));
@ -152,7 +155,7 @@ export class LogRotator {
}
async _startLogFileSizeMonitor() {
this.usePolling = this.config.get('logging.rotate.usePolling');
this.usePolling = this.config.rotate.usePolling;
this.shouldUsePolling = await this._shouldUsePolling();
if (this.usePolling && !this.shouldUsePolling) {

View file

@ -0,0 +1,89 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Joi from 'joi';
const HANDLED_IN_KIBANA_PLATFORM = Joi.any().description(
'This key is handled in the new platform ONLY'
);
export interface LegacyLoggingConfig {
silent: boolean;
quiet: boolean;
verbose: boolean;
events: Record<string, any>;
dest: string;
filter: Record<string, any>;
json: boolean;
timezone?: string;
rotate: {
enabled: boolean;
everyBytes: number;
keepFiles: number;
pollingInterval: number;
usePolling: boolean;
pollingPolicyTestTimeout?: number;
};
}
export const legacyLoggingConfigSchema = Joi.object()
.keys({
appenders: HANDLED_IN_KIBANA_PLATFORM,
loggers: HANDLED_IN_KIBANA_PLATFORM,
root: HANDLED_IN_KIBANA_PLATFORM,
silent: Joi.boolean().default(false),
quiet: Joi.boolean().when('silent', {
is: true,
then: Joi.boolean().default(true).valid(true),
otherwise: Joi.boolean().default(false),
}),
verbose: Joi.boolean().when('quiet', {
is: true,
then: Joi.valid(false).default(false),
otherwise: Joi.boolean().default(false),
}),
events: Joi.any().default({}),
dest: Joi.string().default('stdout'),
filter: Joi.any().default({}),
json: Joi.boolean().when('dest', {
is: 'stdout',
then: Joi.boolean().default(!process.stdout.isTTY),
otherwise: Joi.boolean().default(true),
}),
timezone: Joi.string(),
rotate: Joi.object()
.keys({
enabled: Joi.boolean().default(false),
everyBytes: Joi.number()
// > 1MB
.greater(1048576)
// < 1GB
.less(1073741825)
// 10MB
.default(10485760),
keepFiles: Joi.number().greater(2).less(1024).default(7),
pollingInterval: Joi.number().greater(5000).less(3600000).default(10000),
usePolling: Joi.boolean().default(false),
})
.default(),
})
.default();

View file

@ -0,0 +1,52 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// @ts-expect-error missing typedef
import good from '@elastic/good';
import { Server } from '@hapi/hapi';
import { LegacyLoggingConfig } from './schema';
import { getLoggingConfiguration } from './get_logging_config';
export async function setupLogging(
server: Server,
config: LegacyLoggingConfig,
opsInterval: number
) {
// NOTE: legacy logger creates a new stream for each new access
// In https://github.com/elastic/kibana/pull/55937 we reach the max listeners
// default limit of 10 for process.stdout which starts a long warning/error
// thrown every time we start the server.
// In order to keep using the legacy logger until we remove it I'm just adding
// a new hard limit here.
process.stdout.setMaxListeners(25);
return await server.register({
plugin: good,
options: getLoggingConfiguration(config, opsInterval),
});
}
export function reconfigureLogging(
server: Server,
config: LegacyLoggingConfig,
opsInterval: number
) {
const loggingOptions = getLoggingConfiguration(config, opsInterval);
(server.plugins as any)['@elastic/good'].reconfigure(loggingOptions);
}

View file

@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { createListStream, createPromiseFromStreams } from './streams';

View file

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { pipeline, Writable, Readable } from 'stream';
/**
* Create a Readable stream that provides the items
* from a list as objects to subscribers
*
* @param {Array<any>} items - the list of items to provide
* @return {Readable}
*/
export function createListStream<T = any>(items: T | T[] = []) {
const queue = Array.isArray(items) ? [...items] : [items];
return new Readable({
objectMode: true,
read(size) {
queue.splice(0, size).forEach((item) => {
this.push(item);
});
if (!queue.length) {
this.push(null);
}
},
});
}
/**
* Take an array of streams, pipe the output
* from each one into the next, listening for
* errors from any of the streams, and then resolve
* the promise once the final stream has finished
* writing/reading.
*
* If the last stream is readable, it's final value
* will be provided as the promise value.
*
* Errors emitted from any stream will cause
* the promise to be rejected with that error.
*
* @param {Array<Stream>} streams
* @return {Promise<any>}
*/
function isReadable(stream: Readable | Writable): stream is Readable {
return 'read' in stream && typeof stream.read === 'function';
}
export async function createPromiseFromStreams<T>(streams: [Readable, ...Writable[]]): Promise<T> {
let finalChunk: any;
const last = streams[streams.length - 1];
if (!isReadable(last) && streams.length === 1) {
// For a nicer error than what stream.pipeline throws
throw new Error('A minimum of 2 streams is required when a non-readable stream is given');
}
if (isReadable(last)) {
// We are pushing a writable stream to capture the last chunk
streams.push(
new Writable({
// Use object mode even when "last" stream isn't. This allows to
// capture the last chunk as-is.
objectMode: true,
write(chunk, enc, done) {
finalChunk = chunk;
done();
},
})
);
}
return new Promise((resolve, reject) => {
// @ts-expect-error 'pipeline' doesn't support variable length of arguments
pipeline(...streams, (err) => {
if (err) return reject(err);
resolve(finalChunk);
});
});
}

View file

@ -17,7 +17,7 @@
* under the License.
*/
import applyFiltersToKeys from './apply_filters_to_keys';
import { applyFiltersToKeys } from './apply_filters_to_keys';
describe('applyFiltersToKeys(obj, actionsByKey)', function () {
it('applies for each key+prop in actionsByKey', function () {

View file

@ -17,15 +17,15 @@
* under the License.
*/
function toPojo(obj) {
function toPojo(obj: Record<string, unknown>) {
return JSON.parse(JSON.stringify(obj));
}
function replacer(match, group) {
function replacer(match: string, group: any[]) {
return new Array(group.length + 1).join('X');
}
function apply(obj, key, action) {
function apply(obj: Record<string, unknown>, key: string, action: string) {
for (const k in obj) {
if (obj.hasOwnProperty(k)) {
let val = obj[k];
@ -44,14 +44,17 @@ function apply(obj, key, action) {
}
}
} else if (typeof val === 'object') {
val = apply(val, key, action);
val = apply(val as Record<string, any>, key, action);
}
}
}
return obj;
}
export default function (obj, actionsByKey) {
export function applyFiltersToKeys(
obj: Record<string, unknown>,
actionsByKey: Record<string, string>
) {
return Object.keys(actionsByKey).reduce((output, key) => {
return apply(output, key, actionsByKey[key]);
}, toPojo(obj));

View file

@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { applyFiltersToKeys } from './apply_filters_to_keys';

View file

@ -0,0 +1,11 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "target",
"stripInternal": false,
"declaration": true,
"declarationMap": true,
"types": ["jest", "node"]
},
"include": ["./src/**/*"]
}

View file

@ -0,0 +1 @@
../../yarn.lock

View file

@ -17,10 +17,10 @@
* under the License.
*/
jest.mock('../legacy_logging_server');
jest.mock('@kbn/legacy-logging');
import { LogRecord, LogLevel } from '../../../logging';
import { LegacyLoggingServer } from '../legacy_logging_server';
import { LegacyLoggingServer } from '@kbn/legacy-logging';
import { LegacyAppender } from './legacy_appender';
afterEach(() => (LegacyLoggingServer as any).mockClear());

View file

@ -18,8 +18,8 @@
*/
import { schema } from '@kbn/config-schema';
import { DisposableAppender, LogRecord } from '../../../logging';
import { LegacyLoggingServer } from '../legacy_logging_server';
import { LegacyLoggingServer } from '@kbn/legacy-logging';
import { DisposableAppender, LogRecord } from '@kbn/logging';
import { LegacyVars } from '../../types';
export interface LegacyAppenderConfig {

View file

@ -25,7 +25,7 @@ jest.mock('fs', () => ({
const dynamicProps = { process: { pid: expect.any(Number) } };
jest.mock('../../../legacy/server/logging/rotate', () => ({
jest.mock('@kbn/legacy-logging', () => ({
setupLoggingRotate: jest.fn().mockImplementation(() => Promise.resolve({})),
}));

View file

@ -19,6 +19,7 @@
import Joi from 'joi';
import os from 'os';
import { legacyLoggingConfigSchema } from '@kbn/legacy-logging';
const HANDLED_IN_NEW_PLATFORM = Joi.any().description(
'This key is handled in the new platform ONLY'
@ -77,52 +78,7 @@ export default () =>
uiSettings: HANDLED_IN_NEW_PLATFORM,
logging: Joi.object()
.keys({
appenders: HANDLED_IN_NEW_PLATFORM,
loggers: HANDLED_IN_NEW_PLATFORM,
root: HANDLED_IN_NEW_PLATFORM,
silent: Joi.boolean().default(false),
quiet: Joi.boolean().when('silent', {
is: true,
then: Joi.default(true).valid(true),
otherwise: Joi.default(false),
}),
verbose: Joi.boolean().when('quiet', {
is: true,
then: Joi.valid(false).default(false),
otherwise: Joi.default(false),
}),
events: Joi.any().default({}),
dest: Joi.string().default('stdout'),
filter: Joi.any().default({}),
json: Joi.boolean().when('dest', {
is: 'stdout',
then: Joi.default(!process.stdout.isTTY),
otherwise: Joi.default(true),
}),
timezone: Joi.string().allow(false).default('UTC'),
rotate: Joi.object()
.keys({
enabled: Joi.boolean().default(false),
everyBytes: Joi.number()
// > 1MB
.greater(1048576)
// < 1GB
.less(1073741825)
// 10MB
.default(10485760),
keepFiles: Joi.number().greater(2).less(1024).default(7),
pollingInterval: Joi.number().greater(5000).less(3600000).default(10000),
usePolling: Joi.boolean().default(false),
})
.default(),
})
.default(),
logging: legacyLoggingConfigSchema,
ops: Joi.object({
interval: Joi.number().default(5000),

View file

@ -18,12 +18,12 @@
*/
import { constant, once, compact, flatten } from 'lodash';
import { reconfigureLogging } from '@kbn/legacy-logging';
import { isWorker } from 'cluster';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { fromRoot, pkg } from '../../core/server/utils';
import { Config } from './config';
import loggingConfiguration from './logging/configuration';
import httpMixin from './http';
import { coreMixin } from './core';
import { loggingMixin } from './logging';
@ -154,13 +154,17 @@ export default class KbnServer {
applyLoggingConfiguration(settings) {
const config = Config.withDefaultSchema(settings);
const loggingOptions = loggingConfiguration(config);
const loggingConfig = config.get('logging');
const opsConfig = config.get('ops');
const subset = {
ops: config.get('ops'),
logging: config.get('logging'),
ops: opsConfig,
logging: loggingConfig,
};
const plain = JSON.stringify(subset, null, 2);
this.server.log(['info', 'config'], 'New logging configuration:\n' + plain);
this.server.plugins['@elastic/good'].reconfigure(loggingOptions);
reconfigureLogging(this.server, loggingConfig, opsConfig.interval);
}
}

View file

@ -17,21 +17,16 @@
* under the License.
*/
import good from '@elastic/good';
import loggingConfiguration from './configuration';
import { logWithMetadata } from './log_with_metadata';
import { setupLoggingRotate } from './rotate';
export async function setupLogging(server, config) {
return await server.register({
plugin: good,
options: loggingConfiguration(config),
});
}
import { setupLogging, setupLoggingRotate, attachMetaData } from '@kbn/legacy-logging';
export async function loggingMixin(kbnServer, server, config) {
logWithMetadata.decorateServer(server);
server.decorate('server', 'logWithMetadata', (tags, message, metadata = {}) => {
server.log(tags, attachMetaData(message, metadata));
});
await setupLogging(server, config);
await setupLoggingRotate(server, config);
const loggingConfig = config.get('logging');
const opsInterval = config.get('ops.interval');
await setupLogging(server, loggingConfig, opsInterval);
await setupLoggingRotate(server, loggingConfig);
}

View file

@ -2702,6 +2702,10 @@
version "0.0.0"
uid ""
"@kbn/legacy-logging@link:packages/kbn-legacy-logging":
version "0.0.0"
uid ""
"@kbn/logging@link:packages/kbn-logging":
version "0.0.0"
uid ""