[SIEM] move away from Joi for importing/exporting timeline (#62125)

* move away from joi

* update schema for filterQuery

* fix types

* update schemas

* remove boom

* remove redundant params

* reuse utils from case

* update schemas for query params and body

* fix types

* update validation schema

* fix unit test

* update description for test cases

* remove import from case

* lifting common libs

* fix dependency

* lifting validation builder function

* add unit test

* fix for code review

* resolve review comments

* rename common utils

* fix types
This commit is contained in:
Angela Chuang 2020-04-16 10:17:15 +01:00 committed by GitHub
parent 02cba10469
commit 7b74aa9d69
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
21 changed files with 385 additions and 405 deletions

View file

@ -26,13 +26,13 @@ import {
buildSiemResponse,
validateLicenseForRuleType,
} from '../utils';
import { createRulesStreamFromNdJson } from '../../rules/create_rules_stream_from_ndjson';
import { ImportRuleAlertRest } from '../../types';
import { patchRules } from '../../rules/patch_rules';
import { importRulesQuerySchema, importRulesPayloadSchema } from '../schemas/import_rules_schema';
import { ImportRulesSchema, importRulesSchema } from '../schemas/response/import_rules_schema';
import { getTupleDuplicateErrorsAndUniqueRules } from './utils';
import { validate } from './validate';
import { createRulesStreamFromNdJson } from '../../rules/create_rules_stream_from_ndjson';
type PromiseFromStreams = ImportRuleAlertRest | Error;

View file

@ -12,7 +12,6 @@ import {
transformTags,
getIdBulkError,
transformOrBulkError,
transformDataToNdjson,
transformAlertsToRules,
transformOrImportError,
getDuplicates,
@ -22,14 +21,13 @@ import { getResult } from '../__mocks__/request_responses';
import { INTERNAL_IDENTIFIER } from '../../../../../common/constants';
import { ImportRuleAlertRest, RuleAlertParamsRest, RuleTypeParams } from '../../types';
import { BulkError, ImportSuccessError } from '../utils';
import { sampleRule } from '../../signals/__mocks__/es_results';
import { getSimpleRule, getOutputRuleAlertForRest } from '../__mocks__/utils';
import { createRulesStreamFromNdJson } from '../../rules/create_rules_stream_from_ndjson';
import { createPromiseFromStreams } from '../../../../../../../../../src/legacy/utils/streams';
import { PartialAlert } from '../../../../../../../../plugins/alerting/server';
import { SanitizedAlert } from '../../../../../../../../plugins/alerting/server/types';
import { RuleAlertType } from '../../rules/types';
import { setFeatureFlagsForTestsOnly, unSetFeatureFlagsForTestsOnly } from '../../feature_flags';
import { createRulesStreamFromNdJson } from '../../rules/create_rules_stream_from_ndjson';
type PromiseFromStreams = ImportRuleAlertRest | Error;
@ -396,47 +394,6 @@ describe('utils', () => {
});
});
describe('transformDataToNdjson', () => {
test('if rules are empty it returns an empty string', () => {
const ruleNdjson = transformDataToNdjson([]);
expect(ruleNdjson).toEqual('');
});
test('single rule will transform with new line ending character for ndjson', () => {
const rule = sampleRule();
const ruleNdjson = transformDataToNdjson([rule]);
expect(ruleNdjson.endsWith('\n')).toBe(true);
});
test('multiple rules will transform with two new line ending characters for ndjson', () => {
const result1 = sampleRule();
const result2 = sampleRule();
result2.id = 'some other id';
result2.rule_id = 'some other id';
result2.name = 'Some other rule';
const ruleNdjson = transformDataToNdjson([result1, result2]);
// this is how we count characters in JavaScript :-)
const count = ruleNdjson.split('\n').length - 1;
expect(count).toBe(2);
});
test('you can parse two rules back out without errors', () => {
const result1 = sampleRule();
const result2 = sampleRule();
result2.id = 'some other id';
result2.rule_id = 'some other id';
result2.name = 'Some other rule';
const ruleNdjson = transformDataToNdjson([result1, result2]);
const ruleStrings = ruleNdjson.split('\n');
const reParsed1 = JSON.parse(ruleStrings[0]);
const reParsed2 = JSON.parse(ruleStrings[1]);
expect(reParsed1).toEqual(result1);
expect(reParsed2).toEqual(result2);
});
});
describe('transformAlertsToRules', () => {
test('given an empty array returns an empty array', () => {
expect(transformAlertsToRules([])).toEqual([]);

View file

@ -152,15 +152,6 @@ export const transformAlertToRule = (
});
};
/**
 * Serializes an array of objects into NDJSON (newline-delimited JSON):
 * one JSON document per line, each line terminated by '\n'.
 * An empty input yields an empty string (no trailing newline).
 */
export const transformDataToNdjson = (data: unknown[]): string => {
  if (data.length === 0) {
    return '';
  }
  return data.map(item => JSON.stringify(item)).join('\n') + '\n';
};
export const transformAlertsToRules = (
alerts: RuleAlertType[]
): Array<Partial<OutputRuleAlertRest>> => {

View file

@ -4,39 +4,19 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { Transform } from 'stream';
import { has, isString } from 'lodash/fp';
import { ImportRuleAlertRest } from '../types';
import {
createSplitStream,
createMapStream,
createFilterStream,
createConcatStream,
} from '../../../../../../../../src/legacy/utils/streams';
import { importRulesSchema } from '../routes/schemas/import_rules_schema';
import { BadRequestError } from '../errors/bad_request_error';
export interface RulesObjectsExportResultDetails {
/** number of successfully exported objects */
exportedCount: number;
}
// Stream stage: maps each NDJSON line to its parsed object.
// Non-string or blank lines map to undefined (filtered out downstream);
// a JSON syntax error is returned as the mapped value itself rather than
// destroying the stream, so individual bad lines can be reported later.
export const parseNdjsonStrings = (): Transform => {
return createMapStream((ndJsonStr: string) => {
if (isString(ndJsonStr) && ndJsonStr.trim() !== '') {
try {
return JSON.parse(ndJsonStr);
} catch (err) {
// Surface the parse failure as a value for per-record error handling.
return err;
}
}
});
};
// Stream stage: drops null-ish entries and the export-details record
// (the object carrying `exported_count`) so only importable objects
// continue down the pipeline.
export const filterExportedCounts = (): Transform => {
return createFilterStream<ImportRuleAlertRest | RulesObjectsExportResultDetails>(
obj => obj != null && !has('exported_count', obj)
);
};
import {
parseNdjsonStrings,
filterExportedCounts,
createLimitStream,
} from '../../../utils/read_stream/create_stream_from_ndjson';
export const validateRules = (): Transform => {
return createMapStream((obj: ImportRuleAlertRest) => {
@ -53,21 +33,6 @@ export const validateRules = (): Transform => {
});
};
// Adaptation from: saved_objects/import/create_limit_stream.ts
// Pass-through object stream that errors once more than `limit` objects
// have flowed through, protecting the import path from oversized payloads.
export const createLimitStream = (limit: number): Transform => {
let counter = 0;
return new Transform({
objectMode: true,
async transform(obj, _, done) {
// Fail on the (limit + 1)th object; the error destroys the stream
// and is surfaced to the caller of the import pipeline.
if (counter >= limit) {
return done(new Error(`Can't import more than ${limit} rules`));
}
counter++;
done(undefined, obj);
},
});
};
// TODO: Capture both the line number and the rule_id if you have that information for the error message
// eventually and then pass it down so we can give error messages on the line number

View file

@ -7,7 +7,8 @@
import { AlertsClient } from '../../../../../../../plugins/alerting/server';
import { getNonPackagedRules } from './get_existing_prepackaged_rules';
import { getExportDetailsNdjson } from './get_export_details_ndjson';
import { transformAlertsToRules, transformDataToNdjson } from '../routes/rules/utils';
import { transformAlertsToRules } from '../routes/rules/utils';
import { transformDataToNdjson } from '../../../utils/read_stream/create_stream_from_ndjson';
export const getExportAll = async (
alertsClient: AlertsClient

View file

@ -8,8 +8,9 @@ import { AlertsClient } from '../../../../../../../plugins/alerting/server';
import { getExportDetailsNdjson } from './get_export_details_ndjson';
import { isAlertType } from '../rules/types';
import { readRules } from './read_rules';
import { transformDataToNdjson, transformAlertToRule } from '../routes/rules/utils';
import { transformAlertToRule } from '../routes/rules/utils';
import { OutputRuleAlertRest } from '../types';
import { transformDataToNdjson } from '../../../utils/read_stream/create_stream_from_ndjson';
interface ExportSuccesRule {
statusCode: 200;

View file

@ -3,8 +3,12 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as rt from 'io-ts';
import { Transform } from 'stream';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { failure } from 'io-ts/lib/PathReporter';
import { identity } from 'fp-ts/lib/function';
import {
createConcatStream,
createSplitStream,
@ -14,26 +18,28 @@ import {
parseNdjsonStrings,
filterExportedCounts,
createLimitStream,
} from '../detection_engine/rules/create_rules_stream_from_ndjson';
import { importTimelinesSchema } from './routes/schemas/import_timelines_schema';
import { BadRequestError } from '../detection_engine/errors/bad_request_error';
import { ImportTimelineResponse } from './routes/utils/import_timelines';
} from '../../utils/read_stream/create_stream_from_ndjson';
export const validateTimelines = (): Transform => {
return createMapStream((obj: ImportTimelineResponse) => {
if (!(obj instanceof Error)) {
const validated = importTimelinesSchema.validate(obj);
if (validated.error != null) {
return new BadRequestError(validated.error.message);
} else {
return validated.value;
}
} else {
return obj;
}
});
import { ImportTimelineResponse } from './routes/utils/import_timelines';
import { ImportTimelinesSchemaRt } from './routes/schemas/import_timelines_schema';
// Factory signature converting a validation failure message into the Error to throw.
type ErrorFactory = (message: string) => Error;
// Default factory: wrap the message in a plain Error.
export const createPlainError = (message: string) => new Error(message);
// Formats io-ts validation errors (one per line, via PathReporter's
// `failure`) and throws them through the given factory.
export const throwErrors = (createError: ErrorFactory) => (errors: rt.Errors) => {
throw createError(failure(errors).join('\n'));
};
// Decodes `inputValue` with the given io-ts runtime type: returns the
// decoded value on success, throws (via `createError`) on failure.
export const decodeOrThrow = <A, O, I>(
runtimeType: rt.Type<A, O, I>,
createError: ErrorFactory = createPlainError
) => (inputValue: I) =>
pipe(runtimeType.decode(inputValue), fold(throwErrors(createError), identity));
// Stream stage: validates each imported timeline object against
// ImportTimelinesSchemaRt, throwing on the first invalid record.
export const validateTimelines = (): Transform =>
createMapStream((obj: ImportTimelineResponse) => decodeOrThrow(ImportTimelinesSchemaRt)(obj));
export const createTimelinesStreamFromNdJson = (ruleLimit: number) => {
return [
createSplitStream('\n'),

View file

@ -6,11 +6,16 @@
import { TIMELINE_EXPORT_URL, TIMELINE_IMPORT_URL } from '../../../../../common/constants';
import { requestMock } from '../../../detection_engine/routes/__mocks__';
import stream from 'stream';
const readable = new stream.Readable();
export const getExportTimelinesRequest = () =>
requestMock.create({
method: 'get',
path: TIMELINE_EXPORT_URL,
query: {
file_name: 'mock_export_timeline.ndjson',
exclude_export_details: 'false',
},
body: {
ids: ['f0e58720-57b6-11ea-b88d-3f1a31716be8', '890b8ae0-57df-11ea-a7c9-3976b7f1cb37'],
},
@ -22,7 +27,7 @@ export const getImportTimelinesRequest = (filename?: string) =>
path: TIMELINE_IMPORT_URL,
query: { overwrite: false },
body: {
file: { hapi: { filename: filename ?? 'filename.ndjson' } },
file: { ...readable, hapi: { filename: filename ?? 'filename.ndjson' } },
},
});

View file

@ -83,7 +83,7 @@ describe('export timelines', () => {
});
describe('request validation', () => {
test('disallows singular id query param', async () => {
test('return validation error for request body', async () => {
const request = requestMock.create({
method: 'get',
path: TIMELINE_EXPORT_URL,
@ -91,7 +91,26 @@ describe('export timelines', () => {
});
const result = server.validate(request);
expect(result.badRequest).toHaveBeenCalledWith('"id" is not allowed');
expect(result.badRequest.mock.calls[0][0]).toEqual(
'Invalid value undefined supplied to : { ids: Array<string> }/ids: Array<string>'
);
});
test('return validation error for request params', async () => {
const request = requestMock.create({
method: 'get',
path: TIMELINE_EXPORT_URL,
body: { id: 'someId' },
});
const result = server.validate(request);
expect(result.badRequest.mock.calls[1][0]).toEqual(
[
'Invalid value undefined supplied to : { file_name: string, exclude_export_details: ("true" | "false") }/file_name: string',
'Invalid value undefined supplied to : { file_name: string, exclude_export_details: ("true" | "false") }/exclude_export_details: ("true" | "false")/0: "true"',
'Invalid value undefined supplied to : { file_name: string, exclude_export_details: ("true" | "false") }/exclude_export_details: ("true" | "false")/1: "false"',
].join('\n')
);
});
});
});

View file

@ -7,31 +7,24 @@
import { set as _set } from 'lodash/fp';
import { IRouter } from '../../../../../../../../src/core/server';
import { LegacyServices } from '../../../types';
import { ExportTimelineRequestParams } from '../types';
import {
transformError,
buildRouteValidation,
buildSiemResponse,
} from '../../detection_engine/routes/utils';
import { transformError, buildSiemResponse } from '../../detection_engine/routes/utils';
import { TIMELINE_EXPORT_URL } from '../../../../common/constants';
import {
exportTimelinesSchema,
exportTimelinesQuerySchema,
} from './schemas/export_timelines_schema';
import { getExportTimelineByObjectIds } from './utils/export_timelines';
import {
exportTimelinesQuerySchema,
exportTimelinesRequestBodySchema,
} from './schemas/export_timelines_schema';
import { buildRouteValidation } from '../../../utils/build_validation/route_validation';
export const exportTimelinesRoute = (router: IRouter, config: LegacyServices['config']) => {
router.post(
{
path: TIMELINE_EXPORT_URL,
validate: {
query: buildRouteValidation<ExportTimelineRequestParams['query']>(
exportTimelinesQuerySchema
),
body: buildRouteValidation<ExportTimelineRequestParams['body']>(exportTimelinesSchema),
query: buildRouteValidation(exportTimelinesQuerySchema),
body: buildRouteValidation(exportTimelinesRequestBodySchema),
},
options: {
tags: ['access:siem'],
@ -42,6 +35,7 @@ export const exportTimelinesRoute = (router: IRouter, config: LegacyServices['co
const siemResponse = buildSiemResponse(response);
const savedObjectsClient = context.core.savedObjects.client;
const exportSizeLimit = config().get<number>('savedObjects.maxImportExportSize');
if (request.body?.ids != null && request.body.ids.length > exportSizeLimit) {
return siemResponse.error({
statusCode: 400,
@ -51,7 +45,7 @@ export const exportTimelinesRoute = (router: IRouter, config: LegacyServices['co
const responseBody = await getExportTimelineByObjectIds({
client: savedObjectsClient,
request,
ids: request.body.ids,
});
return response.ok({

View file

@ -334,7 +334,10 @@ describe('import timelines', () => {
const result = server.validate(request);
expect(result.badRequest).toHaveBeenCalledWith(
'child "file" fails because ["file" is required]'
[
'Invalid value undefined supplied to : { file: (ReadableRt & { hapi: { filename: string } }) }/file: (ReadableRt & { hapi: { filename: string } })/0: ReadableRt',
'Invalid value undefined supplied to : { file: (ReadableRt & { hapi: { filename: string } }) }/file: (ReadableRt & { hapi: { filename: string } })/1: { hapi: { filename: string } }',
].join('\n')
);
});
});

View file

@ -6,8 +6,8 @@
import { extname } from 'path';
import { chunk, omit, set } from 'lodash/fp';
import {
buildRouteValidation,
buildSiemResponse,
createBulkErrorObject,
BulkError,
@ -23,7 +23,6 @@ import {
isBulkError,
isImportRegular,
ImportTimelineResponse,
ImportTimelinesRequestParams,
ImportTimelinesSchema,
PromiseFromStreams,
} from './utils/import_timelines';
@ -31,14 +30,14 @@ import {
import { IRouter } from '../../../../../../../../src/core/server';
import { TIMELINE_IMPORT_URL } from '../../../../common/constants';
import { SetupPlugins } from '../../../plugin';
import { importTimelinesPayloadSchema } from './schemas/import_timelines_schema';
import { ImportTimelinesPayloadSchemaRt } from './schemas/import_timelines_schema';
import { importRulesSchema } from '../../detection_engine/routes/schemas/response/import_rules_schema';
import { LegacyServices } from '../../../types';
import { Timeline } from '../saved_object';
import { validate } from '../../detection_engine/routes/rules/validate';
import { FrameworkRequest } from '../../framework';
import { buildRouteValidation } from '../../../utils/build_validation/route_validation';
const CHUNK_PARSED_OBJECT_SIZE = 10;
const timelineLib = new Timeline();
@ -52,9 +51,7 @@ export const importTimelinesRoute = (
{
path: `${TIMELINE_IMPORT_URL}`,
validate: {
body: buildRouteValidation<ImportTimelinesRequestParams['body']>(
importTimelinesPayloadSchema
),
body: buildRouteValidation(ImportTimelinesPayloadSchemaRt),
},
options: {
tags: ['access:siem'],
@ -65,28 +62,30 @@ export const importTimelinesRoute = (
},
},
async (context, request, response) => {
const siemResponse = buildSiemResponse(response);
const savedObjectsClient = context.core.savedObjects.client;
if (!savedObjectsClient) {
return siemResponse.error({ statusCode: 404 });
}
const { filename } = request.body.file.hapi;
const fileExtension = extname(filename).toLowerCase();
if (fileExtension !== '.ndjson') {
return siemResponse.error({
statusCode: 400,
body: `Invalid file extension ${fileExtension}`,
});
}
const objectLimit = config().get<number>('savedObjects.maxImportExportSize');
try {
const siemResponse = buildSiemResponse(response);
const savedObjectsClient = context.core.savedObjects.client;
if (!savedObjectsClient) {
return siemResponse.error({ statusCode: 404 });
}
const { file } = request.body;
const { filename } = file.hapi;
const fileExtension = extname(filename).toLowerCase();
if (fileExtension !== '.ndjson') {
return siemResponse.error({
statusCode: 400,
body: `Invalid file extension ${fileExtension}`,
});
}
const objectLimit = config().get<number>('savedObjects.maxImportExportSize');
const readStream = createTimelinesStreamFromNdJson(objectLimit);
const parsedObjects = await createPromiseFromStreams<PromiseFromStreams[]>([
request.body.file,
file,
...readStream,
]);
const [duplicateIdErrors, uniqueParsedObjects] = getTupleDuplicateErrorsAndUniqueTimeline(
@ -215,6 +214,7 @@ export const importTimelinesRoute = (
}
} catch (err) {
const error = transformError(err);
const siemResponse = buildSiemResponse(response);
return siemResponse.error({
body: error.message,

View file

@ -4,17 +4,13 @@
* you may not use this file except in compliance with the Elastic License.
*/
import Joi from 'joi';
import * as rt from 'io-ts';
/* eslint-disable @typescript-eslint/camelcase */
import { ids, exclude_export_details, file_name } from './schemas';
/* eslint-disable @typescript-eslint/camelcase */
export const exportTimelinesSchema = Joi.object({
ids,
}).min(1);
export const exportTimelinesQuerySchema = Joi.object({
file_name: file_name.default('export.ndjson'),
exclude_export_details: exclude_export_details.default(false),
// Query-string schema for timeline export. Query parameters arrive as
// strings, so the boolean flag is modeled as the literals 'true' | 'false'.
export const exportTimelinesQuerySchema = rt.type({
file_name: rt.string,
exclude_export_details: rt.union([rt.literal('true'), rt.literal('false')]),
});
// Request-body schema for timeline export: the saved-object ids to export.
export const exportTimelinesRequestBodySchema = rt.type({
ids: rt.array(rt.string),
});

View file

@ -3,55 +3,41 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Joi from 'joi';
import {
columns,
created,
createdBy,
dataProviders,
dateRange,
description,
eventNotes,
eventType,
favorite,
filters,
globalNotes,
kqlMode,
kqlQuery,
savedObjectId,
savedQueryId,
sort,
title,
updated,
updatedBy,
version,
pinnedEventIds,
} from './schemas';
import * as rt from 'io-ts';
export const importTimelinesPayloadSchema = Joi.object({
file: Joi.object().required(),
});
import { Readable } from 'stream';
import { either } from 'fp-ts/lib/Either';
import { eventNotes, globalNotes, pinnedEventIds } from './schemas';
import { SavedTimelineRuntimeType } from '../../types';
export const importTimelinesSchema = Joi.object({
columns,
created,
createdBy,
dataProviders,
dateRange,
description,
eventNotes,
eventType,
filters,
favorite,
globalNotes,
kqlMode,
kqlQuery,
savedObjectId,
savedQueryId,
sort,
title,
updated,
updatedBy,
version,
pinnedEventIds,
// Runtime type for one imported timeline record: the saved timeline fields
// plus its identifiers and the attached notes / pinned events that the
// export format carries along.
export const ImportTimelinesSchemaRt = rt.intersection([
SavedTimelineRuntimeType,
rt.type({
savedObjectId: rt.string,
version: rt.string,
}),
rt.type({
globalNotes,
eventNotes,
pinnedEventIds,
}),
]);
// Custom io-ts codec asserting a value is a Node.js Readable stream whose
// `readable` flag is still set (not ended/destroyed). Encode is identity.
const ReadableRt = new rt.Type<Readable, Readable, unknown>(
'ReadableRt',
(u): u is Readable => u instanceof Readable,
(u, c) =>
either.chain(rt.object.validate(u, c), s => {
// Object check passed; additionally require the stream to be readable.
const d = s as Readable;
return d.readable ? rt.success(d) : rt.failure(u, c);
}),
a => a
);
// Multipart import payload: a readable file stream plus hapi metadata
// carrying the original filename (used elsewhere to enforce .ndjson).
export const ImportTimelinesPayloadSchemaRt = rt.type({
file: rt.intersection([
ReadableRt,
rt.type({
hapi: rt.type({ filename: rt.string }),
}),
]),
});

View file

@ -3,156 +3,10 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Joi from 'joi';
import * as runtimeTypes from 'io-ts';
import { unionWithNullType } from '../../../framework';
import { SavedNoteRuntimeType } from '../../../note/types';
const allowEmptyString = Joi.string().allow([null, '']);
const columnHeaderType = allowEmptyString;
export const created = Joi.number().allow(null);
export const createdBy = allowEmptyString;
export const description = allowEmptyString;
export const end = Joi.number();
export const eventId = allowEmptyString;
export const eventType = allowEmptyString;
export const filters = Joi.array()
.items(
Joi.object({
meta: Joi.object({
alias: allowEmptyString,
controlledBy: allowEmptyString,
disabled: Joi.boolean().allow(null),
field: allowEmptyString,
formattedValue: allowEmptyString,
index: allowEmptyString,
key: allowEmptyString,
negate: Joi.boolean().allow(null),
params: allowEmptyString,
type: allowEmptyString,
value: allowEmptyString,
}),
exists: allowEmptyString,
match_all: allowEmptyString,
missing: allowEmptyString,
query: allowEmptyString,
range: allowEmptyString,
script: allowEmptyString,
})
)
.allow(null);
const name = allowEmptyString;
export const noteId = allowEmptyString;
export const note = allowEmptyString;
export const start = Joi.number();
export const savedQueryId = allowEmptyString;
export const savedObjectId = allowEmptyString;
export const timelineId = allowEmptyString;
export const title = allowEmptyString;
export const updated = Joi.number().allow(null);
export const updatedBy = allowEmptyString;
export const version = allowEmptyString;
export const columns = Joi.array().items(
Joi.object({
aggregatable: Joi.boolean().allow(null),
category: allowEmptyString,
columnHeaderType,
description,
example: allowEmptyString,
indexes: allowEmptyString,
id: allowEmptyString,
name,
placeholder: allowEmptyString,
searchable: Joi.boolean().allow(null),
type: allowEmptyString,
}).required()
);
export const dataProviders = Joi.array()
.items(
Joi.object({
id: allowEmptyString,
name: allowEmptyString,
enabled: Joi.boolean().allow(null),
excluded: Joi.boolean().allow(null),
kqlQuery: allowEmptyString,
queryMatch: Joi.object({
field: allowEmptyString,
displayField: allowEmptyString,
value: allowEmptyString,
displayValue: allowEmptyString,
operator: allowEmptyString,
}),
and: Joi.array()
.items(
Joi.object({
id: allowEmptyString,
name,
enabled: Joi.boolean().allow(null),
excluded: Joi.boolean().allow(null),
kqlQuery: allowEmptyString,
queryMatch: Joi.object({
field: allowEmptyString,
displayField: allowEmptyString,
value: allowEmptyString,
displayValue: allowEmptyString,
operator: allowEmptyString,
}).allow(null),
})
)
.allow(null),
})
)
.allow(null);
export const dateRange = Joi.object({
start,
end,
});
export const favorite = Joi.array().items(
Joi.object({
keySearch: allowEmptyString,
fullName: allowEmptyString,
userName: allowEmptyString,
favoriteDate: Joi.number(),
}).allow(null)
);
const noteItem = Joi.object({
noteId,
version,
eventId,
note,
timelineId,
created,
createdBy,
updated,
updatedBy,
});
export const eventNotes = Joi.array().items(noteItem);
export const globalNotes = Joi.array().items(noteItem);
export const kqlMode = allowEmptyString;
export const kqlQuery = Joi.object({
filterQuery: Joi.object({
kuery: Joi.object({
kind: allowEmptyString,
expression: allowEmptyString,
}).allow(null),
serializedQuery: allowEmptyString,
}).allow(null),
});
export const pinnedEventIds = Joi.array()
.items(allowEmptyString)
.allow(null);
export const sort = Joi.object({
columnId: allowEmptyString,
sortDirection: allowEmptyString,
});
/* eslint-disable @typescript-eslint/camelcase */
export const ids = Joi.array().items(allowEmptyString);
export const exclude_export_details = Joi.boolean();
export const file_name = allowEmptyString;
export const eventNotes = runtimeTypes.array(unionWithNullType(SavedNoteRuntimeType));
export const globalNotes = runtimeTypes.array(unionWithNullType(SavedNoteRuntimeType));
export const pinnedEventIds = runtimeTypes.array(unionWithNullType(runtimeTypes.string));

View file

@ -26,12 +26,11 @@ import {
import {
ExportedTimelines,
ExportTimelineSavedObjectsClient,
ExportTimelineRequest,
ExportedNotes,
TimelineSavedObject,
} from '../../types';
import { transformDataToNdjson } from '../../../../utils/read_stream/create_stream_from_ndjson';
import { transformDataToNdjson } from '../../../detection_engine/routes/rules/utils';
export type TimelineSavedObjectsClient = Pick<
SavedObjectsClient,
| 'get'
@ -142,23 +141,17 @@ const getTimelines = async (
const getTimelinesFromObjects = async (
savedObjectsClient: ExportTimelineSavedObjectsClient,
request: ExportTimelineRequest
ids: string[]
): Promise<ExportedTimelines[]> => {
const timelines: TimelineSavedObject[] = await getTimelines(savedObjectsClient, request.body.ids);
const timelines: TimelineSavedObject[] = await getTimelines(savedObjectsClient, ids);
// To Do for feature freeze
// if (timelines.length !== request.body.ids.length) {
// //figure out which is missing to tell user
// }
const [notes, pinnedEventIds] = await Promise.all([
Promise.all(
request.body.ids.map(timelineId => getNotesByTimelineId(savedObjectsClient, timelineId))
),
Promise.all(
request.body.ids.map(timelineId =>
getPinnedEventsByTimelineId(savedObjectsClient, timelineId)
)
),
Promise.all(ids.map(timelineId => getNotesByTimelineId(savedObjectsClient, timelineId))),
Promise.all(ids.map(timelineId => getPinnedEventsByTimelineId(savedObjectsClient, timelineId))),
]);
const myNotes = notes.reduce<NoteSavedObject[]>(
@ -171,7 +164,7 @@ const getTimelinesFromObjects = async (
[]
);
const myResponse = request.body.ids.reduce<ExportedTimelines[]>((acc, timelineId) => {
const myResponse = ids.reduce<ExportedTimelines[]>((acc, timelineId) => {
const myTimeline = timelines.find(t => t.savedObjectId === timelineId);
if (myTimeline != null) {
const timelineNotes = myNotes.filter(n => n.timelineId === timelineId);
@ -193,11 +186,11 @@ const getTimelinesFromObjects = async (
export const getExportTimelineByObjectIds = async ({
client,
request,
ids,
}: {
client: ExportTimelineSavedObjectsClient;
request: ExportTimelineRequest;
ids: string[];
}) => {
const timeline = await getTimelinesFromObjects(client, request);
const timeline = await getTimelinesFromObjects(client, ids);
return transformDataToNdjson(timeline);
};

View file

@ -14,7 +14,7 @@ import {
PinnedEventToReturnSavedObjectRuntimeType,
PinnedEventSavedObject,
} from '../pinned_event/types';
import { SavedObjectsClient, KibanaRequest } from '../../../../../../../src/core/server';
import { SavedObjectsClient } from '../../../../../../../src/core/server';
/*
* ColumnHeader Types
@ -204,20 +204,9 @@ export const AllTimelineSavedObjectRuntimeType = runtimeTypes.type({
export interface AllTimelineSavedObject
extends runtimeTypes.TypeOf<typeof AllTimelineSavedObjectRuntimeType> {}
export interface ExportTimelineRequestParams {
body: { ids: string[] };
query: {
file_name: string;
exclude_export_details: boolean;
};
}
export type ExportTimelineRequest = KibanaRequest<
unknown,
ExportTimelineRequestParams['query'],
ExportTimelineRequestParams['body'],
'post'
>;
/**
* Import/export timelines
*/
export type ExportTimelineSavedObjectsClient = Pick<
SavedObjectsClient,

View file

@ -0,0 +1,39 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { buildRouteValidation } from './route_validation';
import * as rt from 'io-ts';
import { RouteValidationResultFactory } from '../../../../../../../src/core/server/http';
// Unit tests for buildRouteValidation, the io-ts-backed Kibana route
// validation builder.
describe('buildRouteValidation', () => {
// Minimal schema under test: a body with an array of string ids.
const schema = rt.type({
ids: rt.array(rt.string),
});
// Fake RouteValidationResultFactory that echoes its argument so the
// tests can assert directly on what the validator produced.
const validationResult: RouteValidationResultFactory = {
ok: jest.fn().mockImplementation(validatedInput => validatedInput),
badRequest: jest.fn().mockImplementation(e => e),
};
beforeEach(() => {
jest.clearAllMocks();
});
test('return validation error', () => {
const input = { id: 'someId' };
const result = buildRouteValidation(schema)(input, validationResult);
// PathReporter-formatted io-ts failure message.
expect(result).toEqual(
'Invalid value undefined supplied to : { ids: Array<string> }/ids: Array<string>'
);
});
test('return validated input', () => {
const input = { ids: ['someId'] };
const result = buildRouteValidation(schema)(input, validationResult);
expect(result).toEqual(input);
});
});

View file

@ -0,0 +1,39 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { failure } from 'io-ts/lib/PathReporter';
import {
RouteValidationFunction,
RouteValidationResultFactory,
RouteValidationError,
} from '../../../../../../../src/core/server';
// Result shape of Kibana route validation: either a validated value or a
// RouteValidationError, never both.
type RequestValidationResult<T> =
| {
value: T;
error?: undefined;
}
| {
value?: undefined;
error: RouteValidationError;
};
// Builds a Kibana RouteValidationFunction from an io-ts runtime type:
// on successful decode the validated input is passed to `ok`; on failure
// the PathReporter-formatted errors (joined with newlines) go to `badRequest`.
export const buildRouteValidation = <T extends rt.Mixed, A = rt.TypeOf<T>>(
schema: T
): RouteValidationFunction<A> => (
inputValue: unknown,
validationResult: RouteValidationResultFactory
) =>
pipe(
schema.decode(inputValue),
fold<rt.Errors, A, RequestValidationResult<A>>(
(errors: rt.Errors) => validationResult.badRequest(failure(errors).join('\n')),
(validatedInput: A) => validationResult.ok(validatedInput)
)
);

View file

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { transformDataToNdjson } from './create_stream_from_ndjson';
import { ImportRuleAlertRest } from '../../lib/detection_engine/types';
import { sampleRule } from '../../lib/detection_engine/signals/__mocks__/es_results';
/**
 * Builds a representative rule payload (REST shape) used as a fixture for
 * NDJSON serialization tests.
 */
export const getOutputSample = (): Partial<ImportRuleAlertRest> => {
  const sample: Partial<ImportRuleAlertRest> = {
    rule_id: 'rule-1',
    output_index: '.siem-signals',
    risk_score: 50,
    description: 'some description',
    from: 'now-5m',
    to: 'now',
    index: ['index-1'],
    name: 'some-name',
    severity: 'low',
    interval: '5m',
    type: 'query',
  };
  return sample;
};
/**
 * Serializes a single sample rule as one NDJSON line: its JSON
 * representation followed by a trailing newline.
 */
export const getSampleAsNdjson = (sample: Partial<ImportRuleAlertRest>): string =>
  `${JSON.stringify(sample)}\n`;
// Tests for the shared NDJSON serialization helper exposed by
// utils/read_stream/create_stream_from_ndjson.
describe('create_rules_stream_from_ndjson', () => {
describe('transformDataToNdjson', () => {
test('if rules are empty it returns an empty string', () => {
const ruleNdjson = transformDataToNdjson([]);
expect(ruleNdjson).toEqual('');
});
test('single rule will transform with new line ending character for ndjson', () => {
const rule = sampleRule();
const ruleNdjson = transformDataToNdjson([rule]);
expect(ruleNdjson.endsWith('\n')).toBe(true);
});
test('multiple rules will transform with two new line ending characters for ndjson', () => {
const result1 = sampleRule();
const result2 = sampleRule();
result2.id = 'some other id';
result2.rule_id = 'some other id';
result2.name = 'Some other rule';
const ruleNdjson = transformDataToNdjson([result1, result2]);
// this is how we count characters in JavaScript :-)
const count = ruleNdjson.split('\n').length - 1;
expect(count).toBe(2);
});
test('you can parse two rules back out without errors', () => {
const result1 = sampleRule();
const result2 = sampleRule();
result2.id = 'some other id';
result2.rule_id = 'some other id';
result2.name = 'Some other rule';
const ruleNdjson = transformDataToNdjson([result1, result2]);
const ruleStrings = ruleNdjson.split('\n');
// Round-trip check: each line parses back to the original object.
const reParsed1 = JSON.parse(ruleStrings[0]);
const reParsed2 = JSON.parse(ruleStrings[1]);
expect(reParsed1).toEqual(result1);
expect(reParsed2).toEqual(result2);
});
});
});

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Transform } from 'stream';
import { has, isString } from 'lodash/fp';
import { ImportRuleAlertRest } from '../../lib/detection_engine/types';
import { createMapStream, createFilterStream } from '../../../../../../../src/legacy/utils/streams';
import { importRulesSchema } from '../../lib/detection_engine/routes/schemas/import_rules_schema';
import { BadRequestError } from '../../lib/detection_engine/errors/bad_request_error';
export interface RulesObjectsExportResultDetails {
/** number of successfully exported objects */
exportedCount: number;
}
// Stream stage: parse each non-empty NDJSON line into an object.
// A malformed line yields its parse Error as the mapped value, so a single
// bad record does not tear down the whole import stream.
export const parseNdjsonStrings = (): Transform => {
  return createMapStream((line: string) => {
    if (!isString(line) || line.trim() === '') {
      return undefined;
    }
    try {
      return JSON.parse(line);
    } catch (err) {
      return err;
    }
  });
};
// Stream stage: keep only real import records — drop null-ish entries and
// the export-details record (identified by its `exported_count` field).
export const filterExportedCounts = (): Transform => {
  const isImportRecord = (obj: ImportRuleAlertRest | RulesObjectsExportResultDetails) =>
    obj != null && !has('exported_count', obj);
  return createFilterStream<ImportRuleAlertRest | RulesObjectsExportResultDetails>(isImportRecord);
};
// Stream stage: validate each parsed rule against the import schema.
// Errors produced by earlier stages pass through untouched; schema
// failures are mapped to BadRequestError so they surface as per-record
// import errors rather than stream crashes.
export const validateRules = (): Transform => {
  return createMapStream((obj: ImportRuleAlertRest) => {
    if (obj instanceof Error) {
      return obj;
    }
    const { error, value } = importRulesSchema.validate(obj);
    return error != null ? new BadRequestError(error.message) : value;
  });
};
// Adaptation from: saved_objects/import/create_limit_stream.ts
// Pass-through object stream that fails once more than `limit` objects
// have flowed through, guarding imports against oversized payloads.
export const createLimitStream = (limit: number): Transform => {
  let seen = 0;
  return new Transform({
    objectMode: true,
    async transform(obj, _, done) {
      seen += 1;
      if (seen > limit) {
        done(new Error(`Can't import more than ${limit} rules`));
        return;
      }
      done(undefined, obj);
    },
  });
};
/**
 * Converts an array of objects to an NDJSON string: one JSON document per
 * line, each terminated with '\n'. Returns '' for an empty array (no
 * trailing newline).
 */
export const transformDataToNdjson = (data: unknown[]): string =>
  data.length === 0 ? '' : `${data.map(item => JSON.stringify(item)).join('\n')}\n`;