[Security Solution][Resolver] Support kuery filter (#74695)

* Adding KQL filter

* Adding filter support for the backend and tests

* Moving the filter to the body

* Switching the events and alerts APIs to POST

* Removing unused import

* Adding tests for the events API results being returned in descending order

* Switching the frontend to use POST for related events
Jonathan Buttner 2020-08-26 09:25:45 -04:00 committed by GitHub
parent 63265b6f57
commit 4042f82035
19 changed files with 808 additions and 446 deletions
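Taken together, these commits switch the related events and alerts routes from GET to POST so that an optional KQL filter can be sent in the request body while pagination parameters stay in the query string. A minimal sketch of the new request shape from a caller's perspective (the entity ID, filter string, and helper name are illustrative, not part of this change):

// Sketch only: the entity ID and the KQL filter are hypothetical example values.
async function fetchFilteredRelatedEvents(entityID: string) {
  const response = await fetch(`/api/endpoint/resolver/${entityID}/events?events=100`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'kbn-xsrf': 'xxx' },
    // the body is optional; when present it may carry a KQL `filter` string
    body: JSON.stringify({ filter: 'not event.category:"registry"' }),
  });
  return response.json();
}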


@ -169,6 +169,7 @@ describe('data generator', () => {
const childrenPerNode = 3;
const generations = 3;
const relatedAlerts = 4;
beforeEach(() => {
tree = generator.generateTree({
alwaysGenMaxChildrenPerNode: true,
@ -182,6 +183,7 @@ describe('data generator', () => {
{ category: RelatedEventCategory.File, count: 2 },
{ category: RelatedEventCategory.Network, count: 1 },
],
relatedEventsOrdered: true,
relatedAlerts,
ancestryArraySize: ANCESTRY_LIMIT,
});
@ -212,6 +214,14 @@ describe('data generator', () => {
}
};
it('creates related events in ascending order', () => {
// the order should not change since it should already be in ascending order
const relatedEventsAsc = _.cloneDeep(tree.origin.relatedEvents).sort(
(event1, event2) => event1['@timestamp'] - event2['@timestamp']
);
expect(tree.origin.relatedEvents).toStrictEqual(relatedEventsAsc);
});
it('has ancestry array defined', () => {
expect(tree.origin.lifecycle[0].process.Ext!.ancestry!.length).toBe(ANCESTRY_LIMIT);
for (const event of tree.allEvents) {


@ -302,6 +302,12 @@ export interface TreeOptions {
generations?: number;
children?: number;
relatedEvents?: RelatedEventInfo[] | number;
/**
* If true, the related events will be created with timestamps that preserve the
* generation order, meaning each event will always have a timestamp less than
* that of the next related event
*/
relatedEventsOrdered?: boolean;
relatedAlerts?: number;
percentWithRelated?: number;
percentTerminated?: number;
@ -322,6 +328,7 @@ export function getTreeOptionsWithDef(options?: TreeOptions): TreeOptionDefaults
generations: options?.generations ?? 2,
children: options?.children ?? 2,
relatedEvents: options?.relatedEvents ?? 5,
relatedEventsOrdered: options?.relatedEventsOrdered ?? false,
relatedAlerts: options?.relatedAlerts ?? 3,
percentWithRelated: options?.percentWithRelated ?? 30,
percentTerminated: options?.percentTerminated ?? 100,
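With the defaults above, relatedEventsOrdered is off unless a caller opts in. A sketch of generating a tree with ordered related events, mirroring the test setup earlier in this diff (option values are arbitrary, and the EndpointDocGenerator / RelatedEventCategory imports from generate_data are assumed):

// Illustrative only; option values are arbitrary and mirror the test fixtures above.
const generator = new EndpointDocGenerator('seed');
const tree = generator.generateTree({
  generations: 2,
  children: 2,
  relatedEvents: [{ category: RelatedEventCategory.File, count: 2 }],
  relatedEventsOrdered: true, // related-event timestamps will be monotonically increasing
  relatedAlerts: 1,
  percentWithRelated: 100,
  percentTerminated: 100,
});
// tree.origin.relatedEvents is now sorted by '@timestamp' in ascending order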
@ -809,7 +816,8 @@ export class EndpointDocGenerator {
for (const relatedEvent of this.relatedEventsGenerator(
node,
opts.relatedEvents,
secBeforeEvent
secBeforeEvent,
opts.relatedEventsOrdered
)) {
eventList.push(relatedEvent);
}
@ -877,6 +885,8 @@ export class EndpointDocGenerator {
addRelatedAlerts(ancestor, numAlertsPerNode, processDuration, events);
}
}
timestamp = timestamp + 1000;
events.push(
this.generateAlert(
timestamp,
@ -961,7 +971,12 @@ export class EndpointDocGenerator {
});
}
if (this.randomN(100) < opts.percentWithRelated) {
yield* this.relatedEventsGenerator(child, opts.relatedEvents, processDuration);
yield* this.relatedEventsGenerator(
child,
opts.relatedEvents,
processDuration,
opts.relatedEventsOrdered
);
yield* this.relatedAlertsGenerator(child, opts.relatedAlerts, processDuration);
}
}
@ -973,13 +988,17 @@ export class EndpointDocGenerator {
* @param relatedEvents - can be an array of RelatedEventInfo objects describing the related events that should be generated for each process node
* or a number which defines the number of related events and will default to random categories
* @param processDuration - maximum number of seconds after process event that related event timestamp can be
* @param ordered - if true the events will have increasing timestamps, otherwise each timestamp will be random but
* guaranteed to be greater than or equal to that of the originating event
*/
public *relatedEventsGenerator(
node: Event,
relatedEvents: RelatedEventInfo[] | number = 10,
processDuration: number = 6 * 3600
processDuration: number = 6 * 3600,
ordered: boolean = false
) {
let relatedEventsInfo: RelatedEventInfo[];
let ts = node['@timestamp'] + 1;
if (typeof relatedEvents === 'number') {
relatedEventsInfo = [{ category: RelatedEventCategory.Random, count: relatedEvents }];
} else {
@ -995,7 +1014,12 @@ export class EndpointDocGenerator {
eventInfo = OTHER_EVENT_CATEGORIES[event.category];
}
const ts = node['@timestamp'] + this.randomN(processDuration) * 1000;
if (ordered) {
ts += this.randomN(processDuration) * 1000;
} else {
ts = node['@timestamp'] + this.randomN(processDuration) * 1000;
}
yield this.generateEvent({
timestamp: ts,
entityID: node.process.entity_id,
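Put another way, the ordered flag makes the running ts value only ever grow from one related event to the next, while the unordered path re-derives each timestamp from the node's own timestamp. A standalone sketch of the two strategies (randomN here stands in for the generator's seeded random helper; this is not the generator code itself):

// Standalone illustration; `randomN` stands in for the generator's seeded random helper.
function nextRelatedEventTimestamp(
  nodeTimestamp: number,
  previousTs: number,
  processDuration: number,
  ordered: boolean,
  randomN: (n: number) => number
): number {
  if (ordered) {
    // keep adding to the previous related event's timestamp so the order is preserved
    return previousTs + randomN(processDuration) * 1000;
  }
  // otherwise each related event gets an independent random offset from the node's timestamp
  return nodeTimestamp + randomN(processDuration) * 1000;
}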


@ -33,6 +33,11 @@ export const validateEvents = {
afterEvent: schema.maybe(schema.string()),
legacyEndpointID: schema.maybe(schema.string({ minLength: 1 })),
}),
body: schema.nullable(
schema.object({
filter: schema.maybe(schema.string()),
})
),
};
/**
@ -45,6 +50,11 @@ export const validateAlerts = {
afterAlert: schema.maybe(schema.string()),
legacyEndpointID: schema.maybe(schema.string({ minLength: 1 })),
}),
body: schema.nullable(
schema.object({
filter: schema.maybe(schema.string()),
})
),
};
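Because the body is wrapped in schema.nullable, a request with no body (or an explicit null) still validates, and when a body is present the filter field itself remains optional. Both of the payloads below would be accepted (the KQL expression is just an example):

// Example payloads only; the KQL expression is illustrative.
const bodyWithFilter = { filter: 'event.category:"file"' };
const bodyWithoutFilter = null; // also valid, since the body schema is nullable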
/**


@ -25,7 +25,7 @@ export function dataAccessLayerFactory(
* Used to get non-process related events for a node.
*/
async relatedEvents(entityID: string): Promise<ResolverRelatedEvents> {
return context.services.http.get(`/api/endpoint/resolver/${entityID}/events`, {
return context.services.http.post(`/api/endpoint/resolver/${entityID}/events`, {
query: { events: 100 },
});
},


@ -24,7 +24,7 @@ import { handleEntities } from './resolver/entity';
export function registerResolverRoutes(router: IRouter, endpointAppContext: EndpointAppContext) {
const log = endpointAppContext.logFactory.get('resolver');
router.get(
router.post(
{
path: '/api/endpoint/resolver/{id}/events',
validate: validateEvents,
@ -33,7 +33,7 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp
handleEvents(log, endpointAppContext)
);
router.get(
router.post(
{
path: '/api/endpoint/resolver/{id}/alerts',
validate: validateAlerts,


@ -14,11 +14,16 @@ import { EndpointAppContext } from '../../types';
export function handleAlerts(
log: Logger,
endpointAppContext: EndpointAppContext
): RequestHandler<TypeOf<typeof validateAlerts.params>, TypeOf<typeof validateAlerts.query>> {
): RequestHandler<
TypeOf<typeof validateAlerts.params>,
TypeOf<typeof validateAlerts.query>,
TypeOf<typeof validateAlerts.body>
> {
return async (context, req, res) => {
const {
params: { id },
query: { alerts, afterAlert, legacyEndpointID: endpointID },
body,
} = req;
try {
const client = context.core.elasticsearch.legacy.client;
@ -26,7 +31,7 @@ export function handleAlerts(
const fetcher = new Fetcher(client, id, eventsIndexPattern, alertsIndexPattern, endpointID);
return res.ok({
body: await fetcher.alerts(alerts, afterAlert),
body: await fetcher.alerts(alerts, afterAlert, body?.filter),
});
} catch (err) {
log.warn(err);


@ -14,11 +14,16 @@ import { EndpointAppContext } from '../../types';
export function handleEvents(
log: Logger,
endpointAppContext: EndpointAppContext
): RequestHandler<TypeOf<typeof validateEvents.params>, TypeOf<typeof validateEvents.query>> {
): RequestHandler<
TypeOf<typeof validateEvents.params>,
TypeOf<typeof validateEvents.query>,
TypeOf<typeof validateEvents.body>
> {
return async (context, req, res) => {
const {
params: { id },
query: { events, afterEvent, legacyEndpointID: endpointID },
body,
} = req;
try {
const client = context.core.elasticsearch.legacy.client;
@ -26,7 +31,7 @@ export function handleEvents(
const fetcher = new Fetcher(client, id, eventsIndexPattern, alertsIndexPattern, endpointID);
return res.ok({
body: await fetcher.events(events, afterEvent),
body: await fetcher.events(events, afterEvent, body?.filter),
});
} catch (err) {
log.warn(err);


@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SearchResponse } from 'elasticsearch';
import { esKuery } from '../../../../../../../../src/plugins/data/server';
import { ResolverEvent } from '../../../../../common/endpoint/types';
import { ResolverQuery } from './base';
import { PaginationBuilder } from '../utils/pagination';
@ -13,12 +14,17 @@ import { JsonObject } from '../../../../../../../../src/plugins/kibana_utils/com
* Builds a query for retrieving alerts for a node.
*/
export class AlertsQuery extends ResolverQuery<ResolverEvent[]> {
private readonly kqlQuery: JsonObject[] = [];
constructor(
private readonly pagination: PaginationBuilder,
indexPattern: string | string[],
endpointID?: string
endpointID?: string,
kql?: string
) {
super(indexPattern, endpointID);
if (kql) {
this.kqlQuery.push(esKuery.toElasticsearchQuery(esKuery.fromKueryExpression(kql)));
}
}
protected legacyQuery(endpointID: string, uniquePIDs: string[]): JsonObject {
@ -26,6 +32,7 @@ export class AlertsQuery extends ResolverQuery<ResolverEvent[]> {
query: {
bool: {
filter: [
...this.kqlQuery,
{
terms: { 'endgame.unique_pid': uniquePIDs },
},
@ -38,7 +45,7 @@ export class AlertsQuery extends ResolverQuery<ResolverEvent[]> {
],
},
},
...this.pagination.buildQueryFields('endgame.serial_event_id'),
...this.pagination.buildQueryFields('endgame.serial_event_id', 'asc'),
};
}
@ -47,6 +54,7 @@ export class AlertsQuery extends ResolverQuery<ResolverEvent[]> {
query: {
bool: {
filter: [
...this.kqlQuery,
{
terms: { 'process.entity_id': entityIDs },
},
@ -56,7 +64,7 @@ export class AlertsQuery extends ResolverQuery<ResolverEvent[]> {
],
},
},
...this.pagination.buildQueryFields('event.id'),
...this.pagination.buildQueryFields('event.id', 'asc'),
};
}


@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SearchResponse } from 'elasticsearch';
import { esKuery } from '../../../../../../../../src/plugins/data/server';
import { ResolverEvent } from '../../../../../common/endpoint/types';
import { ResolverQuery } from './base';
import { PaginationBuilder } from '../utils/pagination';
@ -13,12 +14,18 @@ import { JsonObject } from '../../../../../../../../src/plugins/kibana_utils/com
* Builds a query for retrieving related events for a node.
*/
export class EventsQuery extends ResolverQuery<ResolverEvent[]> {
private readonly kqlQuery: JsonObject[] = [];
constructor(
private readonly pagination: PaginationBuilder,
indexPattern: string | string[],
endpointID?: string
endpointID?: string,
kql?: string
) {
super(indexPattern, endpointID);
if (kql) {
this.kqlQuery.push(esKuery.toElasticsearchQuery(esKuery.fromKueryExpression(kql)));
}
}
protected legacyQuery(endpointID: string, uniquePIDs: string[]): JsonObject {
@ -26,6 +33,7 @@ export class EventsQuery extends ResolverQuery<ResolverEvent[]> {
query: {
bool: {
filter: [
...this.kqlQuery,
{
terms: { 'endgame.unique_pid': uniquePIDs },
},
@ -45,7 +53,7 @@ export class EventsQuery extends ResolverQuery<ResolverEvent[]> {
],
},
},
...this.pagination.buildQueryFields('endgame.serial_event_id'),
...this.pagination.buildQueryFields('endgame.serial_event_id', 'desc'),
};
}
@ -54,6 +62,7 @@ export class EventsQuery extends ResolverQuery<ResolverEvent[]> {
query: {
bool: {
filter: [
...this.kqlQuery,
{
terms: { 'process.entity_id': entityIDs },
},
@ -70,7 +79,7 @@ export class EventsQuery extends ResolverQuery<ResolverEvent[]> {
],
},
},
...this.pagination.buildQueryFields('event.id'),
...this.pagination.buildQueryFields('event.id', 'desc'),
};
}
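For context on the kqlQuery field used by both query classes: esKuery.fromKueryExpression parses the KQL string into an AST, and esKuery.toElasticsearchQuery converts that AST into an Elasticsearch DSL clause, which is then spread into the bool.filter array of each query. A rough sketch of the transformation (the exact DSL emitted can vary by expression):

// Same import path used by the query classes above.
import { esKuery } from '../../../../../../../../src/plugins/data/server';

const kql = 'event.category:"driver"'; // example filter string
const dsl = esKuery.toElasticsearchQuery(esKuery.fromKueryExpression(kql));
// dsl is approximately:
// { bool: { should: [{ match_phrase: { 'event.category': 'driver' } }], minimum_should_match: 1 } }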


@ -13,23 +13,35 @@ import { PaginationBuilder } from './pagination';
import { QueryInfo } from '../queries/multi_searcher';
import { SingleQueryHandler } from './fetch';
/**
* Parameters for RelatedAlertsQueryHandler
*/
export interface RelatedAlertsParams {
limit: number;
entityID: string;
indexPattern: string;
after?: string;
legacyEndpointID?: string;
filter?: string;
}
/**
* Requests related alerts for the given node.
*/
export class RelatedAlertsQueryHandler implements SingleQueryHandler<ResolverRelatedAlerts> {
private relatedAlerts: ResolverRelatedAlerts | undefined;
private readonly query: AlertsQuery;
constructor(
private readonly limit: number,
private readonly entityID: string,
after: string | undefined,
indexPattern: string,
legacyEndpointID: string | undefined
) {
private readonly limit: number;
private readonly entityID: string;
constructor(options: RelatedAlertsParams) {
this.limit = options.limit;
this.entityID = options.entityID;
this.query = new AlertsQuery(
PaginationBuilder.createBuilder(limit, after),
indexPattern,
legacyEndpointID
PaginationBuilder.createBuilder(this.limit, options.after),
options.indexPattern,
options.legacyEndpointID,
options.filter
);
}


@ -13,23 +13,36 @@ import { PaginationBuilder } from './pagination';
import { QueryInfo } from '../queries/multi_searcher';
import { SingleQueryHandler } from './fetch';
/**
* Parameters for the RelatedEventsQueryHandler
*/
export interface RelatedEventsParams {
limit: number;
entityID: string;
indexPattern: string;
after?: string;
legacyEndpointID?: string;
filter?: string;
}
/**
* This retrieves the related events for the origin node of a resolver tree.
*/
export class RelatedEventsQueryHandler implements SingleQueryHandler<ResolverRelatedEvents> {
private relatedEvents: ResolverRelatedEvents | undefined;
private readonly query: EventsQuery;
constructor(
private readonly limit: number,
private readonly entityID: string,
after: string | undefined,
indexPattern: string,
legacyEndpointID: string | undefined
) {
private readonly limit: number;
private readonly entityID: string;
constructor(options: RelatedEventsParams) {
this.limit = options.limit;
this.entityID = options.entityID;
this.query = new EventsQuery(
PaginationBuilder.createBuilder(limit, after),
indexPattern,
legacyEndpointID
PaginationBuilder.createBuilder(this.limit, options.after),
options.indexPattern,
options.legacyEndpointID,
options.filter
);
}


@ -110,21 +110,21 @@ export class Fetcher {
this.endpointID
);
const eventsHandler = new RelatedEventsQueryHandler(
options.events,
this.id,
options.afterEvent,
this.eventsIndexPattern,
this.endpointID
);
const eventsHandler = new RelatedEventsQueryHandler({
limit: options.events,
entityID: this.id,
after: options.afterEvent,
indexPattern: this.eventsIndexPattern,
legacyEndpointID: this.endpointID,
});
const alertsHandler = new RelatedAlertsQueryHandler(
options.alerts,
this.id,
options.afterAlert,
this.alertsIndexPattern,
this.endpointID
);
const alertsHandler = new RelatedAlertsQueryHandler({
limit: options.alerts,
entityID: this.id,
after: options.afterAlert,
indexPattern: this.alertsIndexPattern,
legacyEndpointID: this.endpointID,
});
// we need to get the start events first because the API request defines how many nodes to return and we don't want
// to count or limit ourselves based on the other lifecycle events (end, etc)
@ -228,17 +228,24 @@ export class Fetcher {
/**
* Retrieves the related events for the origin node.
*
* @param limit the upper bound number of related events to return
* @param limit the upper bound number of related events to return. The limit is applied after the cursor is used to
* skip the previous results.
* @param after a cursor to use as the starting point for retrieving related events
* @param filter a kql query for filtering the results
*/
public async events(limit: number, after?: string): Promise<ResolverRelatedEvents> {
const eventsHandler = new RelatedEventsQueryHandler(
public async events(
limit: number,
after?: string,
filter?: string
): Promise<ResolverRelatedEvents> {
const eventsHandler = new RelatedEventsQueryHandler({
limit,
this.id,
entityID: this.id,
after,
this.eventsIndexPattern,
this.endpointID
);
indexPattern: this.eventsIndexPattern,
legacyEndpointID: this.endpointID,
filter,
});
return eventsHandler.search(this.client);
}
@ -246,17 +253,24 @@ export class Fetcher {
/**
* Retrieves the alerts for the origin node.
*
* @param limit the upper bound number of alerts to return
* @param limit the upper bound number of alerts to return. The limit is applied after the cursor is used to
* skip the previous results.
* @param after a cursor to use as the starting point for retrieving alerts
* @param filter a kql query string for filtering the results
*/
public async alerts(limit: number, after?: string): Promise<ResolverRelatedAlerts> {
const alertsHandler = new RelatedAlertsQueryHandler(
public async alerts(
limit: number,
after?: string,
filter?: string
): Promise<ResolverRelatedAlerts> {
const alertsHandler = new RelatedAlertsQueryHandler({
limit,
this.id,
entityID: this.id,
after,
this.alertsIndexPattern,
this.endpointID
);
indexPattern: this.alertsIndexPattern,
legacyEndpointID: this.endpointID,
filter,
});
return alertsHandler.search(this.client);
}
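With these signatures in place, the route handlers shown earlier simply forward body?.filter to the fetcher. A condensed sketch of that call path (the client, index patterns, and entity ID are placeholders, not values from this change):

// Condensed sketch; `client`, the index patterns, and the entity ID are placeholders.
const fetcher = new Fetcher(client, 'some-entity-id', eventsIndexPattern, alertsIndexPattern);
// up to 100 related events, no cursor, keeping only file events
const relatedEvents = await fetcher.events(100, undefined, 'event.category:"file"');
// up to 100 alerts, no cursor, excluding a specific alert by its event.id
const relatedAlerts = await fetcher.alerts(100, undefined, 'not event.id:"some-id"');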


@ -42,5 +42,19 @@ describe('Pagination', () => {
const fields = builder.buildQueryFields('');
expect(fields).not.toHaveProperty('search_after');
});
it('creates the sort field in ascending order', () => {
const builder = PaginationBuilder.createBuilder(100);
expect(builder.buildQueryFields('a').sort).toContainEqual({ '@timestamp': 'asc' });
expect(builder.buildQueryFields('', 'asc').sort).toContainEqual({ '@timestamp': 'asc' });
});
it('creates the sort field in descending order', () => {
const builder = PaginationBuilder.createBuilder(100);
expect(builder.buildQueryFields('a', 'desc').sort).toStrictEqual([
{ '@timestamp': 'desc' },
{ a: 'asc' },
]);
});
});
});


@ -16,6 +16,11 @@ interface PaginationCursor {
eventID: string;
}
/**
* The sort direction for the timestamp field
*/
export type TimeSortDirection = 'asc' | 'desc';
/**
* Defines the sorting fields for queries that leverage pagination
*/
@ -158,10 +163,14 @@ export class PaginationBuilder {
* Helper that creates an object for adding the pagination fields to a query
*
* @param tiebreaker a unique field to use as the tiebreaker for the search_after
* @param timeSort the timestamp sort direction
* @returns an object containing the pagination information
*/
buildQueryFieldsAsInterface(tiebreaker: string): PaginationFields {
const sort: SortFields = [{ '@timestamp': 'asc' }, { [tiebreaker]: 'asc' }];
buildQueryFieldsAsInterface(
tiebreaker: string,
timeSort: TimeSortDirection = 'asc'
): PaginationFields {
const sort: SortFields = [{ '@timestamp': timeSort }, { [tiebreaker]: 'asc' }];
let searchAfter: SearchAfterFields | undefined;
if (this.timestamp && this.eventID) {
searchAfter = [this.timestamp, this.eventID];
@ -174,11 +183,12 @@ export class PaginationBuilder {
* Creates an object for adding the pagination fields to a query
*
* @param tiebreaker a unique field to use as the tiebreaker for the search_after
* @param timeSort the timestamp sort direction
* @returns an object containing the pagination information
*/
buildQueryFields(tiebreaker: string): JsonObject {
buildQueryFields(tiebreaker: string, timeSort: TimeSortDirection = 'asc'): JsonObject {
const fields: JsonObject = {};
const pagination = this.buildQueryFieldsAsInterface(tiebreaker);
const pagination = this.buildQueryFieldsAsInterface(tiebreaker, timeSort);
fields.sort = pagination.sort;
fields.size = pagination.size;
if (pagination.searchAfter) {
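The timeSort argument only flips the @timestamp portion of the sort; the tiebreaker field stays ascending, which is what the updated pagination tests assert. For a builder created without a cursor the result is roughly:

// Assuming a builder with no cursor, as in the tests above.
const builder = PaginationBuilder.createBuilder(100);
const fields = builder.buildQueryFields('event.id', 'desc');
// fields is roughly { sort: [{ '@timestamp': 'desc' }, { 'event.id': 'asc' }], size: 100 }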


@ -0,0 +1,159 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import expect from '@kbn/expect';
import { eventId } from '../../../../plugins/security_solution/common/endpoint/models/event';
import { ResolverRelatedAlerts } from '../../../../plugins/security_solution/common/endpoint/types';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
Tree,
RelatedEventCategory,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { Options, GeneratedTrees } from '../../services/resolver';
import { compareArrays } from './common';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
const resolver = getService('resolverGenerator');
const relatedEventsToGen = [
{ category: RelatedEventCategory.Driver, count: 2 },
{ category: RelatedEventCategory.File, count: 1 },
{ category: RelatedEventCategory.Registry, count: 1 },
];
const relatedAlerts = 4;
let resolverTrees: GeneratedTrees;
let tree: Tree;
const treeOptions: Options = {
ancestors: 5,
relatedEvents: relatedEventsToGen,
relatedAlerts,
children: 3,
generations: 2,
percentTerminated: 100,
percentWithRelated: 100,
numTrees: 1,
alwaysGenMaxChildrenPerNode: true,
ancestryArraySize: 2,
};
describe('related alerts route', () => {
before(async () => {
resolverTrees = await resolver.createTrees(treeOptions);
// we only requested a single tree so there's only 1 in the array
tree = resolverTrees.trees[0];
});
after(async () => {
await resolver.deleteData(resolverTrees);
});
it('should not find any alerts', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.post(`/api/endpoint/resolver/5555/alerts`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.nextAlert).to.eql(null);
expect(body.alerts).to.be.empty();
});
it('should return details for the root node', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/alerts`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.alerts.length).to.eql(4);
compareArrays(tree.origin.relatedAlerts, body.alerts, true);
expect(body.nextAlert).to.eql(null);
});
it('should allow alerts to be filtered', async () => {
const filter = `not event.id:"${tree.origin.relatedAlerts[0].event.id}"`;
const { body }: { body: ResolverRelatedAlerts } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/alerts`)
.set('kbn-xsrf', 'xxx')
.send({
filter,
})
.expect(200);
expect(body.alerts.length).to.eql(3);
compareArrays(tree.origin.relatedAlerts, body.alerts);
expect(body.nextAlert).to.eql(null);
// should not find the alert that we excluded in the filter
expect(
body.alerts.find((bodyAlert) => {
return eventId(bodyAlert) === tree.origin.relatedAlerts[0].event.id;
})
).to.not.be.ok();
});
it('should return paginated results for the root node', async () => {
let { body }: { body: ResolverRelatedAlerts } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/alerts?alerts=2`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.alerts.length).to.eql(2);
compareArrays(tree.origin.relatedAlerts, body.alerts);
expect(body.nextAlert).not.to.eql(null);
({ body } = await supertest
.post(
`/api/endpoint/resolver/${tree.origin.id}/alerts?alerts=2&afterAlert=${body.nextAlert}`
)
.set('kbn-xsrf', 'xxx')
.expect(200));
expect(body.alerts.length).to.eql(2);
compareArrays(tree.origin.relatedAlerts, body.alerts);
expect(body.nextAlert).to.not.eql(null);
({ body } = await supertest
.post(
`/api/endpoint/resolver/${tree.origin.id}/alerts?alerts=2&afterAlert=${body.nextAlert}`
)
.set('kbn-xsrf', 'xxx')
.expect(200));
expect(body.alerts).to.be.empty();
expect(body.nextAlert).to.eql(null);
});
it('should return the first page of information when the cursor is invalid', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/alerts?afterAlert=blah`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.alerts.length).to.eql(4);
compareArrays(tree.origin.relatedAlerts, body.alerts, true);
expect(body.nextAlert).to.eql(null);
});
it('should sort the alerts in ascending order', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/alerts`)
.set('kbn-xsrf', 'xxx')
.expect(200);
const sortedAsc = [...tree.origin.relatedAlerts].sort((event1, event2) => {
// this sorts the events by timestamp in ascending order
const diff = event1['@timestamp'] - event2['@timestamp'];
// if the timestamps are the same, fallback to the event.id sorted in
// ascending order
if (diff === 0) {
if (event1.event.id < event2.event.id) {
return -1;
}
if (event1.event.id > event2.event.id) {
return 1;
}
return 0;
}
return diff;
});
expect(body.alerts.length).to.eql(4);
for (let i = 0; i < body.alerts.length; i++) {
expect(eventId(body.alerts[i])).to.equal(sortedAsc[i].event.id);
}
});
});
}


@ -0,0 +1,222 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import _ from 'lodash';
import expect from '@kbn/expect';
import {
ResolverChildNode,
ResolverLifecycleNode,
ResolverEvent,
ResolverNodeStats,
} from '../../../../plugins/security_solution/common/endpoint/types';
import {
parentEntityId,
eventId,
} from '../../../../plugins/security_solution/common/endpoint/models/event';
import {
Event,
Tree,
TreeNode,
RelatedEventInfo,
categoryMapping,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
/**
* Check that the given lifecycle is in the resolver tree's corresponding map
*
* @param node a lifecycle node containing the start and end events for a node
* @param nodeMap a map of entity_ids to nodes to look for the passed in `node`
*/
const expectLifecycleNodeInMap = (node: ResolverLifecycleNode, nodeMap: Map<string, TreeNode>) => {
const genNode = nodeMap.get(node.entityID);
expect(genNode).to.be.ok();
compareArrays(genNode!.lifecycle, node.lifecycle, true);
};
/**
* Verify that all the ancestor nodes are valid and optionally have parents.
*
* @param ancestors an array of ancestors
* @param tree the generated resolver tree as the source of truth
* @param verifyLastParent a boolean indicating whether to check the last ancestor. If the ancestors array intentionally
* does not contain all the ancestors, the last one will not have the parent
*/
export const verifyAncestry = (
ancestors: ResolverLifecycleNode[],
tree: Tree,
verifyLastParent: boolean
) => {
// group the ancestors by their entity_id mapped to a lifecycle node
const groupedAncestors = _.groupBy(ancestors, (ancestor) => ancestor.entityID);
// group by parent entity_id
const groupedAncestorsParent = _.groupBy(ancestors, (ancestor) =>
parentEntityId(ancestor.lifecycle[0])
);
// make sure there aren't any nodes with the same entity_id
expect(Object.keys(groupedAncestors).length).to.eql(ancestors.length);
// make sure there aren't any nodes with the same parent entity_id
expect(Object.keys(groupedAncestorsParent).length).to.eql(ancestors.length);
// make sure each of the ancestors' lifecycle events are in the generated tree
for (const node of ancestors) {
expectLifecycleNodeInMap(node, tree.ancestry);
}
// start at the origin which is always the first element of the array and make sure we have a connection
// using parent id between each of the nodes
let foundParents = 0;
let node = ancestors[0];
for (let i = 0; i < ancestors.length; i++) {
const parentID = parentEntityId(node.lifecycle[0]);
if (parentID !== undefined) {
const nextNode = groupedAncestors[parentID];
if (!nextNode) {
break;
}
// the grouped nodes should only have a single entry since each entity is unique
node = nextNode[0];
}
foundParents++;
}
if (verifyLastParent) {
expect(foundParents).to.eql(ancestors.length);
} else {
// if we only retrieved a portion of all the ancestors then the most distant grandparent's parent will not necessarily
// be in the results
expect(foundParents).to.eql(ancestors.length - 1);
}
};
/**
* Retrieves the most distant ancestor in the given array.
*
* @param ancestors an array of ancestor nodes
*/
export const retrieveDistantAncestor = (ancestors: ResolverLifecycleNode[]) => {
// group the ancestors by their entity_id mapped to a lifecycle node
const groupedAncestors = _.groupBy(ancestors, (ancestor) => ancestor.entityID);
let node = ancestors[0];
for (let i = 0; i < ancestors.length; i++) {
const parentID = parentEntityId(node.lifecycle[0]);
if (parentID !== undefined) {
const nextNode = groupedAncestors[parentID];
if (nextNode) {
node = nextNode[0];
} else {
return node;
}
}
}
return node;
};
/**
* Verify that the children nodes are correct
*
* @param children the children nodes
* @param tree the generated resolver tree as the source of truth
* @param numberOfParents an optional number used to verify that there are a certain number of parents in the children array
* @param childrenPerParent an optional number used to verify that each parent has a certain number of children
*/
export const verifyChildren = (
children: ResolverChildNode[],
tree: Tree,
numberOfParents?: number,
childrenPerParent?: number
) => {
// group the children by their entity_id mapped to a child node
const groupedChildren = _.groupBy(children, (child) => child.entityID);
// make sure each child is unique
expect(Object.keys(groupedChildren).length).to.eql(children.length);
if (numberOfParents !== undefined) {
const groupParent = _.groupBy(children, (child) => parentEntityId(child.lifecycle[0]));
expect(Object.keys(groupParent).length).to.eql(numberOfParents);
if (childrenPerParent !== undefined) {
Object.values(groupParent).forEach((childNodes) =>
expect(childNodes.length).to.be(childrenPerParent)
);
}
}
children.forEach((child) => {
expectLifecycleNodeInMap(child, tree.children);
});
};
/**
* Compare an array of events returned from an API with an array of events generated
*
* @param expected an array to use as the source of truth
* @param toTest the array to test against the source of truth
* @param lengthCheck an optional flag to check that the arrays are the same length
*/
export const compareArrays = (
expected: Event[],
toTest: ResolverEvent[],
lengthCheck: boolean = false
) => {
if (lengthCheck) {
expect(expected.length).to.eql(toTest.length);
}
toTest.forEach((toTestEvent) => {
expect(
expected.find((arrEvent) => {
// we're only checking that the event ids are the same here. The reason we can't check the entire document
// is because ingest pipelines are used to add fields to the document when it is received by elasticsearch,
// therefore it will not be the same as the document created by the generator
return eventId(toTestEvent) === eventId(arrEvent);
})
).to.be.ok();
});
};
/**
* Verifies that the stats received from ES for a node reflect the categories of events that the generator created.
*
* @param stats the stats received from ES for a particular node
* @param categories the related event info used when generating the resolver tree
*/
export const verifyStats = (
stats: ResolverNodeStats | undefined,
categories: RelatedEventInfo[],
relatedAlerts: number
) => {
expect(stats).to.not.be(undefined);
let totalExpEvents = 0;
for (const cat of categories) {
const ecsCategories = categoryMapping[cat.category];
if (Array.isArray(ecsCategories)) {
// if there are multiple ecs categories used to define a related event, the count for all of them should be the same
// and they should equal what is defined in the categories used to generate the related events
for (const ecsCat of ecsCategories) {
expect(stats?.events.byCategory[ecsCat]).to.be(cat.count);
}
} else {
expect(stats?.events.byCategory[ecsCategories]).to.be(cat.count);
}
totalExpEvents += cat.count;
}
expect(stats?.events.total).to.be(totalExpEvents);
expect(stats?.totalAlerts).to.be(relatedAlerts);
};
/**
* A helper function for verifying the stats information for an array of nodes.
*
* @param nodes an array of lifecycle nodes that should have a stats field defined
* @param categories the related event info used when generating the resolver tree
*/
export const verifyLifecycleStats = (
nodes: ResolverLifecycleNode[],
categories: RelatedEventInfo[],
relatedAlerts: number
) => {
for (const node of nodes) {
verifyStats(node.stats, categories, relatedAlerts);
}
};


@ -0,0 +1,213 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import expect from '@kbn/expect';
import { eventId } from '../../../../plugins/security_solution/common/endpoint/models/event';
import { ResolverRelatedEvents } from '../../../../plugins/security_solution/common/endpoint/types';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
Tree,
RelatedEventCategory,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { Options, GeneratedTrees } from '../../services/resolver';
import { compareArrays } from './common';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
const resolver = getService('resolverGenerator');
const esArchiver = getService('esArchiver');
const relatedEventsToGen = [
{ category: RelatedEventCategory.Driver, count: 2 },
{ category: RelatedEventCategory.File, count: 1 },
{ category: RelatedEventCategory.Registry, count: 1 },
];
const relatedAlerts = 4;
let resolverTrees: GeneratedTrees;
let tree: Tree;
const treeOptions: Options = {
ancestors: 5,
relatedEvents: relatedEventsToGen,
relatedEventsOrdered: true,
relatedAlerts,
children: 3,
generations: 2,
percentTerminated: 100,
percentWithRelated: 100,
numTrees: 1,
alwaysGenMaxChildrenPerNode: true,
ancestryArraySize: 2,
};
describe('related events route', () => {
before(async () => {
await esArchiver.load('endpoint/resolver/api_feature');
resolverTrees = await resolver.createTrees(treeOptions);
// we only requested a single tree so there's only 1 in the array
tree = resolverTrees.trees[0];
});
after(async () => {
await resolver.deleteData(resolverTrees);
await esArchiver.unload('endpoint/resolver/api_feature');
});
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
const entityID = '94042';
const cursor = 'eyJ0aW1lc3RhbXAiOjE1ODE0NTYyNTUwMDAsImV2ZW50SUQiOiI5NDA0MyJ9';
it('should return details for the root node', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=${endpointID}`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.events.length).to.eql(1);
expect(body.entityID).to.eql(entityID);
expect(body.nextEvent).to.eql(null);
});
it('returns no values when there is no more data', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
// after is set to the document id of the last event so there shouldn't be any more after it
.post(
`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=${endpointID}&afterEvent=${cursor}`
)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.events).be.empty();
expect(body.entityID).to.eql(entityID);
expect(body.nextEvent).to.eql(null);
});
it('should return the first page of information when the cursor is invalid', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(
`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=${endpointID}&afterEvent=blah`
)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.entityID).to.eql(entityID);
expect(body.nextEvent).to.eql(null);
});
it('should return no results for an invalid endpoint ID', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=foo`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.nextEvent).to.eql(null);
expect(body.entityID).to.eql(entityID);
expect(body.events).to.be.empty();
});
it('should error on invalid pagination values', async () => {
await supertest
.post(`/api/endpoint/resolver/${entityID}/events?events=0`)
.set('kbn-xsrf', 'xxx')
.expect(400);
await supertest
.post(`/api/endpoint/resolver/${entityID}/events?events=20000`)
.set('kbn-xsrf', 'xxx')
.expect(400);
await supertest
.post(`/api/endpoint/resolver/${entityID}/events?events=-1`)
.set('kbn-xsrf', 'xxx')
.expect(400);
});
});
describe('endpoint events', () => {
it('should not find any events', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/5555/events`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.nextEvent).to.eql(null);
expect(body.events).to.be.empty();
});
it('should return details for the root node', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/events`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.events.length).to.eql(4);
compareArrays(tree.origin.relatedEvents, body.events, true);
expect(body.nextEvent).to.eql(null);
});
it('should allow for the events to be filtered', async () => {
const filter = `event.category:"${RelatedEventCategory.Driver}"`;
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/events`)
.set('kbn-xsrf', 'xxx')
.send({
filter,
})
.expect(200);
expect(body.events.length).to.eql(2);
compareArrays(tree.origin.relatedEvents, body.events);
expect(body.nextEvent).to.eql(null);
for (const event of body.events) {
expect(event.event?.category).to.be(RelatedEventCategory.Driver);
}
});
it('should return paginated results for the root node', async () => {
let { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/events?events=2`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.events.length).to.eql(2);
compareArrays(tree.origin.relatedEvents, body.events);
expect(body.nextEvent).not.to.eql(null);
({ body } = await supertest
.post(
`/api/endpoint/resolver/${tree.origin.id}/events?events=2&afterEvent=${body.nextEvent}`
)
.set('kbn-xsrf', 'xxx')
.expect(200));
expect(body.events.length).to.eql(2);
compareArrays(tree.origin.relatedEvents, body.events);
expect(body.nextEvent).to.not.eql(null);
({ body } = await supertest
.post(
`/api/endpoint/resolver/${tree.origin.id}/events?events=2&afterEvent=${body.nextEvent}`
)
.set('kbn-xsrf', 'xxx')
.expect(200));
expect(body.events).to.be.empty();
expect(body.nextEvent).to.eql(null);
});
it('should return the first page of information when the cursor is invalid', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/events?afterEvent=blah`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.events.length).to.eql(4);
compareArrays(tree.origin.relatedEvents, body.events, true);
expect(body.nextEvent).to.eql(null);
});
it('should sort the events in descending order', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.post(`/api/endpoint/resolver/${tree.origin.id}/events`)
.set('kbn-xsrf', 'xxx')
.expect(200);
expect(body.events.length).to.eql(4);
// the related events are generated in the order they are defined in the array, so the newest one is
// the last element; reverse the array to compare against the descending results
const relatedEvents = tree.origin.relatedEvents.reverse();
for (let i = 0; i < body.events.length; i++) {
expect(body.events[i].event?.category).to.equal(relatedEvents[i].event.category);
expect(eventId(body.events[i])).to.equal(relatedEvents[i].event.id);
}
});
});
});
}

View file

@ -12,5 +12,7 @@ export default function (providerContext: FtrProviderContext) {
loadTestFile(require.resolve('./entity_id'));
loadTestFile(require.resolve('./children'));
loadTestFile(require.resolve('./tree'));
loadTestFile(require.resolve('./alerts'));
loadTestFile(require.resolve('./events'));
});
}


@ -3,232 +3,28 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import _ from 'lodash';
import expect from '@kbn/expect';
import {
ResolverChildNode,
ResolverLifecycleNode,
ResolverAncestry,
ResolverEvent,
ResolverRelatedEvents,
ResolverChildren,
ResolverTree,
LegacyEndpointEvent,
ResolverNodeStats,
ResolverRelatedAlerts,
} from '../../../../plugins/security_solution/common/endpoint/types';
import {
parentEntityId,
eventId,
} from '../../../../plugins/security_solution/common/endpoint/models/event';
import { parentEntityId } from '../../../../plugins/security_solution/common/endpoint/models/event';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
Event,
Tree,
TreeNode,
RelatedEventCategory,
RelatedEventInfo,
categoryMapping,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { Options, GeneratedTrees } from '../../services/resolver';
/**
* Check that the given lifecycle is in the resolver tree's corresponding map
*
* @param node a lifecycle node containing the start and end events for a node
* @param nodeMap a map of entity_ids to nodes to look for the passed in `node`
*/
const expectLifecycleNodeInMap = (node: ResolverLifecycleNode, nodeMap: Map<string, TreeNode>) => {
const genNode = nodeMap.get(node.entityID);
expect(genNode).to.be.ok();
compareArrays(genNode!.lifecycle, node.lifecycle, true);
};
/**
* Verify that all the ancestor nodes are valid and optionally have parents.
*
* @param ancestors an array of ancestors
* @param tree the generated resolver tree as the source of truth
* @param verifyLastParent a boolean indicating whether to check the last ancestor. If the ancestors array intentionally
* does not contain all the ancestors, the last one will not have the parent
*/
const verifyAncestry = (
ancestors: ResolverLifecycleNode[],
tree: Tree,
verifyLastParent: boolean
) => {
// group the ancestors by their entity_id mapped to a lifecycle node
const groupedAncestors = _.groupBy(ancestors, (ancestor) => ancestor.entityID);
// group by parent entity_id
const groupedAncestorsParent = _.groupBy(ancestors, (ancestor) =>
parentEntityId(ancestor.lifecycle[0])
);
// make sure there aren't any nodes with the same entity_id
expect(Object.keys(groupedAncestors).length).to.eql(ancestors.length);
// make sure there aren't any nodes with the same parent entity_id
expect(Object.keys(groupedAncestorsParent).length).to.eql(ancestors.length);
// make sure each of the ancestors' lifecycle events are in the generated tree
for (const node of ancestors) {
expectLifecycleNodeInMap(node, tree.ancestry);
}
// start at the origin which is always the first element of the array and make sure we have a connection
// using parent id between each of the nodes
let foundParents = 0;
let node = ancestors[0];
for (let i = 0; i < ancestors.length; i++) {
const parentID = parentEntityId(node.lifecycle[0]);
if (parentID !== undefined) {
const nextNode = groupedAncestors[parentID];
if (!nextNode) {
break;
}
// the grouped nodes should only have a single entry since each entity is unique
node = nextNode[0];
}
foundParents++;
}
if (verifyLastParent) {
expect(foundParents).to.eql(ancestors.length);
} else {
// if we only retrieved a portion of all the ancestors then the most distant grandparent's parent will not necessarily
// be in the results
expect(foundParents).to.eql(ancestors.length - 1);
}
};
/**
* Retrieves the most distant ancestor in the given array.
*
* @param ancestors an array of ancestor nodes
*/
const retrieveDistantAncestor = (ancestors: ResolverLifecycleNode[]) => {
// group the ancestors by their entity_id mapped to a lifecycle node
const groupedAncestors = _.groupBy(ancestors, (ancestor) => ancestor.entityID);
let node = ancestors[0];
for (let i = 0; i < ancestors.length; i++) {
const parentID = parentEntityId(node.lifecycle[0]);
if (parentID !== undefined) {
const nextNode = groupedAncestors[parentID];
if (nextNode) {
node = nextNode[0];
} else {
return node;
}
}
}
return node;
};
/**
* Verify that the children nodes are correct
*
* @param children the children nodes
* @param tree the generated resolver tree as the source of truth
* @param numberOfParents an optional number to compare that are a certain number of parents in the children array
* @param childrenPerParent an optional number to compare that there are a certain number of children for each parent
*/
const verifyChildren = (
children: ResolverChildNode[],
tree: Tree,
numberOfParents?: number,
childrenPerParent?: number
) => {
// group the children by their entity_id mapped to a child node
const groupedChildren = _.groupBy(children, (child) => child.entityID);
// make sure each child is unique
expect(Object.keys(groupedChildren).length).to.eql(children.length);
if (numberOfParents !== undefined) {
const groupParent = _.groupBy(children, (child) => parentEntityId(child.lifecycle[0]));
expect(Object.keys(groupParent).length).to.eql(numberOfParents);
if (childrenPerParent !== undefined) {
Object.values(groupParent).forEach((childNodes) =>
expect(childNodes.length).to.be(childrenPerParent)
);
}
}
children.forEach((child) => {
expectLifecycleNodeInMap(child, tree.children);
});
};
/**
* Compare an array of events returned from an API with an array of events generated
*
* @param expected an array to use as the source of truth
* @param toTest the array to test against the source of truth
* @param lengthCheck an optional flag to check that the arrays are the same length
*/
const compareArrays = (
expected: Event[],
toTest: ResolverEvent[],
lengthCheck: boolean = false
) => {
if (lengthCheck) {
expect(expected.length).to.eql(toTest.length);
}
toTest.forEach((toTestEvent) => {
expect(
expected.find((arrEvent) => {
// we're only checking that the event ids are the same here. The reason we can't check the entire document
// is because ingest pipelines are used to add fields to the document when it is received by elasticsearch,
// therefore it will not be the same as the document created by the generator
return eventId(toTestEvent) === eventId(arrEvent);
})
).to.be.ok();
});
};
/**
* Verifies that the stats received from ES for a node reflect the categories of events that the generator created.
*
* @param relatedEvents the related events received for a particular node
* @param categories the related event info used when generating the resolver tree
*/
const verifyStats = (
stats: ResolverNodeStats | undefined,
categories: RelatedEventInfo[],
relatedAlerts: number
) => {
expect(stats).to.not.be(undefined);
let totalExpEvents = 0;
for (const cat of categories) {
const ecsCategories = categoryMapping[cat.category];
if (Array.isArray(ecsCategories)) {
// if there are multiple ecs categories used to define a related event, the count for all of them should be the same
// and they should equal what is defined in the categories used to generate the related events
for (const ecsCat of ecsCategories) {
expect(stats?.events.byCategory[ecsCat]).to.be(cat.count);
}
} else {
expect(stats?.events.byCategory[ecsCategories]).to.be(cat.count);
}
totalExpEvents += cat.count;
}
expect(stats?.events.total).to.be(totalExpEvents);
expect(stats?.totalAlerts);
};
/**
* A helper function for verifying the stats information an array of nodes.
*
* @param nodes an array of lifecycle nodes that should have a stats field defined
* @param categories the related event info used when generating the resolver tree
*/
const verifyLifecycleStats = (
nodes: ResolverLifecycleNode[],
categories: RelatedEventInfo[],
relatedAlerts: number
) => {
for (const node of nodes) {
verifyStats(node.stats, categories, relatedAlerts);
}
};
import {
compareArrays,
verifyAncestry,
retrieveDistantAncestor,
verifyChildren,
verifyLifecycleStats,
verifyStats,
} from './common';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
@ -269,170 +65,6 @@ export default function ({ getService }: FtrProviderContext) {
await esArchiver.unload('endpoint/resolver/api_feature');
});
describe('related alerts route', () => {
describe('endpoint events', () => {
it('should not find any alerts', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.get(`/api/endpoint/resolver/5555/alerts`)
.expect(200);
expect(body.nextAlert).to.eql(null);
expect(body.alerts).to.be.empty();
});
it('should return details for the root node', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/alerts`)
.expect(200);
expect(body.alerts.length).to.eql(4);
compareArrays(tree.origin.relatedAlerts, body.alerts, true);
expect(body.nextAlert).to.eql(null);
});
it('should return paginated results for the root node', async () => {
let { body }: { body: ResolverRelatedAlerts } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/alerts?alerts=2`)
.expect(200);
expect(body.alerts.length).to.eql(2);
compareArrays(tree.origin.relatedAlerts, body.alerts);
expect(body.nextAlert).not.to.eql(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}/alerts?alerts=2&afterAlert=${body.nextAlert}`
)
.expect(200));
expect(body.alerts.length).to.eql(2);
compareArrays(tree.origin.relatedAlerts, body.alerts);
expect(body.nextAlert).to.not.eql(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}/alerts?alerts=2&afterAlert=${body.nextAlert}`
)
.expect(200));
expect(body.alerts).to.be.empty();
expect(body.nextAlert).to.eql(null);
});
it('should return the first page of information when the cursor is invalid', async () => {
const { body }: { body: ResolverRelatedAlerts } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/alerts?afterAlert=blah`)
.expect(200);
expect(body.alerts.length).to.eql(4);
compareArrays(tree.origin.relatedAlerts, body.alerts, true);
expect(body.nextAlert).to.eql(null);
});
});
});
describe('related events route', () => {
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
const entityID = '94042';
const cursor = 'eyJ0aW1lc3RhbXAiOjE1ODE0NTYyNTUwMDAsImV2ZW50SUQiOiI5NDA0MyJ9';
it('should return details for the root node', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.get(`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=${endpointID}`)
.expect(200);
expect(body.events.length).to.eql(1);
expect(body.entityID).to.eql(entityID);
expect(body.nextEvent).to.eql(null);
});
it('returns no values when there is no more data', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
// after is set to the document id of the last event so there shouldn't be any more after it
.get(
`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=${endpointID}&afterEvent=${cursor}`
)
.expect(200);
expect(body.events).be.empty();
expect(body.entityID).to.eql(entityID);
expect(body.nextEvent).to.eql(null);
});
it('should return the first page of information when the cursor is invalid', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.get(
`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=${endpointID}&afterEvent=blah`
)
.expect(200);
expect(body.entityID).to.eql(entityID);
expect(body.nextEvent).to.eql(null);
});
it('should return no results for an invalid endpoint ID', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.get(`/api/endpoint/resolver/${entityID}/events?legacyEndpointID=foo`)
.expect(200);
expect(body.nextEvent).to.eql(null);
expect(body.entityID).to.eql(entityID);
expect(body.events).to.be.empty();
});
it('should error on invalid pagination values', async () => {
await supertest.get(`/api/endpoint/resolver/${entityID}/events?events=0`).expect(400);
await supertest.get(`/api/endpoint/resolver/${entityID}/events?events=20000`).expect(400);
await supertest.get(`/api/endpoint/resolver/${entityID}/events?events=-1`).expect(400);
});
});
describe('endpoint events', () => {
it('should not find any events', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.get(`/api/endpoint/resolver/5555/events`)
.expect(200);
expect(body.nextEvent).to.eql(null);
expect(body.events).to.be.empty();
});
it('should return details for the root node', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/events`)
.expect(200);
expect(body.events.length).to.eql(4);
compareArrays(tree.origin.relatedEvents, body.events, true);
expect(body.nextEvent).to.eql(null);
});
it('should return paginated results for the root node', async () => {
let { body }: { body: ResolverRelatedEvents } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/events?events=2`)
.expect(200);
expect(body.events.length).to.eql(2);
compareArrays(tree.origin.relatedEvents, body.events);
expect(body.nextEvent).not.to.eql(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}/events?events=2&afterEvent=${body.nextEvent}`
)
.expect(200));
expect(body.events.length).to.eql(2);
compareArrays(tree.origin.relatedEvents, body.events);
expect(body.nextEvent).to.not.eql(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}/events?events=2&afterEvent=${body.nextEvent}`
)
.expect(200));
expect(body.events).to.be.empty();
expect(body.nextEvent).to.eql(null);
});
it('should return the first page of information when the cursor is invalid', async () => {
const { body }: { body: ResolverRelatedEvents } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/events?afterEvent=blah`)
.expect(200);
expect(body.events.length).to.eql(4);
compareArrays(tree.origin.relatedEvents, body.events, true);
expect(body.nextEvent).to.eql(null);
});
});
});
describe('ancestry events route', () => {
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';