[Security Solution][Resolver] Allow a configurable entity_id field (#81679)

* Trying to flesh out new tree route

* Working on the descendants query

* Almost working descendants

* Possible solution for aggs

* Working aggregations extraction

* Working on the ancestry array for descendants

* Making changes to the unique id for ancestry

* Implementing ancestry functionality

* Deleting the multiple edges

* Fleshing out the descendants loop for levels

* Writing tests for ancestors and descendants

* Fixing type errors and writing more tests

* Renaming validation variable and deprecating old tree routes

* Renaming tree integration test file

* Adding some integration tests

* Fixing ancestry to handle multiple nodes in the request and writing more tests

* Adding more tests

* Renaming new tree to handler file

* Renaming new tree directory

* Adding more unit tests

* Using doc value fields and working on types

* Adding comments and more tests

* Fixing timestamp test issue

* Adding more comments

* Fixing timestamp test issue take 2

* Adding id, parent, and name fields to the top level response

* Fixing generator start and end time generation

* Adding more comments

* Revert "Fixing generator start and end time generation"

This reverts commit 9e9abf68a6.

* Adding test for time

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
Jonathan Buttner 2020-11-24 11:57:23 -05:00 committed by GitHub
parent 24f262b9ca
commit 5e183dd46d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
17 changed files with 3099 additions and 310 deletions

View file

@ -27,7 +27,6 @@ interface Node {
}
describe('data generator data streams', () => {
// these tests cast the result of the generate methods so that we can specifically compare the `data_stream` fields
it('creates a generator with default data streams', () => {
const generator = new EndpointDocGenerator('seed');
expect(generator.generateHostMetadata().data_stream).toEqual({
@ -268,6 +267,31 @@ describe('data generator', () => {
}
};
it('sets the start and end times correctly', () => {
  const startOfEpoch = new Date(0);
  // seed the min/max search with the first event's timestamp; a missing
  // timestamp falls back to the epoch, which the assertions below reject
  let startTime = new Date(timestampSafeVersion(tree.allEvents[0]) ?? startOfEpoch);
  expect(startTime).not.toEqual(startOfEpoch);
  let endTime = new Date(timestampSafeVersion(tree.allEvents[0]) ?? startOfEpoch);
  // bug fix: this assertion previously re-checked startTime instead of endTime
  expect(endTime).not.toEqual(startOfEpoch);
  for (const event of tree.allEvents) {
    const currentEventTime = new Date(timestampSafeVersion(event) ?? startOfEpoch);
    expect(currentEventTime).not.toEqual(startOfEpoch);
    // every event must fall within the tree's reported time range
    expect(tree.startTime.getTime()).toBeLessThanOrEqual(currentEventTime.getTime());
    expect(tree.endTime.getTime()).toBeGreaterThanOrEqual(currentEventTime.getTime());
    if (currentEventTime < startTime) {
      startTime = currentEventTime;
    }
    if (currentEventTime > endTime) {
      endTime = currentEventTime;
    }
  }
  // the independently computed min/max must match what the generator reported
  expect(startTime).toEqual(tree.startTime);
  expect(endTime).toEqual(tree.endTime);
  expect(endTime.getTime() - startTime.getTime()).toBeGreaterThanOrEqual(0);
});
it('creates related events in ascending order', () => {
// the order should not change since it should already be in ascending order
const relatedEventsAsc = _.cloneDeep(tree.origin.relatedEvents).sort(

View file

@ -317,6 +317,8 @@ export interface Tree {
* All events from children, ancestry, origin, and the alert in a single array
*/
allEvents: Event[];
startTime: Date;
endTime: Date;
}
export interface TreeOptions {
@ -718,6 +720,35 @@ export class EndpointDocGenerator {
};
}
/**
 * Computes the earliest and latest timestamps across a set of events.
 *
 * Events with an undefined timestamp are skipped. The seed value is the first
 * event's timestamp; when the event list is empty (or the first event lacks a
 * timestamp) the current time is used instead.
 */
private static getStartEndTimes(events: Event[]): { startTime: Date; endTime: Date } {
  const seed =
    events.length > 0
      ? timestampSafeVersion(events[0]) ?? new Date().getTime()
      : new Date().getTime();
  let earliest = seed;
  let latest = seed;
  for (const event of events) {
    const eventTime = timestampSafeVersion(event);
    if (eventTime === undefined) {
      continue;
    }
    earliest = Math.min(earliest, eventTime);
    latest = Math.max(latest, eventTime);
  }
  return {
    startTime: new Date(earliest),
    endTime: new Date(latest),
  };
}
/**
* This generates a full resolver tree and keeps the entire tree in memory. This is useful for tests that want
* to compare results from elasticsearch with the actual events created by this generator. Because all the events
@ -815,12 +846,17 @@ export class EndpointDocGenerator {
const childrenByParent = groupNodesByParent(childrenNodes);
const levels = createLevels(childrenByParent, [], childrenByParent.get(origin.id));
const allEvents = [...ancestry, ...children];
const { startTime, endTime } = EndpointDocGenerator.getStartEndTimes(allEvents);
return {
children: childrenNodes,
ancestry: ancestryNodes,
allEvents: [...ancestry, ...children],
allEvents,
origin,
childrenLevels: levels,
startTime,
endTime,
};
}

View file

@ -7,9 +7,9 @@
import { schema } from '@kbn/config-schema';
/**
* Used to validate GET requests for a complete resolver tree.
* Used to validate GET requests for a complete resolver tree centered around an entity_id.
*/
export const validateTree = {
export const validateTreeEntityID = {
params: schema.object({ id: schema.string({ minLength: 1 }) }),
query: schema.object({
children: schema.number({ defaultValue: 200, min: 0, max: 10000 }),
@ -23,6 +23,44 @@ export const validateTree = {
}),
};
/**
 * Used to validate POST requests for a complete resolver tree
 * (registered at POST /api/endpoint/resolver/tree).
 */
export const validateTree = {
body: schema.object({
/**
 * If the ancestry field is specified this field will be ignored
 *
 * If the ancestry field is specified we have a much more performant way of retrieving levels so let's not limit
 * the number of levels that come back in that scenario. We could still limit it, but what we'd likely have to do
 * is get all the levels back like we normally do with the ancestry array, bucket them together by level, and then
 * remove the levels that exceeded the requested number which seems kind of wasteful.
 */
descendantLevels: schema.number({ defaultValue: 20, min: 0, max: 1000 }),
// upper bound on the total number of descendant nodes to retrieve
descendants: schema.number({ defaultValue: 1000, min: 0, max: 10000 }),
// if the ancestry array isn't specified allowing 200 might be too high
ancestors: schema.number({ defaultValue: 200, min: 0, max: 10000 }),
// time window to constrain the Elasticsearch queries
// (string-typed; presumably date math / ISO8601 — validated downstream)
timerange: schema.object({
from: schema.string(),
to: schema.string(),
}),
// field names (paths within the indexed documents) describing how nodes
// in the graph are identified and linked to their parents
schema: schema.object({
// the ancestry field is optional
ancestry: schema.maybe(schema.string({ minLength: 1 })),
id: schema.string({ minLength: 1 }),
name: schema.maybe(schema.string({ minLength: 1 })),
parent: schema.string({ minLength: 1 }),
}),
// only allowing strings and numbers for node IDs because Elasticsearch only allows those types for collapsing:
// https://www.elastic.co/guide/en/elasticsearch/reference/current/collapse-search-results.html
// We use collapsing in our Elasticsearch queries for the tree api
nodes: schema.arrayOf(schema.oneOf([schema.string({ minLength: 1 }), schema.number()]), {
minSize: 1,
}),
// index patterns to search within; at least one is required
indexPatterns: schema.arrayOf(schema.string(), { minSize: 1 }),
}),
};
/**
* Used to validate POST requests for `/resolver/events` api.
*/

View file

@ -78,6 +78,56 @@ export interface EventStats {
byCategory: Record<string, number>;
}
/**
* Represents the object structure of a returned document when using doc value fields to filter the fields
* returned in a document from an Elasticsearch query.
*
* Here is an example:
*
* {
* "_index": ".ds-logs-endpoint.events.process-default-000001",
* "_id": "bc7brnUBxO0aE7QcCVHo",
* "_score": null,
* "fields": { <----------- The FieldsObject represents this portion
* "@timestamp": [
* "2020-11-09T21:13:25.246Z"
* ],
* "process.name": "explorer.exe",
* "process.parent.entity_id": [
* "0i17c2m22c"
* ],
* "process.Ext.ancestry": [ <------------ Notice that the keys are flattened
* "0i17c2m22c",
* "2z9j8dlx72",
* "oj61pr6g62",
* "x0leonbrc9"
* ],
* "process.entity_id": [
* "6k8waczi22"
* ]
* },
* "sort": [
* 0,
* 1604956405246
* ]
* }
*/
export interface FieldsObject {
[key: string]: ECSField<number | string>;
}
/**
* A node in a resolver graph.
*/
export interface ResolverNode {
data: FieldsObject;
id: string | number;
// the very root node might not have the parent field defined
parent?: string | number;
name?: string;
stats: EventStats;
}
/**
* Statistical information for a node in a resolver tree.
*/

View file

@ -7,16 +7,18 @@
import { IRouter } from 'kibana/server';
import { EndpointAppContext } from '../types';
import {
validateTree,
validateTreeEntityID,
validateEvents,
validateChildren,
validateAncestry,
validateAlerts,
validateEntities,
validateTree,
} from '../../../common/endpoint/schema/resolver';
import { handleChildren } from './resolver/children';
import { handleAncestry } from './resolver/ancestry';
import { handleTree } from './resolver/tree';
import { handleTree as handleTreeEntityID } from './resolver/tree';
import { handleTree } from './resolver/tree/handler';
import { handleAlerts } from './resolver/alerts';
import { handleEntities } from './resolver/entity';
import { handleEvents } from './resolver/events';
@ -24,6 +26,15 @@ import { handleEvents } from './resolver/events';
export function registerResolverRoutes(router: IRouter, endpointAppContext: EndpointAppContext) {
const log = endpointAppContext.logFactory.get('resolver');
router.post(
{
path: '/api/endpoint/resolver/tree',
validate: validateTree,
options: { authRequired: true },
},
handleTree(log)
);
router.post(
{
path: '/api/endpoint/resolver/events',
@ -33,6 +44,9 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp
handleEvents(log)
);
/**
* @deprecated will be removed because it is not used
*/
router.post(
{
path: '/api/endpoint/resolver/{id}/alerts',
@ -42,6 +56,9 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp
handleAlerts(log, endpointAppContext)
);
/**
* @deprecated use the /resolver/tree api instead
*/
router.get(
{
path: '/api/endpoint/resolver/{id}/children',
@ -51,6 +68,9 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp
handleChildren(log, endpointAppContext)
);
/**
* @deprecated use the /resolver/tree api instead
*/
router.get(
{
path: '/api/endpoint/resolver/{id}/ancestry',
@ -60,13 +80,16 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp
handleAncestry(log, endpointAppContext)
);
/**
* @deprecated use the /resolver/tree api instead
*/
router.get(
{
path: '/api/endpoint/resolver/{id}',
validate: validateTree,
validate: validateTreeEntityID,
options: { authRequired: true },
},
handleTree(log, endpointAppContext)
handleTreeEntityID(log, endpointAppContext)
);
/**

View file

@ -7,14 +7,17 @@
import { RequestHandler, Logger } from 'kibana/server';
import { TypeOf } from '@kbn/config-schema';
import { eventsIndexPattern, alertsIndexPattern } from '../../../../common/endpoint/constants';
import { validateTree } from '../../../../common/endpoint/schema/resolver';
import { validateTreeEntityID } from '../../../../common/endpoint/schema/resolver';
import { Fetcher } from './utils/fetch';
import { EndpointAppContext } from '../../types';
export function handleTree(
log: Logger,
endpointAppContext: EndpointAppContext
): RequestHandler<TypeOf<typeof validateTree.params>, TypeOf<typeof validateTree.query>> {
): RequestHandler<
TypeOf<typeof validateTreeEntityID.params>,
TypeOf<typeof validateTreeEntityID.query>
> {
return async (context, req, res) => {
try {
const client = context.core.elasticsearch.legacy.client;

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { RequestHandler, Logger } from 'kibana/server';
import { TypeOf } from '@kbn/config-schema';
import { validateTree } from '../../../../../common/endpoint/schema/resolver';
import { Fetcher } from './utils/fetch';
/**
 * Creates the request handler for the POST /resolver/tree API, which builds a
 * resolver tree for the node IDs specified in the request body.
 */
export function handleTree(
  log: Logger
): RequestHandler<unknown, unknown, TypeOf<typeof validateTree.body>> {
  return async (context, req, res) => {
    try {
      const esClient = context.core.elasticsearch.client;
      const fetcher = new Fetcher(esClient);
      return res.ok({
        body: await fetcher.tree(req.body),
      });
    } catch (err) {
      // log the failure but hide the details from the caller
      log.warn(err);
      return res.internalError({ body: 'Error retrieving tree.' });
    }
  };
}

View file

@ -0,0 +1,206 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { SearchResponse } from 'elasticsearch';
import { ApiResponse } from '@elastic/elasticsearch';
import { IScopedClusterClient } from 'src/core/server';
import { FieldsObject } from '../../../../../../common/endpoint/types';
import { JsonObject, JsonValue } from '../../../../../../../../../src/plugins/kibana_utils/common';
import { NodeID, Schema, Timerange, docValueFields } from '../utils/index';
// Dependencies needed to construct a descendants query.
interface DescendantsParams {
schema: Schema;
indexPatterns: string | string[];
timerange: Timerange;
}
/**
 * Builds a query for retrieving descendants of a node.
 *
 * Two strategies are used: when the schema defines an ancestry array field,
 * queryWithAncestryArray() retrieves multiple levels of descendants in a single
 * search; otherwise query() retrieves only the direct children of the requested
 * nodes.
 */
export class DescendantsQuery {
private readonly schema: Schema;
private readonly indexPatterns: string | string[];
private readonly timerange: Timerange;
// doc value fields derived from the schema; since _source is disabled these
// determine which fields come back in each hit's `fields` object
private readonly docValueFields: JsonValue[];
constructor({ schema, indexPatterns, timerange }: DescendantsParams) {
this.docValueFields = docValueFields(schema);
this.schema = schema;
this.indexPatterns = indexPatterns;
this.timerange = timerange;
}
/**
 * Query for direct children only: process events whose parent field matches one
 * of the requested node IDs. Used when the schema has no ancestry field.
 */
private query(nodes: NodeID[], size: number): JsonObject {
return {
// skip returning _source; the needed fields come back via docvalue_fields
_source: false,
docvalue_fields: this.docValueFields,
size,
// collapse so only one document per entity ID is returned
collapse: {
field: this.schema.id,
},
sort: [{ '@timestamp': 'asc' }],
query: {
bool: {
filter: [
{
range: {
'@timestamp': {
gte: this.timerange.from,
lte: this.timerange.to,
format: 'strict_date_optional_time',
},
},
},
{
// direct children: their parent field matches a requested node ID
terms: { [this.schema.parent]: nodes },
},
{
exists: {
field: this.schema.id,
},
},
{
exists: {
field: this.schema.parent,
},
},
{
term: { 'event.category': 'process' },
},
{
term: { 'event.kind': 'event' },
},
],
},
},
};
}
/**
 * Query for multiple levels of descendants at once: process events whose
 * ancestry array contains one of the requested node IDs, sorted so results come
 * back in breadth-first order (see the script comment below).
 */
private queryWithAncestryArray(nodes: NodeID[], ancestryField: string, size: number): JsonObject {
return {
_source: false,
docvalue_fields: this.docValueFields,
size,
collapse: {
field: this.schema.id,
},
sort: [
{
_script: {
type: 'number',
script: {
/**
 * This script is used to sort the returned documents in a breadth first order so that we return all of
 * a single level of nodes before returning the next level of nodes. This is needed because using the
 * ancestry array could result in the search going deep before going wide depending on when the nodes
 * spawned their children. If a node spawns a child before it's sibling is spawned then the child would
 * be found before the sibling because by default the sort was on timestamp ascending.
 *
 * NOTE(review): the script reads params._source directly; `_source: false` above only suppresses
 * _source in the response, not in the script context — confirm the ancestry field is not excluded
 * from _source in the index mappings.
 */
source: `
Map ancestryToIndex = [:];
List sourceAncestryArray = params._source.${ancestryField};
int length = sourceAncestryArray.length;
for (int i = 0; i < length; i++) {
ancestryToIndex[sourceAncestryArray[i]] = i;
}
for (String id : params.ids) {
def index = ancestryToIndex[id];
if (index != null) {
return index;
}
}
return -1;
`,
params: {
ids: nodes,
},
},
},
},
{ '@timestamp': 'asc' },
],
query: {
bool: {
filter: [
{
range: {
'@timestamp': {
gte: this.timerange.from,
lte: this.timerange.to,
format: 'strict_date_optional_time',
},
},
},
{
// any node whose ancestry array contains a requested ID is a descendant
terms: {
[ancestryField]: nodes,
},
},
{
exists: {
field: this.schema.id,
},
},
{
exists: {
field: this.schema.parent,
},
},
{
exists: {
field: ancestryField,
},
},
{
term: { 'event.category': 'process' },
},
{
term: { 'event.kind': 'event' },
},
],
},
},
};
}
/**
 * Searches for descendant nodes matching the specified IDs.
 *
 * @param client for making requests to Elasticsearch
 * @param nodes the unique IDs to search for in Elasticsearch
 * @param limit the upper limit of documents to be returned
 */
async search(
client: IScopedClusterClient,
nodes: NodeID[],
limit: number
): Promise<FieldsObject[]> {
if (nodes.length <= 0) {
return [];
}
let response: ApiResponse<SearchResponse<unknown>>;
// prefer the ancestry-array strategy when the schema supports it: one search
// can return multiple levels of descendants instead of only direct children
if (this.schema.ancestry) {
response = await client.asCurrentUser.search({
body: this.queryWithAncestryArray(nodes, this.schema.ancestry, limit),
index: this.indexPatterns,
});
} else {
response = await client.asCurrentUser.search({
body: this.query(nodes, limit),
index: this.indexPatterns,
});
}
/**
 * The returned values will look like:
 * [
 * { 'schema_id_value': <value>, 'schema_parent_value': <value> }
 * ]
 *
 * So the schema fields are flattened ('process.parent.entity_id')
 */
return response.body.hits.hits.map((hit) => hit.fields);
}
}

View file

@ -0,0 +1,101 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { SearchResponse } from 'elasticsearch';
import { ApiResponse } from '@elastic/elasticsearch';
import { IScopedClusterClient } from 'src/core/server';
import { FieldsObject } from '../../../../../../common/endpoint/types';
import { JsonObject, JsonValue } from '../../../../../../../../../src/plugins/kibana_utils/common';
import { NodeID, Schema, Timerange, docValueFields } from '../utils/index';
// Dependencies needed to construct a lifecycle query.
interface LifecycleParams {
schema: Schema;
indexPatterns: string | string[];
timerange: Timerange;
}
/**
 * Builds a query for retrieving the process lifecycle events for a set of
 * nodes — the documents whose ID field exactly matches the requested node IDs.
 * (The original header comment said "descendants", which was a copy-paste
 * error from DescendantsQuery.)
 */
export class LifecycleQuery {
private readonly schema: Schema;
private readonly indexPatterns: string | string[];
private readonly timerange: Timerange;
// doc value fields derived from the schema; since _source is disabled these
// determine which fields come back in each hit's `fields` object
private readonly docValueFields: JsonValue[];
constructor({ schema, indexPatterns, timerange }: LifecycleParams) {
this.docValueFields = docValueFields(schema);
this.schema = schema;
this.indexPatterns = indexPatterns;
this.timerange = timerange;
}
private query(nodes: NodeID[]): JsonObject {
return {
// skip returning _source; the needed fields come back via docvalue_fields
_source: false,
docvalue_fields: this.docValueFields,
// at most one collapsed document per requested node
size: nodes.length,
// collapse so only one document per entity ID is returned
collapse: {
field: this.schema.id,
},
sort: [{ '@timestamp': 'asc' }],
query: {
bool: {
filter: [
{
range: {
'@timestamp': {
gte: this.timerange.from,
lte: this.timerange.to,
format: 'strict_date_optional_time',
},
},
},
{
// exact ID match: these are the nodes themselves, not their children
terms: { [this.schema.id]: nodes },
},
{
exists: {
field: this.schema.id,
},
},
{
term: { 'event.category': 'process' },
},
{
term: { 'event.kind': 'event' },
},
],
},
},
};
}
/**
 * Searches for lifecycle events matching the specified node IDs.
 *
 * @param client for making requests to Elasticsearch
 * @param nodes the unique IDs to search for in Elasticsearch
 */
async search(client: IScopedClusterClient, nodes: NodeID[]): Promise<FieldsObject[]> {
if (nodes.length <= 0) {
return [];
}
const response: ApiResponse<SearchResponse<unknown>> = await client.asCurrentUser.search({
body: this.query(nodes),
index: this.indexPatterns,
});
/**
 * The returned values will look like:
 * [
 * { 'schema_id_value': <value>, 'schema_parent_value': <value> }
 * ]
 *
 * So the schema fields are flattened ('process.parent.entity_id')
 */
return response.body.hits.hits.map((hit) => hit.fields);
}
}

View file

@ -0,0 +1,139 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { SearchResponse } from 'elasticsearch';
import { ApiResponse } from '@elastic/elasticsearch';
import { IScopedClusterClient } from 'src/core/server';
import { JsonObject } from '../../../../../../../../../src/plugins/kibana_utils/common';
import { EventStats } from '../../../../../../common/endpoint/types';
import { NodeID, Schema, Timerange } from '../utils/index';
// Shape of a single terms-aggregation bucket returned by Elasticsearch.
interface AggBucket {
  key: string;
  doc_count: number;
}

interface CategoriesAgg extends AggBucket {
  /**
   * The reason categories is optional here is because if no data was returned in the query the categories aggregation
   * will not be defined on the response (because it's a sub aggregation).
   */
  categories?: {
    buckets?: AggBucket[];
  };
}

// Dependencies needed to construct a stats query.
interface StatsParams {
  schema: Schema;
  indexPatterns: string | string[];
  timerange: Timerange;
}

/**
 * Builds a query for retrieving related-event statistics (total event count and
 * per-event.category counts) for a set of nodes in a resolver graph.
 */
export class StatsQuery {
  private readonly schema: Schema;
  private readonly indexPatterns: string | string[];
  private readonly timerange: Timerange;
  constructor({ schema, indexPatterns, timerange }: StatsParams) {
    this.schema = schema;
    this.indexPatterns = indexPatterns;
    this.timerange = timerange;
  }

  private query(nodes: NodeID[]): JsonObject {
    return {
      // only the aggregation results are needed, not the matching documents
      size: 0,
      query: {
        bool: {
          filter: [
            {
              range: {
                '@timestamp': {
                  gte: this.timerange.from,
                  lte: this.timerange.to,
                  format: 'strict_date_optional_time',
                },
              },
            },
            {
              terms: { [this.schema.id]: nodes },
            },
            {
              term: { 'event.kind': 'event' },
            },
            {
              // process lifecycle events are retrieved by other queries and
              // should not be counted as "related" events here
              bool: {
                must_not: {
                  term: {
                    'event.category': 'process',
                  },
                },
              },
            },
          ],
        },
      },
      aggs: {
        ids: {
          // at most one bucket per requested node
          terms: { field: this.schema.id, size: nodes.length },
          aggs: {
            categories: {
              terms: { field: 'event.category', size: 1000 },
            },
          },
        },
      },
    };
  }

  /**
   * Converts a single node's aggregation bucket into an EventStats object.
   */
  private static getEventStats(catAgg: CategoriesAgg): EventStats {
    const total = catAgg.doc_count;
    if (!catAgg.categories?.buckets) {
      return {
        total,
        byCategory: {},
      };
    }
    const byCategory: Record<string, number> = catAgg.categories.buckets.reduce(
      (cumulative: Record<string, number>, bucket: AggBucket) => ({
        ...cumulative,
        [bucket.key]: bucket.doc_count,
      }),
      {}
    );
    return {
      total,
      byCategory,
    };
  }

  /**
   * Returns the related event statistics for a set of nodes.
   * @param client used to make requests to Elasticsearch
   * @param nodes an array of unique IDs representing nodes in a resolver graph
   */
  async search(client: IScopedClusterClient, nodes: NodeID[]): Promise<Record<string, EventStats>> {
    if (nodes.length <= 0) {
      return {};
    }
    // leaving unknown here because we don't actually need the hits part of the body
    const response: ApiResponse<SearchResponse<unknown>> = await client.asCurrentUser.search({
      body: this.query(nodes),
      index: this.indexPatterns,
    });
    // Fixed: fall back to an empty object when the aggregations are missing so
    // the declared return type is honored (the optional chaining previously
    // allowed an `undefined` to escape).
    return (
      response.body.aggregations?.ids?.buckets.reduce(
        (cumulative: Record<string, EventStats>, bucket: CategoriesAgg) => ({
          ...cumulative,
          [bucket.key]: StatsQuery.getEventStats(bucket),
        }),
        {}
      ) ?? {}
    );
  }
}

View file

@ -0,0 +1,707 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
Fetcher,
getAncestryAsArray,
getIDField,
getLeafNodes,
getNameField,
getParentField,
TreeOptions,
} from './fetch';
import { LifecycleQuery } from '../queries/lifecycle';
import { DescendantsQuery } from '../queries/descendants';
import { StatsQuery } from '../queries/stats';
import { IScopedClusterClient } from 'src/core/server';
import { elasticsearchServiceMock } from 'src/core/server/mocks';
import { FieldsObject, ResolverNode } from '../../../../../../common/endpoint/types';
import { Schema } from './index';
jest.mock('../queries/descendants');
jest.mock('../queries/lifecycle');
jest.mock('../queries/stats');
/**
 * Maps raw FieldsObject results into the ResolverNode shape the fetcher
 * returns. Stats are always empty because the StatsQuery mock in these tests
 * resolves to an empty object.
 */
function formatResponse(results: FieldsObject[], schema: Schema): ResolverNode[] {
  const toResolverNode = (fields: FieldsObject): ResolverNode => ({
    id: getIDField(fields, schema) ?? '',
    parent: getParentField(fields, schema),
    name: getNameField(fields, schema),
    data: fields,
    stats: {
      total: 0,
      byCategory: {},
    },
  });
  return results.map(toResolverNode);
}
describe('fetcher test', () => {
const schemaIDParent = {
id: 'id',
parent: 'parent',
};
const schemaIDParentAncestry = {
id: 'id',
parent: 'parent',
ancestry: 'ancestry',
};
const schemaIDParentName = {
id: 'id',
parent: 'parent',
name: 'name',
};
let client: jest.Mocked<IScopedClusterClient>;
beforeAll(() => {
StatsQuery.prototype.search = jest.fn().mockImplementation(async () => {
return {};
});
});
beforeEach(() => {
client = elasticsearchServiceMock.createScopedClusterClient();
});
describe('descendants', () => {
it('correctly exists loop when the search returns no results', async () => {
DescendantsQuery.prototype.search = jest.fn().mockImplementationOnce(async () => {
return [];
});
const options: TreeOptions = {
descendantLevels: 1,
descendants: 5,
ancestors: 0,
timerange: {
from: '',
to: '',
},
schema: {
id: '',
parent: '',
},
indexPatterns: [''],
nodes: ['a'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual([]);
});
it('exists the loop when the options specify no descendants', async () => {
const options: TreeOptions = {
descendantLevels: 0,
descendants: 0,
ancestors: 0,
timerange: {
from: '',
to: '',
},
schema: {
id: '',
parent: '',
},
indexPatterns: [''],
nodes: ['a'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual([]);
});
it('returns the correct results without the ancestry defined', async () => {
/**
.
0
1
2
3
4
5
*/
const level1 = [
{
id: '1',
parent: '0',
},
{
id: '3',
parent: '0',
},
];
const level2 = [
{
id: '2',
parent: '1',
},
{
id: '4',
parent: '3',
},
{
id: '5',
parent: '3',
},
];
DescendantsQuery.prototype.search = jest
.fn()
.mockImplementationOnce(async () => {
return level1;
})
.mockImplementationOnce(async () => {
return level2;
});
const options: TreeOptions = {
descendantLevels: 2,
descendants: 5,
ancestors: 0,
timerange: {
from: '',
to: '',
},
schema: schemaIDParent,
indexPatterns: [''],
nodes: ['0'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual(
formatResponse([...level1, ...level2], schemaIDParent)
);
});
});
describe('ancestors', () => {
it('correctly exits loop when the search returns no results', async () => {
LifecycleQuery.prototype.search = jest.fn().mockImplementationOnce(async () => {
return [];
});
const options: TreeOptions = {
descendantLevels: 0,
descendants: 0,
ancestors: 5,
timerange: {
from: '',
to: '',
},
schema: {
id: '',
parent: '',
},
indexPatterns: [''],
nodes: ['a'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual([]);
});
it('correctly exits loop when the options specify no ancestors', async () => {
LifecycleQuery.prototype.search = jest.fn().mockImplementationOnce(async () => {
throw new Error('should not have called this');
});
const options: TreeOptions = {
descendantLevels: 0,
descendants: 0,
ancestors: 0,
timerange: {
from: '',
to: '',
},
schema: {
id: '',
parent: '',
},
indexPatterns: [''],
nodes: ['a'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual([]);
});
it('correctly returns the ancestors when the number of levels has been reached', async () => {
LifecycleQuery.prototype.search = jest
.fn()
.mockImplementationOnce(async () => {
return [
{
id: '3',
parent: '2',
},
];
})
.mockImplementationOnce(async () => {
return [
{
id: '2',
parent: '1',
},
];
});
const options: TreeOptions = {
descendantLevels: 0,
descendants: 0,
ancestors: 2,
timerange: {
from: '',
to: '',
},
schema: schemaIDParent,
indexPatterns: [''],
nodes: ['3'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual(
formatResponse(
[
{ id: '3', parent: '2' },
{ id: '2', parent: '1' },
],
schemaIDParent
)
);
});
it('correctly adds name field to response', async () => {
LifecycleQuery.prototype.search = jest
.fn()
.mockImplementationOnce(async () => {
return [
{
id: '3',
parent: '2',
},
];
})
.mockImplementationOnce(async () => {
return [
{
id: '2',
parent: '1',
},
];
});
const options: TreeOptions = {
descendantLevels: 0,
descendants: 0,
ancestors: 2,
timerange: {
from: '',
to: '',
},
schema: schemaIDParentName,
indexPatterns: [''],
nodes: ['3'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual(
formatResponse(
[
{ id: '3', parent: '2' },
{ id: '2', parent: '1' },
],
schemaIDParentName
)
);
});
it('correctly returns the ancestors with ancestry arrays', async () => {
const node3 = {
ancestry: ['2', '1'],
id: '3',
parent: '2',
};
const node1 = {
ancestry: ['0'],
id: '1',
parent: '0',
};
const node2 = {
ancestry: ['1', '0'],
id: '2',
parent: '1',
};
LifecycleQuery.prototype.search = jest
.fn()
.mockImplementationOnce(async () => {
return [node3];
})
.mockImplementationOnce(async () => {
return [node1, node2];
});
const options: TreeOptions = {
descendantLevels: 0,
descendants: 0,
ancestors: 3,
timerange: {
from: '',
to: '',
},
schema: schemaIDParentAncestry,
indexPatterns: [''],
nodes: ['3'],
};
const fetcher = new Fetcher(client);
expect(await fetcher.tree(options)).toEqual(
formatResponse([node3, node1, node2], schemaIDParentAncestry)
);
});
});
describe('retrieving leaf nodes', () => {
it('correctly identifies the leaf nodes in a response without the ancestry field', () => {
/**
.
0
1
2
3
*/
const results = [
{
id: '1',
parent: '0',
},
{
id: '2',
parent: '0',
},
{
id: '3',
parent: '0',
},
];
const leaves = getLeafNodes(results, ['0'], { id: 'id', parent: 'parent' });
expect(leaves).toStrictEqual(['1', '2', '3']);
});
it('correctly ignores nodes without the proper fields', () => {
/**
.
0
1
2
*/
const results = [
{
id: '1',
parent: '0',
},
{
id: '2',
parent: '0',
},
{
idNotReal: '3',
parentNotReal: '0',
},
];
const leaves = getLeafNodes(results, ['0'], { id: 'id', parent: 'parent' });
expect(leaves).toStrictEqual(['1', '2']);
});
it('returns an empty response when the proper fields are not defined', () => {
const results = [
{
id: '1',
parentNotReal: '0',
},
{
id: '2',
parentNotReal: '0',
},
{
idNotReal: '3',
parent: '0',
},
];
const leaves = getLeafNodes(results, ['0'], { id: 'id', parent: 'parent' });
expect(leaves).toStrictEqual([]);
});
describe('with the ancestry field defined', () => {
it('correctly identifies the leaf nodes in a response with the ancestry field', () => {
/**
.
1
2
3
*/
const results = [
{
id: '1',
parent: '0',
ancestry: ['0', 'a'],
},
{
id: '2',
parent: '1',
ancestry: ['1', '0'],
},
{
id: '3',
parent: '0',
ancestry: ['0', 'a'],
},
];
const leaves = getLeafNodes(results, ['0'], {
id: 'id',
parent: 'parent',
ancestry: 'ancestry',
});
expect(leaves).toStrictEqual(['2']);
});
it('falls back to using parent field if it cannot find the ancestry field', () => {
/**
.
1
2
3
*/
const results = [
{
id: '1',
parent: '0',
ancestryNotValid: ['0', 'a'],
},
{
id: '2',
parent: '1',
},
{
id: '3',
parent: '0',
},
];
const leaves = getLeafNodes(results, ['0'], {
id: 'id',
parent: 'parent',
ancestry: 'ancestry',
});
expect(leaves).toStrictEqual(['1', '3']);
});
it('correctly identifies the leaf nodes with a tree with multiple leaves', () => {
/**
.
0
1
2
3
4
5
*/
const results = [
{
id: '1',
parent: '0',
ancestry: ['0', 'a'],
},
{
id: '2',
parent: '1',
ancestry: ['1', '0'],
},
{
id: '3',
parent: '0',
ancestry: ['0', 'a'],
},
{
id: '4',
parent: '3',
ancestry: ['3', '0'],
},
{
id: '5',
parent: '3',
ancestry: ['3', '0'],
},
];
const leaves = getLeafNodes(results, ['0'], {
id: 'id',
parent: 'parent',
ancestry: 'ancestry',
});
expect(leaves).toStrictEqual(['2', '4', '5']);
});
it('correctly identifies the leaf nodes with multiple queried nodes', () => {
/**
.
0
1
2
3
4
5
a
b
c
d
*/
const results = [
{
id: '1',
parent: '0',
ancestry: ['0'],
},
{
id: '2',
parent: '1',
ancestry: ['1', '0'],
},
{
id: '3',
parent: '0',
ancestry: ['0'],
},
{
id: '4',
parent: '3',
ancestry: ['3', '0'],
},
{
id: '5',
parent: '3',
ancestry: ['3', '0'],
},
{
id: 'b',
parent: 'a',
ancestry: ['a'],
},
{
id: 'c',
parent: 'b',
ancestry: ['b', 'a'],
},
{
id: 'd',
parent: 'b',
ancestry: ['b', 'a'],
},
];
const leaves = getLeafNodes(results, ['0', 'a'], {
id: 'id',
parent: 'parent',
ancestry: 'ancestry',
});
expect(leaves).toStrictEqual(['2', '4', '5', 'c', 'd']);
});
it('correctly identifies the leaf nodes with an unbalanced tree', () => {
  /**
   * Two trees, rooted at the queried nodes 0 and a:
   * 0                a
   * ├── 1            └── b
   * │   └── 2
   * └── 3
   *     ├── 4
   *     └── 5
   */
  const makeNode = (id: string, parent: string, ancestry: string[]) => ({ id, parent, ancestry });
  const results = [
    makeNode('1', '0', ['0']),
    makeNode('2', '1', ['1', '0']),
    makeNode('3', '0', ['0']),
    makeNode('4', '3', ['3', '0']),
    makeNode('5', '3', ['3', '0']),
    makeNode('b', 'a', ['a']),
  ];
  const schema = { id: 'id', parent: 'parent', ancestry: 'ancestry' };
  // the reason b is not identified here is because the ancestry array
  // size is 2, which means that if b had a descendant, then it would have been found
  // using our query which found 2, 4, 5. So either we hit the size limit or there are no
  // children of b
  expect(getLeafNodes(results, ['0', 'a'], schema)).toStrictEqual(['2', '4', '5']);
});
});
});
describe('getIDField', () => {
  it('returns undefined if the field does not exist', () => {
    const emptyDoc = {};
    expect(getIDField(emptyDoc, { id: 'a', parent: 'b' })).toBeUndefined();
  });
  it('returns the first value if the field is an array', () => {
    const doc = { 'a.b': ['1', '2'] };
    expect(getIDField(doc, { id: 'a.b', parent: 'b' })).toStrictEqual('1');
  });
});
describe('getParentField', () => {
  it('returns undefined if the field does not exist', () => {
    const emptyDoc = {};
    expect(getParentField(emptyDoc, { id: 'a', parent: 'b' })).toBeUndefined();
  });
  it('returns the first value if the field is an array', () => {
    const doc = { 'a.b': ['1', '2'] };
    expect(getParentField(doc, { id: 'z', parent: 'a.b' })).toStrictEqual('1');
  });
});
describe('getAncestryAsArray', () => {
  it('returns an empty array if the field does not exist', () => {
    const emptyDoc = {};
    expect(getAncestryAsArray(emptyDoc, { id: 'a', parent: 'b', ancestry: 'z' })).toStrictEqual(
      []
    );
  });
  it('returns the full array if the field exists', () => {
    const doc = { 'a.b': ['1', '2'] };
    expect(getAncestryAsArray(doc, { id: 'z', parent: 'f', ancestry: 'a.b' })).toStrictEqual([
      '1',
      '2',
    ]);
  });
  it('returns a built array using the parent field if ancestry field is empty', () => {
    const doc = { 'aParent.bParent': ['1', '2'], ancestry: [] };
    expect(
      getAncestryAsArray(doc, { id: 'z', parent: 'aParent.bParent', ancestry: 'ancestry' })
    ).toStrictEqual(['1']);
  });
  it('returns a built array using the parent field if ancestry field does not exist', () => {
    const doc = { 'aParent.bParent': '1' };
    expect(
      getAncestryAsArray(doc, { id: 'z', parent: 'aParent.bParent', ancestry: 'ancestry' })
    ).toStrictEqual(['1']);
  });
});
});

View file

@ -0,0 +1,334 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import {
firstNonNullValue,
values,
} from '../../../../../../common/endpoint/models/ecs_safety_helpers';
import { ECSField, ResolverNode, FieldsObject } from '../../../../../../common/endpoint/types';
import { DescendantsQuery } from '../queries/descendants';
import { Schema, NodeID } from './index';
import { LifecycleQuery } from '../queries/lifecycle';
import { StatsQuery } from '../queries/stats';
/**
 * The query parameters passed in from the request. These define the limits for the ES requests for retrieving the
 * resolver tree.
 */
export interface TreeOptions {
  // maximum number of levels of descendants to retrieve (ignored when the schema defines an ancestry field)
  descendantLevels: number;
  // budget for the total number of descendant nodes to retrieve
  descendants: number;
  // budget for the total number of ancestor nodes to retrieve
  ancestors: number;
  // time window applied to the ES queries
  timerange: {
    from: string;
    to: string;
  };
  // field definitions describing how nodes relate to each other
  schema: Schema;
  // the IDs of the nodes whose ancestors/descendants should be retrieved
  nodes: NodeID[];
  // Elasticsearch index patterns to search
  indexPatterns: string[];
}
/**
 * Handles retrieving nodes of a resolver tree.
 */
export class Fetcher {
  constructor(private readonly client: IScopedClusterClient) {}
  /**
   * This method retrieves the ancestors and descendants of a resolver tree.
   *
   * @param options the options for retrieving the structure of the tree.
   */
  public async tree(options: TreeOptions): Promise<ResolverNode[]> {
    // the ancestor and descendant lookups are independent, so run them concurrently
    const treeParts = await Promise.all([
      this.retrieveAncestors(options),
      this.retrieveDescendants(options),
    ]);
    // flatten the ancestor and descendant arrays into a single list of nodes
    const tree = treeParts.reduce((results, partArray) => {
      results.push(...partArray);
      return results;
    }, []);
    return this.formatResponse(tree, options);
  }
  /**
   * Enriches the raw documents with event stats and reshapes them into ResolverNodes.
   *
   * @param treeNodes the raw doc value objects gathered by the ancestor/descendant queries
   * @param options the options for retrieving the structure of the tree.
   */
  private async formatResponse(
    treeNodes: FieldsObject[],
    options: TreeOptions
  ): Promise<ResolverNode[]> {
    // collect the IDs of all the nodes so the stats can be requested in a single query
    const statsIDs: NodeID[] = [];
    for (const node of treeNodes) {
      const id = getIDField(node, options.schema);
      if (id) {
        statsIDs.push(id);
      }
    }
    const query = new StatsQuery({
      indexPatterns: options.indexPatterns,
      schema: options.schema,
      timerange: options.timerange,
    });
    const eventStats = await query.search(this.client, statsIDs);
    const statsNodes: ResolverNode[] = [];
    for (const node of treeNodes) {
      const id = getIDField(node, options.schema);
      const parent = getParentField(node, options.schema);
      const name = getNameField(node, options.schema);
      // at this point id should never be undefined, it should be enforced by the Elasticsearch query
      // but let's check anyway
      if (id !== undefined) {
        statsNodes.push({
          id,
          parent,
          name,
          data: node,
          // nodes without any stats returned for their ID get an empty stats object
          stats: eventStats[id] ?? { total: 0, byCategory: {} },
        });
      }
    }
    return statsNodes;
  }
  /**
   * Determines which node IDs to use in the next ancestor query iteration.
   *
   * @param results the documents returned by the previous ancestor query
   * @param schema the field definitions for how nodes are represented in the resolver graph
   * @param levelsLeft the remaining budget of ancestor nodes that can be requested
   */
  private static getNextAncestorsToFind(
    results: FieldsObject[],
    schema: Schema,
    levelsLeft: number
  ): NodeID[] {
    // index the results by ID so parents can be looked up quickly
    const nodesByID = results.reduce((accMap: Map<NodeID, FieldsObject>, result: FieldsObject) => {
      const id = getIDField(result, schema);
      if (id) {
        accMap.set(id, result);
      }
      return accMap;
    }, new Map());
    const nodes: NodeID[] = [];
    // Find all the nodes that don't have their parent in the result set, we will use these
    // nodes to find the additional ancestry
    for (const result of results) {
      const parent = getParentField(result, schema);
      if (parent) {
        const parentNode = nodesByID.get(parent);
        if (!parentNode) {
          // it's ok if the nodes array is larger than the levelsLeft because the query
          // will have the size set to the levelsLeft which will restrict the number of results
          nodes.push(...getAncestryAsArray(result, schema).slice(0, levelsLeft));
        }
      }
    }
    return nodes;
  }
  /**
   * Retrieves up to options.ancestors ancestor nodes for the requested nodes.
   *
   * @param options the options for retrieving the structure of the tree.
   */
  private async retrieveAncestors(options: TreeOptions): Promise<FieldsObject[]> {
    const ancestors: FieldsObject[] = [];
    const query = new LifecycleQuery({
      schema: options.schema,
      indexPatterns: options.indexPatterns,
      timerange: options.timerange,
    });
    let nodes = options.nodes;
    let numLevelsLeft = options.ancestors;
    while (numLevelsLeft > 0) {
      const results: FieldsObject[] = await query.search(this.client, nodes);
      if (results.length <= 0) {
        return ancestors;
      }
      /**
       * The local `ancestors` array accumulates the ancestors of the node of interest. This array is different
       * from the ancestry array of a specific document. The order of this array is going to be weird, it will look like this
       * [most distant ancestor...closer ancestor, next iteration's most distant ancestor...closer ancestor]
       *
       * Here is an example of why this happens
       * Consider the following tree:
       * A -> B -> C -> D -> E -> Origin
       * Where A was spawn before B, which was before C, etc
       *
       * Let's assume the ancestry array limit is 2 so Origin's array would be: [E, D]
       * E's ancestry array would be: [D, C] etc
       *
       * If a request comes in to retrieve all the ancestors in this tree, the accumulated results will be:
       * [D, E, B, C, A]
       *
       * The first iteration would retrieve D and E in that order because they are sorted in ascending order by timestamp.
       * The next iteration would get the ancestors of D (since that's the most distant ancestor from Origin) which are
       * [B, C]
       * The next iteration would get the ancestors of B which is A
       * Hence: [D, E, B, C, A]
       */
      ancestors.push(...results);
      // the ancestors option is a node budget, so subtract however many nodes were just found
      numLevelsLeft -= results.length;
      nodes = Fetcher.getNextAncestorsToFind(results, options.schema, numLevelsLeft);
    }
    return ancestors;
  }
  /**
   * Retrieves up to options.descendants descendant nodes for the requested nodes, level by level.
   *
   * @param options the options for retrieving the structure of the tree.
   */
  private async retrieveDescendants(options: TreeOptions): Promise<FieldsObject[]> {
    const descendants: FieldsObject[] = [];
    const query = new DescendantsQuery({
      schema: options.schema,
      indexPatterns: options.indexPatterns,
      timerange: options.timerange,
    });
    let nodes: NodeID[] = options.nodes;
    let numNodesLeftToRequest: number = options.descendants;
    let levelsLeftToRequest: number = options.descendantLevels;
    // if the ancestry was specified then ignore the levels
    while (
      numNodesLeftToRequest > 0 &&
      (options.schema.ancestry !== undefined || levelsLeftToRequest > 0)
    ) {
      const results: FieldsObject[] = await query.search(this.client, nodes, numNodesLeftToRequest);
      if (results.length <= 0) {
        return descendants;
      }
      // the most distant descendants found so far seed the next query
      nodes = getLeafNodes(results, nodes, options.schema);
      numNodesLeftToRequest -= results.length;
      levelsLeftToRequest -= 1;
      descendants.push(...results);
    }
    return descendants;
  }
}
/**
* This functions finds the leaf nodes for a given response from an Elasticsearch query.
*
* Exporting so it can be tested.
*
* @param results the doc values portion of the documents returned from an Elasticsearch query
* @param nodes an array of unique IDs that were used to find the returned documents
* @param schema the field definitions for how nodes are represented in the resolver graph
*/
export function getLeafNodes(
results: FieldsObject[],
nodes: Array<string | number>,
schema: Schema
): NodeID[] {
let largestAncestryArray = 0;
const nodesToQueryNext: Map<number, Set<NodeID>> = new Map();
const queriedNodes = new Set<NodeID>(nodes);
const isDistantGrandchild = (event: FieldsObject) => {
const ancestry = getAncestryAsArray(event, schema);
return ancestry.length > 0 && queriedNodes.has(ancestry[ancestry.length - 1]);
};
for (const result of results) {
const ancestry = getAncestryAsArray(result, schema);
// This is to handle the following unlikely but possible scenario:
// if an alert was generated by the kernel process (parent process of all other processes) then
// the direct children of that process would only have an ancestry array of [parent_kernel], a single value in the array.
// The children of those children would have two values in their array [direct parent, parent_kernel]
// we need to determine which nodes are the most distant grandchildren of the queriedNodes because those should
// be used for the next query if more nodes should be retrieved. To generally determine the most distant grandchildren
// we can use the last entry in the ancestry array because of its ordering. The problem with that is in the scenario above
// the direct children of parent_kernel will also meet that criteria even though they are not actually the most
// distant grandchildren. To get around that issue we'll bucket all the nodes by the size of their ancestry array
// and then only return the nodes in the largest bucket because those should be the most distant grandchildren
// from the queried nodes that were passed in.
if (ancestry.length > largestAncestryArray) {
largestAncestryArray = ancestry.length;
}
// a grandchild must have an array of > 0 and have it's last parent be in the set of previously queried nodes
// this is one of the furthest descendants from the queried nodes
if (isDistantGrandchild(result)) {
let levelOfNodes = nodesToQueryNext.get(ancestry.length);
if (!levelOfNodes) {
levelOfNodes = new Set<NodeID>();
nodesToQueryNext.set(ancestry.length, levelOfNodes);
}
const nodeID = getIDField(result, schema);
if (nodeID) {
levelOfNodes.add(nodeID);
}
}
}
const nextNodes = nodesToQueryNext.get(largestAncestryArray);
return nextNodes !== undefined ? Array.from(nextNodes) : [];
}
/**
 * Retrieves the unique ID field from a document.
 *
 * Exposed for testing.
 * @param obj the doc value fields retrieved from a document returned by Elasticsearch
 * @param schema the schema used for identifying connections between documents
 */
export function getIDField(obj: FieldsObject, schema: Schema): NodeID | undefined {
  const rawID: ECSField<NodeID> = obj[schema.id];
  // doc value fields can be arrays; use the first non-null entry
  return firstNonNullValue(rawID);
}
/**
 * Retrieves the name field from a document.
 *
 * Exposed for testing.
 * @param obj the doc value fields retrieved from a document returned by Elasticsearch
 * @param schema the schema used for identifying connections between documents
 * @returns the node's name coerced to a string, or undefined when the schema does not
 *  define a name field or the document does not contain it
 */
export function getNameField(obj: FieldsObject, schema: Schema): string | undefined {
  if (!schema.name) {
    return undefined;
  }
  const name: ECSField<string | number> = obj[schema.name];
  const nameValue = firstNonNullValue(name);
  // guard before coercing: String(undefined) would otherwise produce the literal
  // string 'undefined' instead of an absent name
  return nameValue !== undefined ? String(nameValue) : undefined;
}
/**
 * Retrieves the unique parent ID field from a document.
 *
 * Exposed for testing.
 * @param obj the doc value fields retrieved from a document returned by Elasticsearch
 * @param schema the schema used for identifying connections between documents
 */
export function getParentField(obj: FieldsObject, schema: Schema): NodeID | undefined {
  const rawParent: ECSField<NodeID> = obj[schema.parent];
  // doc value fields can be arrays; use the first non-null entry
  return firstNonNullValue(rawParent);
}
/**
 * Grabs the raw ancestry values from a document, or undefined when the schema does not
 * define an ancestry field or the document lacks it.
 */
function getAncestryField(obj: FieldsObject, schema: Schema): NodeID[] | undefined {
  if (!schema.ancestry) {
    return undefined;
  }
  const rawAncestry: ECSField<NodeID> = obj[schema.ancestry];
  return rawAncestry ? values(rawAncestry) : undefined;
}
/**
* Retrieves the ancestry array field if it exists. If it doesn't exist or if it is empty it reverts to
* creating an array using the parent field. If the parent field doesn't exist, it returns
* an empty array.
*
* Exposed for testing.
* @param obj the doc value fields retrieved from a document returned by Elasticsearch
* @param schema the schema used for identifying connections between documents
*/
export function getAncestryAsArray(obj: FieldsObject, schema: Schema): NodeID[] {
const ancestry = getAncestryField(obj, schema);
if (!ancestry || ancestry.length <= 0) {
const parentField = getParentField(obj, schema);
return parentField !== undefined ? [parentField] : [];
}
return ancestry;
}

View file

@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
 * Represents a time range filter
 */
export interface Timerange {
  // start of the range
  from: string;
  // end of the range
  to: string;
}
/**
 * A unique identifier for a single node within a resolver tree.
 */
export type NodeID = string | number;
/**
 * The fields to use to identify nodes within a resolver tree.
 */
export interface Schema {
  /**
   * the ancestry field should be set to a field that contains an ordered array representing
   * the ancestors of a node.
   */
  ancestry?: string;
  /**
   * id represents the field to use as the unique ID for a node.
   */
  id: string;
  /**
   * field to use for the name of the node
   */
  name?: string;
  /**
   * parent represents the field that is the edge between two nodes.
   */
  parent: string;
}
/**
 * Returns the doc value fields filter to use in queries to limit the number of fields returned in the
 * query response.
 *
 * See for more info: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-fields.html#docvalue-fields
 *
 * @param schema is the node schema information describing how relationships are formed between nodes
 *  in the resolver graph.
 */
export function docValueFields(schema: Schema): Array<{ field: string }> {
  // the ancestry and name fields are optional in the schema; only request them when defined
  const optionalFields = [schema.ancestry, schema.name].filter((field): field is string =>
    Boolean(field)
  );
  return ['@timestamp', schema.id, schema.parent, ...optionalFields].map((field) => ({ field }));
}

View file

@ -5,16 +5,24 @@
*/
import _ from 'lodash';
import expect from '@kbn/expect';
import { firstNonNullValue } from '../../../../plugins/security_solution/common/endpoint/models/ecs_safety_helpers';
import {
NodeID,
Schema,
} from '../../../../plugins/security_solution/server/endpoint/routes/resolver/tree/utils';
import {
SafeResolverChildNode,
SafeResolverLifecycleNode,
SafeResolverEvent,
ResolverNodeStats,
ResolverNode,
} from '../../../../plugins/security_solution/common/endpoint/types';
import {
parentEntityIDSafeVersion,
entityIDSafeVersion,
eventIDSafeVersion,
timestampSafeVersion,
timestampAsDateSafeVersion,
} from '../../../../plugins/security_solution/common/endpoint/models/event';
import {
Event,
@ -24,6 +32,344 @@ import {
categoryMapping,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
/**
 * Walks the descendants of the given starting nodes generation by generation, appending one
 * map of nodes per level to the provided levels array.
 */
const createLevels = ({
  descendantsByParent,
  levels,
  currentNodes,
  schema,
}: {
  descendantsByParent: Map<NodeID, Map<NodeID, ResolverNode>>;
  levels: Array<Map<NodeID, ResolverNode>>;
  currentNodes: Map<NodeID, ResolverNode> | undefined;
  schema: Schema;
}): Array<Map<NodeID, ResolverNode>> => {
  // iterative breadth-first traversal: each pass of the loop handles one generation
  let frontier = currentNodes;
  while (frontier !== undefined && frontier.size > 0) {
    levels.push(frontier);
    const nextGeneration: Map<NodeID, ResolverNode> = new Map();
    for (const node of frontier.values()) {
      const children = descendantsByParent.get(getID(node, schema));
      if (children) {
        for (const child of children.values()) {
          nextGeneration.set(getID(child, schema), child);
        }
      }
    }
    frontier = nextGeneration;
  }
  return levels;
};
interface TreeExpectation {
  // the node ID that was used to query the tree api
  origin: NodeID;
  // the expected counts for the tree rooted at this origin
  nodeExpectations: NodeExpectations;
}
interface NodeExpectations {
  // expected number of ancestor nodes returned for the origin
  ancestors?: number;
  // expected total number of descendant nodes returned for the origin
  descendants?: number;
  // expected number of levels of descendants returned for the origin
  descendantLevels?: number;
}
interface APITree {
  // entries closer to the beginning of the array are more direct parents of the origin aka
  // ancestors[0] = the origin's parent, ancestors[1] = the origin's grandparent
  ancestors: ResolverNode[];
  // if no ancestors were retrieved then the origin will be undefined
  origin: ResolverNode | undefined;
  // each entry is one generation of descendants, keyed by node ID
  descendantLevels: Array<Map<NodeID, ResolverNode>>;
  nodeExpectations: NodeExpectations;
}
/**
 * Represents a utility structure for making it easier to perform expect calls on the response
 * from the /tree api. This can represent multiple trees, since the tree api can return multiple trees.
 */
export interface APIResponse {
  // all returned nodes keyed by their ID
  nodesByID: Map<NodeID, ResolverNode>;
  // the reconstructed tree for each queried origin
  trees: Map<NodeID, APITree>;
  // the raw array of nodes returned by the api
  allNodes: ResolverNode[];
}
/**
 * Gets the ID field from a resolver node. Throws an error if the ID doesn't exist.
 *
 * @param node a resolver node
 * @param schema the schema that was used to retrieve this resolver node
 * @throws when the schema's id field cannot be found in the node's data
 */
export const getID = (node: ResolverNode | undefined, schema: Schema): NodeID => {
  const id = firstNonNullValue(node?.data[schema.id]);
  // use a nullish check rather than a truthy one so valid falsy IDs
  // (e.g. the number 0, which NodeID permits) are not treated as missing
  if (id == null) {
    throw new Error(`Unable to find id ${schema.id} in node: ${JSON.stringify(node)}`);
  }
  return id;
};
// returns the parent ID field from the node's data, or undefined when the node is absent
const getParentInternal = (node: ResolverNode | undefined, schema: Schema): NodeID | undefined => {
  return node ? firstNonNullValue(node.data[schema.parent]) : undefined;
};
/**
 * Gets the parent ID field from a resolver node. Throws an error if the ID doesn't exist.
 *
 * @param node a resolver node
 * @param schema the schema that was used to retrieve this resolver node
 * @throws when the schema's parent field cannot be found in the node's data
 */
export const getParent = (node: ResolverNode | undefined, schema: Schema): NodeID => {
  const parent = getParentInternal(node, schema);
  // nullish (not truthy) check so valid falsy IDs such as the number 0 are accepted
  if (parent == null) {
    throw new Error(`Unable to find parent ${schema.parent} in node: ${JSON.stringify(node)}`);
  }
  return parent;
};
/**
 * Reformats the tree's response to make it easier to perform testing on the results.
 *
 * @param treeExpectations the node IDs used to retrieve the trees and the expected number of ancestors/descendants in the
 *  resulting trees
 * @param nodes the response from the tree api
 * @param schema the schema used when calling the tree api
 */
const createTreeFromResponse = (
  treeExpectations: TreeExpectation[],
  nodes: ResolverNode[],
  schema: Schema
) => {
  const nodesByID = new Map<NodeID, ResolverNode>();
  const nodesByParent = new Map<NodeID, Map<NodeID, ResolverNode>>();
  // index every returned node by its own ID, and group the nodes by their parent ID
  for (const node of nodes) {
    const id = getID(node, schema);
    const parent = getParentInternal(node, schema);
    nodesByID.set(id, node);
    if (parent) {
      let groupedChildren = nodesByParent.get(parent);
      if (!groupedChildren) {
        groupedChildren = new Map();
        nodesByParent.set(parent, groupedChildren);
      }
      groupedChildren.set(id, node);
    }
  }
  const trees: Map<NodeID, APITree> = new Map();
  for (const expectation of treeExpectations) {
    // build the per-generation descendant levels starting from the origin's direct children
    const descendantLevels = createLevels({
      descendantsByParent: nodesByParent,
      levels: [],
      currentNodes: nodesByParent.get(expectation.origin),
      schema,
    });
    const ancestors: ResolverNode[] = [];
    const originNode = nodesByID.get(expectation.origin);
    if (originNode) {
      let currentID: NodeID | undefined = getParentInternal(originNode, schema);
      // construct an array with all the ancestors from the response. We'll use this to verify that
      // all the expected ancestors were returned in the response.
      while (currentID !== undefined) {
        const parentNode = nodesByID.get(currentID);
        if (parentNode) {
          ancestors.push(parentNode);
        }
        currentID = getParentInternal(parentNode, schema);
      }
    }
    trees.set(expectation.origin, {
      ancestors,
      origin: originNode,
      descendantLevels,
      nodeExpectations: expectation.nodeExpectations,
    });
  }
  return {
    nodesByID,
    trees,
    allNodes: nodes,
  };
};
/**
 * Verifies that the ancestors returned in the response form a connected chain back from each
 * origin and that every returned ancestor matches the corresponding generated node.
 */
const verifyAncestry = ({
  responseTrees,
  schema,
  genTree,
}: {
  responseTrees: APIResponse;
  schema: Schema;
  genTree: Tree;
}) => {
  // collect every node the generator created so returned nodes can be looked up by ID
  const allGenNodes = new Map<string, TreeNode>([
    ...genTree.ancestry,
    ...genTree.children,
    [genTree.origin.id, genTree.origin],
  ]);
  for (const tree of responseTrees.trees.values()) {
    if (tree.nodeExpectations.ancestors !== undefined) {
      expect(tree.ancestors.length).to.be(tree.nodeExpectations.ancestors);
    }
    if (tree.origin !== undefined) {
      // make sure the origin node from the request exists in the generated data and has the same fields
      const originID = getID(tree.origin, schema);
      const originParentID = getParent(tree.origin, schema);
      expect(tree.origin.id).to.be(originID);
      expect(tree.origin.parent).to.be(originParentID);
      expect(allGenNodes.get(String(originID))?.id).to.be(String(originID));
      expect(allGenNodes.get(String(originParentID))?.id).to.be(String(originParentID));
      expect(originID).to.be(entityIDSafeVersion(allGenNodes.get(String(originID))!.lifecycle[0]));
      expect(originParentID).to.be(
        parentEntityIDSafeVersion(allGenNodes.get(String(originID))!.lifecycle[0])
      );
      // make sure the lifecycle events are sorted by timestamp in ascending order because the
      // event that will be returned that we need to compare to should be the earliest event
      // found
      const originLifecycleSorted = [...allGenNodes.get(String(originID))!.lifecycle].sort(
        (a: Event, b: Event) => {
          const aTime: number | undefined = timestampSafeVersion(a);
          const bTime = timestampSafeVersion(b);
          if (aTime !== undefined && bTime !== undefined) {
            return aTime - bTime;
          } else {
            return 0;
          }
        }
      );
      // the returned origin's timestamp should match the earliest generated lifecycle event
      const ts = timestampAsDateSafeVersion(tree.origin?.data);
      expect(ts).to.not.be(undefined);
      expect(ts).to.eql(timestampAsDateSafeVersion(originLifecycleSorted[0]));
    }
    // check the constructed ancestors array to see if we're missing any nodes in the ancestry
    for (let i = 0; i < tree.ancestors.length; i++) {
      const id = getID(tree.ancestors[i], schema);
      const parent = getParentInternal(tree.ancestors[i], schema);
      // only compare to the parent if this is not the last entry in the array
      if (i < tree.ancestors.length - 1) {
        // the current node's parent ID should match the parent's ID field
        expect(parent).to.be(getID(tree.ancestors[i + 1], schema));
        expect(parent).to.not.be(undefined);
        expect(tree.ancestors[i].parent).to.not.be(undefined);
        expect(tree.ancestors[i].parent).to.be(parent);
      }
      // the current node's ID must exist in the generated tree
      expect(allGenNodes.get(String(id))?.id).to.be(id);
      expect(tree.ancestors[i].id).to.be(id);
    }
  }
};
/**
 * Verifies that the descendant levels in the response match the expected level/descendant counts
 * and that each returned descendant matches the corresponding generated node.
 */
const verifyChildren = ({
  responseTrees,
  schema,
  genTree,
}: {
  responseTrees: APIResponse;
  schema: Schema;
  genTree: Tree;
}) => {
  // collect every node the generator created so returned nodes can be looked up by ID
  const allGenNodes = new Map<string, TreeNode>([
    ...genTree.ancestry,
    ...genTree.children,
    [genTree.origin.id, genTree.origin],
  ]);
  for (const tree of responseTrees.trees.values()) {
    if (tree.nodeExpectations.descendantLevels !== undefined) {
      expect(tree.nodeExpectations.descendantLevels).to.be(tree.descendantLevels.length);
    }
    let totalDescendants = 0;
    for (const level of tree.descendantLevels) {
      for (const node of level.values()) {
        totalDescendants += 1;
        const id = getID(node, schema);
        const parent = getParent(node, schema);
        const genNode = allGenNodes.get(String(id));
        expect(id).to.be(node.id);
        expect(parent).to.be(node.parent);
        expect(node.parent).to.not.be(undefined);
        // make sure the id field is the same in the returned node as the generated one
        expect(id).to.be(entityIDSafeVersion(genNode!.lifecycle[0]));
        // make sure the parent field is the same in the returned node as the generated one
        expect(parent).to.be(parentEntityIDSafeVersion(genNode!.lifecycle[0]));
      }
    }
    if (tree.nodeExpectations.descendants !== undefined) {
      expect(tree.nodeExpectations.descendants).to.be(totalDescendants);
    }
  }
};
/**
 * Verifies that every returned node's stats reflect the related event categories that the
 * generator was configured to create.
 */
const verifyStats = ({
  responseTrees,
  relatedEventsCategories,
}: {
  responseTrees: APIResponse;
  relatedEventsCategories: RelatedEventInfo[];
}) => {
  for (const node of responseTrees.allNodes) {
    let expectedTotal = 0;
    for (const info of relatedEventsCategories) {
      expectedTotal += info.count;
      const mapped = categoryMapping[info.category];
      // if there are multiple ecs categories used to define a related event, the count for all of them
      // should be the same and they should equal what is defined in the categories used to generate
      // the related events
      const ecsCategories = Array.isArray(mapped) ? mapped : [mapped];
      for (const ecsCat of ecsCategories) {
        expect(node.stats.byCategory[ecsCat]).to.be(info.count);
      }
    }
    expect(node.stats.total).to.be(expectedTotal);
  }
};
/**
 * Verify the ancestry of multiple trees.
 *
 * @param expectations array of expectations based on the origin that built a particular tree
 * @param response the nodes returned from the api
 * @param schema the schema fields passed to the tree api
 * @param genTree the generated tree that was inserted in Elasticsearch that we are querying
 * @param relatedEventsCategories an optional array to instruct the verification to check the stats
 *  on each node returned
 */
export const verifyTree = ({
  expectations,
  response,
  schema,
  genTree,
  relatedEventsCategories,
}: {
  expectations: TreeExpectation[];
  response: ResolverNode[];
  schema: Schema;
  genTree: Tree;
  relatedEventsCategories?: RelatedEventInfo[];
}) => {
  // reshape the flat node list into per-origin trees before running the individual checks
  const responseTrees = createTreeFromResponse(expectations, response, schema);
  verifyAncestry({ responseTrees, schema, genTree });
  verifyChildren({ responseTrees, schema, genTree });
  if (relatedEventsCategories) {
    verifyStats({ responseTrees, relatedEventsCategories });
  }
};
/**
* Creates the ancestry array based on an array of events. The order of the ancestry array will match the order
* of the events passed in.
@ -44,6 +390,7 @@ export const createAncestryArray = (events: Event[]) => {
/**
* Check that the given lifecycle is in the resolver tree's corresponding map
*
* @deprecated use verifyTree
* @param node a lifecycle node containing the start and end events for a node
* @param nodeMap a map of entity_ids to nodes to look for the passed in `node`
*/
@ -59,12 +406,13 @@ const expectLifecycleNodeInMap = (
/**
* Verify that all the ancestor nodes are valid and optionally have parents.
*
* @deprecated use verifyTree
* @param ancestors an array of ancestors
* @param tree the generated resolver tree as the source of truth
* @param verifyLastParent a boolean indicating whether to check the last ancestor. If the ancestors array intentionally
* does not contain all the ancestors, the last one will not have the parent
*/
export const verifyAncestry = (
export const checkAncestryFromEntityTreeAPI = (
ancestors: SafeResolverLifecycleNode[],
tree: Tree,
verifyLastParent: boolean
@ -114,6 +462,7 @@ export const verifyAncestry = (
/**
* Retrieves the most distant ancestor in the given array.
*
* @deprecated use verifyTree
* @param ancestors an array of ancestor nodes
*/
export const retrieveDistantAncestor = (ancestors: SafeResolverLifecycleNode[]) => {
@ -137,12 +486,13 @@ export const retrieveDistantAncestor = (ancestors: SafeResolverLifecycleNode[])
/**
* Verify that the children nodes are correct
*
* @deprecated use verifyTree
* @param children the children nodes
* @param tree the generated resolver tree as the source of truth
* @param numberOfParents an optional number to compare that are a certain number of parents in the children array
* @param childrenPerParent an optional number to compare that there are a certain number of children for each parent
*/
export const verifyChildren = (
export const verifyChildrenFromEntityTreeAPI = (
children: SafeResolverChildNode[],
tree: Tree,
numberOfParents?: number,
@ -200,10 +550,11 @@ export const compareArrays = (
/**
* Verifies that the stats received from ES for a node reflect the categories of events that the generator created.
*
* @deprecated use verifyTree
* @param relatedEvents the related events received for a particular node
* @param categories the related event info used when generating the resolver tree
*/
export const verifyStats = (
export const verifyEntityTreeStats = (
stats: ResolverNodeStats | undefined,
categories: RelatedEventInfo[],
relatedAlerts: number
@ -225,12 +576,12 @@ export const verifyStats = (
totalExpEvents += cat.count;
}
expect(stats?.events.total).to.be(totalExpEvents);
expect(stats?.totalAlerts);
};
/**
* A helper function for verifying the stats information an array of nodes.
*
* @deprecated use verifyTree
* @param nodes an array of lifecycle nodes that should have a stats field defined
* @param categories the related event info used when generating the resolver tree
*/
@ -240,6 +591,6 @@ export const verifyLifecycleStats = (
relatedAlerts: number
) => {
for (const node of nodes) {
verifyStats(node.stats, categories, relatedAlerts);
verifyEntityTreeStats(node.stats, categories, relatedAlerts);
}
};

View file

@ -12,6 +12,7 @@ export default function (providerContext: FtrProviderContext) {
loadTestFile(require.resolve('./entity_id'));
loadTestFile(require.resolve('./entity'));
loadTestFile(require.resolve('./children'));
loadTestFile(require.resolve('./tree_entity_id'));
loadTestFile(require.resolve('./tree'));
loadTestFile(require.resolve('./alerts'));
loadTestFile(require.resolve('./events'));

View file

@ -4,31 +4,23 @@
* you may not use this file except in compliance with the Elastic License.
*/
import expect from '@kbn/expect';
import { getNameField } from '../../../../plugins/security_solution/server/endpoint/routes/resolver/tree/utils/fetch';
import { Schema } from '../../../../plugins/security_solution/server/endpoint/routes/resolver/tree/utils';
import { ResolverNode } from '../../../../plugins/security_solution/common/endpoint/types';
import {
SafeResolverAncestry,
SafeResolverChildren,
SafeResolverTree,
SafeLegacyEndpointEvent,
} from '../../../../plugins/security_solution/common/endpoint/types';
import { parentEntityIDSafeVersion } from '../../../../plugins/security_solution/common/endpoint/models/event';
parentEntityIDSafeVersion,
timestampSafeVersion,
} from '../../../../plugins/security_solution/common/endpoint/models/event';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
Tree,
RelatedEventCategory,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { Options, GeneratedTrees } from '../../services/resolver';
import {
compareArrays,
verifyAncestry,
retrieveDistantAncestor,
verifyChildren,
verifyLifecycleStats,
verifyStats,
} from './common';
import { verifyTree } from './common';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
const esArchiver = getService('esArchiver');
const resolver = getService('resolverGenerator');
const relatedEventsToGen = [
@ -52,322 +44,641 @@ export default function ({ getService }: FtrProviderContext) {
ancestryArraySize: 2,
};
// schema that includes the ancestry array field
const schemaWithAncestry: Schema = {
  ancestry: 'process.Ext.ancestry',
  id: 'process.entity_id',
  parent: 'process.parent.entity_id',
};
// schema that only relies on the id/parent edge fields
const schemaWithoutAncestry: Schema = {
  id: 'process.entity_id',
  parent: 'process.parent.entity_id',
};
// schema that additionally requests a name field for each node
const schemaWithName: Schema = {
  id: 'process.entity_id',
  parent: 'process.parent.entity_id',
  name: 'process.name',
};
describe('Resolver tree', () => {
before(async () => {
  // archived endgame-* data used by the legacy-route tests
  await esArchiver.load('endpoint/resolver/api_feature');
  // generate a fresh resolver tree for the non-legacy tests
  resolverTrees = await resolver.createTrees(treeOptions);
  // we only requested a single alert so there's only 1 tree
  tree = resolverTrees.trees[0];
});
after(async () => {
  await resolver.deleteData(resolverTrees);
  // this unload is for an endgame-* index so it does not use data streams
  await esArchiver.unload('endpoint/resolver/api_feature');
});
describe('ancestry events route', () => {
describe('legacy events', () => {
// Fixed ids from the archived endgame fixture loaded in before().
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
const entityID = '94042';
it('should return details for the root node', async () => {
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(
      `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=5`
    )
    .expect(200);
  // expected counts come from the api_feature archive contents
  expect(body.ancestors[0].lifecycle.length).to.eql(2);
  expect(body.ancestors.length).to.eql(2);
  expect(body.nextAncestor).to.eql(null);
});
it('should have a populated next parameter', async () => {
  // ancestors=0 returns no ancestor nodes but still reports the next cursor
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(
      `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0`
    )
    .expect(200);
  expect(body.nextAncestor).to.eql('94041');
});
it('should handle an ancestors param request', async () => {
let { body }: { body: SafeResolverAncestry } = await supertest
.get(
`/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0`
)
.expect(200);
const next = body.nextAncestor;
({ body } = await supertest
.get(
`/api/endpoint/resolver/${next}/ancestry?legacyEndpointID=${endpointID}&ancestors=1`
)
.expect(200));
expect(body.ancestors[0].lifecycle.length).to.eql(1);
expect(body.nextAncestor).to.eql(null);
describe('ancestry events', () => {
it('should return the correct ancestor nodes for the tree', async () => {
  // Request more ancestors (9) than the generated tree contains; the result
  // should be capped at the tree's actual ancestor count (5).
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 9,
      schema: schemaWithAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    // the generated tree has 5 ancestors above the origin
    expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 5 } }],
    response: body,
    schema: schemaWithAncestry,
    genTree: tree,
  });
});
describe('endpoint events', () => {
it('should return the origin node at the front of the array', async () => {
const { body }: { body: SafeResolverAncestry } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`)
.expect(200);
expect(body.ancestors[0].entityID).to.eql(tree.origin.id);
it('should handle an invalid id', async () => {
  // An entity id matching no documents should yield an empty 200 response,
  // not an error.
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 9,
      schema: schemaWithAncestry,
      nodes: ['bogus id'],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  expect(body).to.be.empty();
});
it('should return a subset of the ancestors', async () => {
  // Ask for fewer ancestors than exist to verify the limit is honored.
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      // 3 ancestors means 1 origin and 2 ancestors of the origin
      ancestors: 3,
      schema: schemaWithAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 2 } }],
    response: body,
    schema: schemaWithAncestry,
    genTree: tree,
  });
});
it('should return details for the root node', async () => {
const { body }: { body: SafeResolverAncestry } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`)
.expect(200);
// the tree we generated had 5 ancestors + 1 origin node
expect(body.ancestors.length).to.eql(6);
expect(body.ancestors[0].entityID).to.eql(tree.origin.id);
verifyAncestry(body.ancestors, tree, true);
expect(body.nextAncestor).to.eql(null);
it('should return ancestors without the ancestry array', async () => {
  // Without the ancestry field the server must walk parent-by-parent; the
  // result should match the ancestry-array-based query.
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 50,
      schema: schemaWithoutAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 5 } }],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
it('should handle an invalid id', async () => {
const { body }: { body: SafeResolverAncestry } = await supertest
.get(`/api/endpoint/resolver/alskdjflasj/ancestry`)
.expect(200);
expect(body.ancestors).to.be.empty();
expect(body.nextAncestor).to.eql(null);
it('should respect the time range specified and only return the origin node', async () => {
  // Pin both ends of the range to the origin's first lifecycle event
  // timestamp so only the origin itself can fall inside the range.
  const from = new Date(
    timestampSafeVersion(tree.origin.lifecycle[0]) ?? new Date()
  ).toISOString();
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 50,
      schema: schemaWithoutAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from,
        to: from,
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    // no ancestors should match the degenerate (instantaneous) time range
    expectations: [{ origin: tree.origin.id, nodeExpectations: { ancestors: 0 } }],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
it('should have a populated next parameter', async () => {
const { body }: { body: SafeResolverAncestry } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=2`)
.expect(200);
// it should have 2 ancestors + 1 origin
expect(body.ancestors.length).to.eql(3);
verifyAncestry(body.ancestors, tree, false);
const distantGrandparent = retrieveDistantAncestor(body.ancestors);
expect(body.nextAncestor).to.eql(
parentEntityIDSafeVersion(distantGrandparent.lifecycle[0])
);
it('should support returning multiple ancestor trees when multiple nodes are requested', async () => {
  // Request ancestry for two nodes at different depths in one call and verify
  // each gets its own (correctly sized) ancestor chain.
  // There should be 2 levels of descendants under the origin, grab the bottom one, and the first node's id
  const bottomMostDescendant = Array.from(tree.childrenLevels[1].values())[0].id;
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 50,
      schema: schemaWithoutAncestry,
      nodes: [tree.origin.id, bottomMostDescendant],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      // there are 5 ancestors above the origin
      { origin: tree.origin.id, nodeExpectations: { ancestors: 5 } },
      // there are 2 levels below the origin so the bottom node's ancestry should be
      // all the ancestors (5) + one level + the origin = 7
      { origin: bottomMostDescendant, nodeExpectations: { ancestors: 7 } },
    ],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
it('should handle multiple ancestor requests', async () => {
let { body }: { body: SafeResolverAncestry } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=3`)
.expect(200);
expect(body.ancestors.length).to.eql(4);
const next = body.nextAncestor;
// NOTE: fixed typo in the test title ("a the same level" -> "at the same level").
it('should return a single ancestry when two nodes at the same level and from same parent are requested', async () => {
  // Two siblings share the same ancestor chain; requesting both should not
  // duplicate ancestor nodes in the response.
  // there are 2 levels after the origin, let's get the first level, there will be three
  // children so get the left and right most ones
  const level0Nodes = Array.from(tree.childrenLevels[0].values());
  const leftNode = level0Nodes[0].id;
  const rightNode = level0Nodes[2].id;
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 50,
      schema: schemaWithoutAncestry,
      nodes: [leftNode, rightNode],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      // We should be 1 level below the origin so the node's ancestry should be
      // all the ancestors (5) + the origin = 6
      { origin: leftNode, nodeExpectations: { ancestors: 6 } },
      // these nodes should be at the same level so the ancestors should be the same number
      { origin: rightNode, nodeExpectations: { ancestors: 6 } },
    ],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
({ body } = await supertest
.get(`/api/endpoint/resolver/${next}/ancestry?ancestors=1`)
.expect(200));
expect(body.ancestors.length).to.eql(2);
verifyAncestry(body.ancestors, tree, true);
// the highest node in the generated tree will not have a parent ID which causes the server to return
// without setting the pagination so nextAncestor will be null
expect(body.nextAncestor).to.eql(null);
it('should not return any nodes when the search index does not have any data', async () => {
  // Query an index pattern (metrics-*) that holds no resolver documents; the
  // route should respond 200 with an empty node list.
  const requestBody = {
    descendants: 0,
    descendantLevels: 0,
    ancestors: 50,
    schema: schemaWithoutAncestry,
    nodes: [tree.origin.id],
    timerange: {
      from: tree.startTime.toISOString(),
      to: tree.endTime.toISOString(),
    },
    indexPatterns: ['metrics-*'],
  };
  const response = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send(requestBody)
    .expect(200);
  const nodes: ResolverNode[] = response.body;
  expect(nodes).to.be.empty();
});
});
describe('descendant events', () => {
it('returns all descendants for the origin without using the ancestry field', async () => {
  // Level-by-level traversal: descendantLevels caps the walk at 2 generations.
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 100,
      descendantLevels: 2,
      ancestors: 0,
      schema: schemaWithoutAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      // there are 2 levels in the descendant part of the tree and 3 nodes for each
      // descendant = 3 children for the origin + 3 children for each of the origin's children = 12
      { origin: tree.origin.id, nodeExpectations: { descendants: 12, descendantLevels: 2 } },
    ],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
it('returns all descendants for the origin using the ancestry field', async () => {
  // With the ancestry array the server does not need level-by-level walking.
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 100,
      // should be ignored when using the ancestry array
      descendantLevels: 0,
      ancestors: 0,
      schema: schemaWithAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      // there are 2 levels in the descendant part of the tree and 3 nodes for each
      // descendant = 3 children for the origin + 3 children for each of the origin's children = 12
      { origin: tree.origin.id, nodeExpectations: { descendants: 12, descendantLevels: 2 } },
    ],
    response: body,
    schema: schemaWithAncestry,
    genTree: tree,
  });
});
it('should handle an invalid id', async () => {
  // A descendants query for an unknown entity id should succeed with an
  // empty result rather than fail.
  const treeRequest = {
    descendants: 100,
    descendantLevels: 100,
    ancestors: 0,
    schema: schemaWithAncestry,
    nodes: ['bogus id'],
    timerange: {
      from: tree.startTime.toISOString(),
      to: tree.endTime.toISOString(),
    },
    indexPatterns: ['logs-*'],
  };
  const res = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send(treeRequest)
    .expect(200);
  const nodes: ResolverNode[] = res.body;
  expect(nodes).to.be.empty();
});
it('returns a single generation of children', async () => {
  // this gets a node should have 3 children which were created in succession so that the timestamps
  // are ordered correctly to be retrieved in a single call
  const childID = Array.from(tree.childrenLevels[0].values())[0].id;
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 100,
      descendantLevels: 1,
      ancestors: 0,
      schema: schemaWithoutAncestry,
      nodes: [childID],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      // a single generation should be three nodes
      { origin: childID, nodeExpectations: { descendants: 3, descendantLevels: 1 } },
    ],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
it('should support returning multiple descendant trees when multiple nodes are requested', async () => {
  // Request descendants for two sibling nodes in one call; the shared
  // descendants limit (6) should be split across both requested nodes.
  // there are 2 levels after the origin, let's get the first level, there will be three
  // children so get the left and right most ones
  const level0Nodes = Array.from(tree.childrenLevels[0].values());
  const leftNodeID = level0Nodes[0].id;
  const rightNodeID = level0Nodes[2].id;
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 6,
      descendantLevels: 0,
      ancestors: 0,
      schema: schemaWithAncestry,
      nodes: [leftNodeID, rightNodeID],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      { origin: leftNodeID, nodeExpectations: { descendantLevels: 1, descendants: 3 } },
      { origin: rightNodeID, nodeExpectations: { descendantLevels: 1, descendants: 3 } },
    ],
    response: body,
    schema: schemaWithAncestry,
    genTree: tree,
  });
});
it('should support returning multiple descendant trees when multiple nodes are requested at different levels', async () => {
  // Walk up two generations from the origin to locate its grandparent, then
  // request descendants for both the origin and the grandparent together.
  const originParent = parentEntityIDSafeVersion(tree.origin.lifecycle[0]) ?? '';
  expect(originParent).to.not.be('');
  const originGrandparent =
    parentEntityIDSafeVersion(tree.ancestry.get(originParent)!.lifecycle[0]) ?? '';
  expect(originGrandparent).to.not.be('');
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 2,
      descendantLevels: 0,
      ancestors: 0,
      schema: schemaWithAncestry,
      nodes: [tree.origin.id, originGrandparent],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      { origin: tree.origin.id, nodeExpectations: { descendantLevels: 1, descendants: 1 } },
      // the origin's grandparent should only have the origin's parent as a descendant
      {
        origin: originGrandparent,
        nodeExpectations: { descendantLevels: 1, descendants: 1 },
      },
    ],
    response: body,
    schema: schemaWithAncestry,
    genTree: tree,
  });
});
it('should support returning multiple descendant trees when multiple nodes are requested at different levels without ancestry field', async () => {
  // Same shape as the previous test but with the minimal schema, so the
  // server must do level-by-level traversal (descendantLevels: 1).
  const originParent = parentEntityIDSafeVersion(tree.origin.lifecycle[0]) ?? '';
  expect(originParent).to.not.be('');
  const originGrandparent =
    parentEntityIDSafeVersion(tree.ancestry.get(originParent)!.lifecycle[0]) ?? '';
  expect(originGrandparent).to.not.be('');
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 6,
      descendantLevels: 1,
      ancestors: 0,
      schema: schemaWithoutAncestry,
      nodes: [tree.origin.id, originGrandparent],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      { origin: tree.origin.id, nodeExpectations: { descendantLevels: 1, descendants: 3 } },
      // the origin's grandparent should only have the origin's parent as a descendant
      {
        origin: originGrandparent,
        nodeExpectations: { descendantLevels: 1, descendants: 1 },
      },
    ],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
it('should respect the time range specified and only return one descendant', async () => {
  // End the range at the first level-0 child's first event timestamp so only
  // that single descendant falls inside the window.
  const level0Node = Array.from(tree.childrenLevels[0].values())[0];
  const end = new Date(
    timestampSafeVersion(level0Node.lifecycle[0]) ?? new Date()
  ).toISOString();
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 100,
      descendantLevels: 5,
      ancestors: 0,
      schema: schemaWithoutAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: end,
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  verifyTree({
    expectations: [
      { origin: tree.origin.id, nodeExpectations: { descendantLevels: 1, descendants: 1 } },
    ],
    response: body,
    schema: schemaWithoutAncestry,
    genTree: tree,
  });
});
});
describe('children route', () => {
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
const entityID = '94041';
it('returns child process lifecycle events', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}`)
.expect(200);
expect(body.childNodes.length).to.eql(1);
expect(body.childNodes[0].lifecycle.length).to.eql(2);
expect(
// for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent
// here, so to avoid it complaining we'll just force it
(body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid
).to.eql(94042);
describe('ancestry and descendants', () => {
it('returns all descendants and ancestors without the ancestry field and they should have the name field', async () => {
const { body }: { body: ResolverNode[] } = await supertest
.post('/api/endpoint/resolver/tree')
.set('kbn-xsrf', 'xxx')
.send({
descendants: 100,
descendantLevels: 10,
ancestors: 50,
schema: schemaWithName,
nodes: [tree.origin.id],
timerange: {
from: tree.startTime.toISOString(),
to: tree.endTime.toISOString(),
},
indexPatterns: ['logs-*'],
})
.expect(200);
verifyTree({
expectations: [
// there are 2 levels in the descendant part of the tree and 3 nodes for each
// descendant = 3 children for the origin + 3 children for each of the origin's children = 12
{
origin: tree.origin.id,
nodeExpectations: { descendants: 12, descendantLevels: 2, ancestors: 5 },
},
],
response: body,
schema: schemaWithName,
genTree: tree,
relatedEventsCategories: relatedEventsToGen,
});
it('returns multiple levels of child process lifecycle events', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/93802/children?legacyEndpointID=${endpointID}&children=10`)
.expect(200);
expect(body.childNodes.length).to.eql(10);
expect(body.nextChild).to.be(null);
expect(body.childNodes[0].lifecycle.length).to.eql(1);
expect(
// for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent
// here, so to avoid it complaining we'll just force it
(body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid
).to.eql(93932);
});
it('returns no values when there is no more data', async () => {
let { body }: { body: SafeResolverChildren } = await supertest
.get(
// there should only be a single child for this node
`/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&children=1`
)
.expect(200);
expect(body.nextChild).to.not.be(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes).be.empty();
expect(body.nextChild).to.eql(null);
});
it('returns the first page of information when the cursor is invalid', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(
`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}&afterChild=blah`
)
.expect(200);
expect(body.childNodes.length).to.eql(1);
expect(body.nextChild).to.be(null);
});
it('errors on invalid pagination values', async () => {
await supertest.get(`/api/endpoint/resolver/${entityID}/children?children=0`).expect(400);
await supertest
.get(`/api/endpoint/resolver/${entityID}/children?children=20000`)
.expect(400);
await supertest
.get(`/api/endpoint/resolver/${entityID}/children?children=-1`)
.expect(400);
});
it('returns empty events without a matching entity id', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/5555/children`)
.expect(200);
expect(body.nextChild).to.eql(null);
expect(body.childNodes).to.be.empty();
});
it('returns empty events with an invalid endpoint id', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=foo`)
.expect(200);
expect(body.nextChild).to.eql(null);
expect(body.childNodes).to.be.empty();
});
for (const node of body) {
expect(node.name).to.be(getNameField(node.data, schemaWithName));
expect(node.name).to.not.be(undefined);
}
});
describe('endpoint events', () => {
it('returns all children for the origin', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/children?children=100`)
.expect(200);
// there are 2 levels in the children part of the tree and 3 nodes for each =
// 3 children for the origin + 3 children for each of the origin's children = 12
expect(body.childNodes.length).to.eql(12);
// there will be 4 parents, the origin of the tree, and it's 3 children
verifyChildren(body.childNodes, tree, 4, 3);
expect(body.nextChild).to.eql(null);
it('returns all descendants and ancestors without the ancestry field', async () => {
const { body }: { body: ResolverNode[] } = await supertest
.post('/api/endpoint/resolver/tree')
.set('kbn-xsrf', 'xxx')
.send({
descendants: 100,
descendantLevels: 10,
ancestors: 50,
schema: schemaWithoutAncestry,
nodes: [tree.origin.id],
timerange: {
from: tree.startTime.toISOString(),
to: tree.endTime.toISOString(),
},
indexPatterns: ['logs-*'],
})
.expect(200);
verifyTree({
expectations: [
// there are 2 levels in the descendant part of the tree and 3 nodes for each
// descendant = 3 children for the origin + 3 children for each of the origin's children = 12
{
origin: tree.origin.id,
nodeExpectations: { descendants: 12, descendantLevels: 2, ancestors: 5 },
},
],
response: body,
schema: schemaWithoutAncestry,
genTree: tree,
relatedEventsCategories: relatedEventsToGen,
});
it('returns a single generation of children', async () => {
// this gets a node should have 3 children which were created in succession so that the timestamps
// are ordered correctly to be retrieved in a single call
const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id;
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=3`)
.expect(200);
expect(body.childNodes.length).to.eql(3);
verifyChildren(body.childNodes, tree, 1, 3);
expect(body.nextChild).to.not.eql(null);
});
it('paginates the children', async () => {
// this gets a node should have 3 children which were created in succession so that the timestamps
// are ordered correctly to be retrieved in a single call
const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id;
let { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=1`)
.expect(200);
expect(body.childNodes.length).to.eql(1);
verifyChildren(body.childNodes, tree, 1, 1);
expect(body.nextChild).to.not.be(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes.length).to.eql(2);
verifyChildren(body.childNodes, tree, 1, 2);
expect(body.nextChild).to.not.be(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes.length).to.eql(0);
expect(body.nextChild).to.be(null);
});
it('gets all children in two queries', async () => {
// should get all the children of the origin
let { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/children?children=3`)
.expect(200);
expect(body.childNodes.length).to.eql(3);
verifyChildren(body.childNodes, tree);
expect(body.nextChild).to.not.be(null);
const firstNodes = [...body.childNodes];
({ body } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}/children?children=10&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes.length).to.eql(9);
// put all the results together and we should have all the children
verifyChildren([...firstNodes, ...body.childNodes], tree, 4, 3);
expect(body.nextChild).to.be(null);
});
});
});
describe('tree api', () => {
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
it('returns ancestors, events, children, and current process lifecycle', async () => {
const { body }: { body: SafeResolverTree } = await supertest
.get(`/api/endpoint/resolver/93933?legacyEndpointID=${endpointID}`)
.expect(200);
expect(body.ancestry.nextAncestor).to.equal(null);
expect(body.children.nextChild).to.equal(null);
expect(body.children.childNodes.length).to.equal(0);
expect(body.lifecycle.length).to.equal(2);
});
for (const node of body) {
expect(node.name).to.be(getNameField(node.data, schemaWithoutAncestry));
expect(node.name).to.be(undefined);
}
});
describe('endpoint events', () => {
it('returns a tree', async () => {
const { body }: { body: SafeResolverTree } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}?children=100&ancestors=5&events=5&alerts=5`
)
.expect(200);
it('returns all descendants and ancestors with the ancestry field', async () => {
const { body }: { body: ResolverNode[] } = await supertest
.post('/api/endpoint/resolver/tree')
.set('kbn-xsrf', 'xxx')
.send({
descendants: 100,
descendantLevels: 10,
ancestors: 50,
schema: schemaWithAncestry,
nodes: [tree.origin.id],
timerange: {
from: tree.startTime.toISOString(),
to: tree.endTime.toISOString(),
},
indexPatterns: ['logs-*'],
})
.expect(200);
verifyTree({
expectations: [
// there are 2 levels in the descendant part of the tree and 3 nodes for each
// descendant = 3 children for the origin + 3 children for each of the origin's children = 12
{
origin: tree.origin.id,
nodeExpectations: { descendants: 12, descendantLevels: 2, ancestors: 5 },
},
],
response: body,
schema: schemaWithAncestry,
genTree: tree,
relatedEventsCategories: relatedEventsToGen,
});
expect(body.children.nextChild).to.equal(null);
expect(body.children.childNodes.length).to.equal(12);
verifyChildren(body.children.childNodes, tree, 4, 3);
verifyLifecycleStats(body.children.childNodes, relatedEventsToGen, relatedAlerts);
for (const node of body) {
expect(node.name).to.be(getNameField(node.data, schemaWithAncestry));
expect(node.name).to.be(undefined);
}
});
expect(body.ancestry.nextAncestor).to.equal(null);
verifyAncestry(body.ancestry.ancestors, tree, true);
verifyLifecycleStats(body.ancestry.ancestors, relatedEventsToGen, relatedAlerts);
expect(body.relatedAlerts.nextAlert).to.equal(null);
compareArrays(tree.origin.relatedAlerts, body.relatedAlerts.alerts, true);
compareArrays(tree.origin.lifecycle, body.lifecycle, true);
verifyStats(body.stats, relatedEventsToGen, relatedAlerts);
it('returns an empty response when limits are zero', async () => {
  // With all limits set to 0 the response should contain no nodes at all.
  const { body }: { body: ResolverNode[] } = await supertest
    .post('/api/endpoint/resolver/tree')
    .set('kbn-xsrf', 'xxx')
    .send({
      descendants: 0,
      descendantLevels: 0,
      ancestors: 0,
      schema: schemaWithAncestry,
      nodes: [tree.origin.id],
      timerange: {
        from: tree.startTime.toISOString(),
        to: tree.endTime.toISOString(),
      },
      indexPatterns: ['logs-*'],
    })
    .expect(200);
  expect(body).to.be.empty();
  // verifyTree with all-zero expectations double-checks the same condition
  // against the generated tree structure.
  verifyTree({
    expectations: [
      {
        origin: tree.origin.id,
        nodeExpectations: { descendants: 0, descendantLevels: 0, ancestors: 0 },
      },
    ],
    response: body,
    schema: schemaWithAncestry,
    genTree: tree,
  });
});
});

View file

@ -0,0 +1,375 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import expect from '@kbn/expect';
import {
SafeResolverAncestry,
SafeResolverChildren,
SafeResolverTree,
SafeLegacyEndpointEvent,
} from '../../../../plugins/security_solution/common/endpoint/types';
import { parentEntityIDSafeVersion } from '../../../../plugins/security_solution/common/endpoint/models/event';
import { FtrProviderContext } from '../../ftr_provider_context';
import {
Tree,
RelatedEventCategory,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { Options, GeneratedTrees } from '../../services/resolver';
import {
compareArrays,
checkAncestryFromEntityTreeAPI,
retrieveDistantAncestor,
verifyChildrenFromEntityTreeAPI,
verifyLifecycleStats,
verifyEntityTreeStats,
} from './common';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
const esArchiver = getService('esArchiver');
const resolver = getService('resolverGenerator');
// Related events to attach to each generated process node, by category.
const relatedEventsToGen = [
  { category: RelatedEventCategory.Driver, count: 2 },
  { category: RelatedEventCategory.File, count: 1 },
  { category: RelatedEventCategory.Registry, count: 1 },
];
// number of related alerts generated per node
const relatedAlerts = 4;
let resolverTrees: GeneratedTrees;
let tree: Tree;
// Shape of the generated tree: 5 ancestors above the origin and 2 generations
// of exactly 3 children each below it; every node terminated with related data.
const treeOptions: Options = {
  ancestors: 5,
  relatedEvents: relatedEventsToGen,
  relatedAlerts,
  children: 3,
  generations: 2,
  percentTerminated: 100,
  percentWithRelated: 100,
  numTrees: 1,
  alwaysGenMaxChildrenPerNode: true,
  ancestryArraySize: 2,
};
describe('Resolver entity tree api', () => {
before(async () => {
  // archived endgame-* data used by the legacy-route tests
  await esArchiver.load('endpoint/resolver/api_feature');
  resolverTrees = await resolver.createTrees(treeOptions);
  // we only requested a single alert so there's only 1 tree
  tree = resolverTrees.trees[0];
});
after(async () => {
  await resolver.deleteData(resolverTrees);
  // this unload is for an endgame-* index so it does not use data streams
  await esArchiver.unload('endpoint/resolver/api_feature');
});
describe('ancestry events route', () => {
describe('legacy events', () => {
// Fixed ids from the archived endgame fixture loaded in before().
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
const entityID = '94042';
it('should return details for the root node', async () => {
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(
      `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=5`
    )
    .expect(200);
  // expected counts come from the api_feature archive contents
  expect(body.ancestors[0].lifecycle.length).to.eql(2);
  expect(body.ancestors.length).to.eql(2);
  expect(body.nextAncestor).to.eql(null);
});
it('should have a populated next parameter', async () => {
  // ancestors=0 returns no ancestor nodes but still reports the next cursor
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(
      `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0`
    )
    .expect(200);
  expect(body.nextAncestor).to.eql('94041');
});
it('should handle an ancestors param request', async () => {
  // Paginate: fetch zero ancestors to get the cursor, then continue from it.
  let { body }: { body: SafeResolverAncestry } = await supertest
    .get(
      `/api/endpoint/resolver/${entityID}/ancestry?legacyEndpointID=${endpointID}&ancestors=0`
    )
    .expect(200);
  const next = body.nextAncestor;
  ({ body } = await supertest
    .get(
      `/api/endpoint/resolver/${next}/ancestry?legacyEndpointID=${endpointID}&ancestors=1`
    )
    .expect(200));
  expect(body.ancestors[0].lifecycle.length).to.eql(1);
  expect(body.nextAncestor).to.eql(null);
});
});
describe('endpoint events', () => {
it('should return the origin node at the front of the array', async () => {
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`)
    .expect(200);
  // the requested node itself leads the ancestors array
  expect(body.ancestors[0].entityID).to.eql(tree.origin.id);
});
it('should return details for the root node', async () => {
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=9`)
    .expect(200);
  // the tree we generated had 5 ancestors + 1 origin node
  expect(body.ancestors.length).to.eql(6);
  expect(body.ancestors[0].entityID).to.eql(tree.origin.id);
  checkAncestryFromEntityTreeAPI(body.ancestors, tree, true);
  expect(body.nextAncestor).to.eql(null);
});
it('should handle an invalid id', async () => {
  // unknown ids yield a 200 with an empty ancestors list, not an error
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(`/api/endpoint/resolver/alskdjflasj/ancestry`)
    .expect(200);
  expect(body.ancestors).to.be.empty();
  expect(body.nextAncestor).to.eql(null);
});
it('should have a populated next parameter', async () => {
  const { body }: { body: SafeResolverAncestry } = await supertest
    .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=2`)
    .expect(200);
  // it should have 2 ancestors + 1 origin
  expect(body.ancestors.length).to.eql(3);
  checkAncestryFromEntityTreeAPI(body.ancestors, tree, false);
  // the cursor should point at the parent of the most distant ancestor returned
  const distantGrandparent = retrieveDistantAncestor(body.ancestors);
  expect(body.nextAncestor).to.eql(
    parentEntityIDSafeVersion(distantGrandparent.lifecycle[0])
  );
});
it('should handle multiple ancestor requests', async () => {
  // Fetch part of the chain, then continue from the returned cursor.
  let { body }: { body: SafeResolverAncestry } = await supertest
    .get(`/api/endpoint/resolver/${tree.origin.id}/ancestry?ancestors=3`)
    .expect(200);
  expect(body.ancestors.length).to.eql(4);
  const next = body.nextAncestor;
  ({ body } = await supertest
    .get(`/api/endpoint/resolver/${next}/ancestry?ancestors=1`)
    .expect(200));
  expect(body.ancestors.length).to.eql(2);
  checkAncestryFromEntityTreeAPI(body.ancestors, tree, true);
  // the highest node in the generated tree will not have a parent ID which causes the server to return
  // without setting the pagination so nextAncestor will be null
  expect(body.nextAncestor).to.eql(null);
});
});
});
describe('children route', () => {
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
const entityID = '94041';
it('returns child process lifecycle events', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}`)
.expect(200);
expect(body.childNodes.length).to.eql(1);
expect(body.childNodes[0].lifecycle.length).to.eql(2);
expect(
// for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent
// here, so to avoid it complaining we'll just force it
(body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid
).to.eql(94042);
});
it('returns multiple levels of child process lifecycle events', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/93802/children?legacyEndpointID=${endpointID}&children=10`)
.expect(200);
expect(body.childNodes.length).to.eql(10);
expect(body.nextChild).to.be(null);
expect(body.childNodes[0].lifecycle.length).to.eql(1);
expect(
// for some reason the ts server doesn't think `endgame` exists even though we're using ResolverEvent
// here, so to avoid it complaining we'll just force it
(body.childNodes[0].lifecycle[0] as SafeLegacyEndpointEvent).endgame.unique_pid
).to.eql(93932);
});
it('returns no values when there is no more data', async () => {
let { body }: { body: SafeResolverChildren } = await supertest
.get(
// there should only be a single child for this node
`/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&children=1`
)
.expect(200);
expect(body.nextChild).to.not.be(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/94041/children?legacyEndpointID=${endpointID}&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes).be.empty();
expect(body.nextChild).to.eql(null);
});
it('returns the first page of information when the cursor is invalid', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(
`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=${endpointID}&afterChild=blah`
)
.expect(200);
expect(body.childNodes.length).to.eql(1);
expect(body.nextChild).to.be(null);
});
it('errors on invalid pagination values', async () => {
await supertest.get(`/api/endpoint/resolver/${entityID}/children?children=0`).expect(400);
await supertest
.get(`/api/endpoint/resolver/${entityID}/children?children=20000`)
.expect(400);
await supertest
.get(`/api/endpoint/resolver/${entityID}/children?children=-1`)
.expect(400);
});
it('returns empty events without a matching entity id', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/5555/children`)
.expect(200);
expect(body.nextChild).to.eql(null);
expect(body.childNodes).to.be.empty();
});
it('returns empty events with an invalid endpoint id', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${entityID}/children?legacyEndpointID=foo`)
.expect(200);
expect(body.nextChild).to.eql(null);
expect(body.childNodes).to.be.empty();
});
});
describe('endpoint events', () => {
it('returns all children for the origin', async () => {
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/children?children=100`)
.expect(200);
// there are 2 levels in the children part of the tree and 3 nodes for each =
// 3 children for the origin + 3 children for each of the origin's children = 12
expect(body.childNodes.length).to.eql(12);
// there will be 4 parents, the origin of the tree, and it's 3 children
verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 4, 3);
expect(body.nextChild).to.eql(null);
});
it('returns a single generation of children', async () => {
// this gets a node should have 3 children which were created in succession so that the timestamps
// are ordered correctly to be retrieved in a single call
const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id;
const { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=3`)
.expect(200);
expect(body.childNodes.length).to.eql(3);
verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 1, 3);
expect(body.nextChild).to.not.eql(null);
});
it('paginates the children', async () => {
// this gets a node should have 3 children which were created in succession so that the timestamps
// are ordered correctly to be retrieved in a single call
const distantChildEntityID = Array.from(tree.childrenLevels[0].values())[0].id;
let { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${distantChildEntityID}/children?children=1`)
.expect(200);
expect(body.childNodes.length).to.eql(1);
verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 1, 1);
expect(body.nextChild).to.not.be(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes.length).to.eql(2);
verifyChildrenFromEntityTreeAPI(body.childNodes, tree, 1, 2);
expect(body.nextChild).to.not.be(null);
({ body } = await supertest
.get(
`/api/endpoint/resolver/${distantChildEntityID}/children?children=2&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes.length).to.eql(0);
expect(body.nextChild).to.be(null);
});
it('gets all children in two queries', async () => {
// should get all the children of the origin
let { body }: { body: SafeResolverChildren } = await supertest
.get(`/api/endpoint/resolver/${tree.origin.id}/children?children=3`)
.expect(200);
expect(body.childNodes.length).to.eql(3);
verifyChildrenFromEntityTreeAPI(body.childNodes, tree);
expect(body.nextChild).to.not.be(null);
const firstNodes = [...body.childNodes];
({ body } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}/children?children=10&afterChild=${body.nextChild}`
)
.expect(200));
expect(body.childNodes.length).to.eql(9);
// put all the results together and we should have all the children
verifyChildrenFromEntityTreeAPI([...firstNodes, ...body.childNodes], tree, 4, 3);
expect(body.nextChild).to.be(null);
});
});
});
describe('tree api', () => {
describe('legacy events', () => {
const endpointID = '5a0c957f-b8e7-4538-965e-57e8bb86ad3a';
it('returns ancestors, events, children, and current process lifecycle', async () => {
const { body }: { body: SafeResolverTree } = await supertest
.get(`/api/endpoint/resolver/93933?legacyEndpointID=${endpointID}`)
.expect(200);
expect(body.ancestry.nextAncestor).to.equal(null);
expect(body.children.nextChild).to.equal(null);
expect(body.children.childNodes.length).to.equal(0);
expect(body.lifecycle.length).to.equal(2);
});
});
describe('endpoint events', () => {
it('returns a tree', async () => {
const { body }: { body: SafeResolverTree } = await supertest
.get(
`/api/endpoint/resolver/${tree.origin.id}?children=100&ancestors=5&events=5&alerts=5`
)
.expect(200);
expect(body.children.nextChild).to.equal(null);
expect(body.children.childNodes.length).to.equal(12);
verifyChildrenFromEntityTreeAPI(body.children.childNodes, tree, 4, 3);
verifyLifecycleStats(body.children.childNodes, relatedEventsToGen, relatedAlerts);
expect(body.ancestry.nextAncestor).to.equal(null);
checkAncestryFromEntityTreeAPI(body.ancestry.ancestors, tree, true);
verifyLifecycleStats(body.ancestry.ancestors, relatedEventsToGen, relatedAlerts);
expect(body.relatedAlerts.nextAlert).to.equal(null);
compareArrays(tree.origin.relatedAlerts, body.relatedAlerts.alerts, true);
compareArrays(tree.origin.lifecycle, body.lifecycle, true);
verifyEntityTreeStats(body.stats, relatedEventsToGen, relatedAlerts);
});
});
});
});
}