[Security Solution][Resolver] Data stream fields being populated (#80216)

* Data stream fields being populated

* Adding some comments

* Switching data stream options to specific functions

* Removing unneeded import

* Refactoring based on Brent's feedback
Author: Jonathan Buttner, 2020-10-15 12:33:53 -04:00 (committed by GitHub)
Commit: cd9381c118 (parent e1456372da)
10 changed files with 346 additions and 97 deletions
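In short: every document the endpoint generator produces now carries a data_stream object (type, dataset, namespace), and the generator methods accept custom values either as an extra argument or through an options object. A minimal sketch of the new API, using only names that appear in the diffs below (the import path is the one used by the resolver_generator script and may differ elsewhere):

    import { EndpointDocGenerator } from '../../common/endpoint/generate_data';

    const generator = new EndpointDocGenerator('seed');

    // With no data stream supplied, the defaults are used.
    const metadata = generator.generateHostMetadata();
    // metadata.data_stream -> { type: 'metrics', dataset: 'endpoint.metadata', namespace: 'default' }

    // Custom values are passed through unchanged via the new options objects.
    const alert = generator.generateAlert({
      ts: Date.now(),
      alertsDataStream: { type: 'logs', dataset: 'endpoint.alerts', namespace: 'custom' },
    });
    // alert.data_stream.namespace -> 'custom'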


@ -26,6 +26,53 @@ interface Node {
parent_entity_id?: string;
}
describe('data generator data streams', () => {
// these tests cast the result of the generate methods so that we can specifically compare the `data_stream` fields
it('creates a generator with default data streams', () => {
const generator = new EndpointDocGenerator('seed');
expect(generator.generateHostMetadata().data_stream).toEqual({
type: 'metrics',
dataset: 'endpoint.metadata',
namespace: 'default',
});
expect(generator.generatePolicyResponse().data_stream).toEqual({
type: 'metrics',
dataset: 'endpoint.policy',
namespace: 'default',
});
expect(generator.generateEvent().data_stream).toEqual({
type: 'logs',
dataset: 'endpoint.events.process',
namespace: 'default',
});
expect(generator.generateAlert().data_stream).toEqual({
type: 'logs',
dataset: 'endpoint.alerts',
namespace: 'default',
});
});
it('creates a generator with custom data streams', () => {
const metadataDataStream = { type: 'meta', dataset: 'dataset', namespace: 'name' };
const policyDataStream = { type: 'policy', dataset: 'fake', namespace: 'something' };
const eventsDataStream = { type: 'events', dataset: 'events stuff', namespace: 'name' };
const alertsDataStream = { type: 'alerts', dataset: 'alerts stuff', namespace: 'name' };
const generator = new EndpointDocGenerator('seed');
expect(generator.generateHostMetadata(0, metadataDataStream).data_stream).toStrictEqual(
metadataDataStream
);
expect(generator.generatePolicyResponse({ policyDataStream }).data_stream).toStrictEqual(
policyDataStream
);
expect(generator.generateEvent({ eventsDataStream }).data_stream).toStrictEqual(
eventsDataStream
);
expect(generator.generateAlert({ alertsDataStream }).data_stream).toStrictEqual(
alertsDataStream
);
});
});
describe('data generator', () => {
let generator: EndpointDocGenerator;
beforeEach(() => {
@ -69,7 +116,7 @@ describe('data generator', () => {
it('creates policy response documents', () => {
const timestamp = new Date().getTime();
const hostPolicyResponse = generator.generatePolicyResponse(timestamp);
const hostPolicyResponse = generator.generatePolicyResponse({ ts: timestamp });
expect(hostPolicyResponse['@timestamp']).toEqual(timestamp);
expect(hostPolicyResponse.event.created).toEqual(timestamp);
expect(hostPolicyResponse.Endpoint).not.toBeNull();
@ -80,7 +127,7 @@ describe('data generator', () => {
it('creates alert event documents', () => {
const timestamp = new Date().getTime();
const alert = generator.generateAlert(timestamp);
const alert = generator.generateAlert({ ts: timestamp });
expect(alert['@timestamp']).toEqual(timestamp);
expect(alert.event?.action).not.toBeNull();
expect(alert.Endpoint).not.toBeNull();


@ -7,6 +7,7 @@ import uuid from 'uuid';
import seedrandom from 'seedrandom';
import {
AlertEvent,
DataStream,
EndpointStatus,
Host,
HostMetadata,
@ -59,6 +60,7 @@ interface EventOptions {
pid?: number;
parentPid?: number;
extensions?: object;
eventsDataStream?: DataStream;
}
const Windows: OSFields[] = [
@ -330,6 +332,8 @@ export interface TreeOptions {
percentTerminated?: number;
alwaysGenMaxChildrenPerNode?: boolean;
ancestryArraySize?: number;
eventsDataStream?: DataStream;
alertsDataStream?: DataStream;
}
type TreeOptionDefaults = Required<TreeOptions>;
@ -351,19 +355,51 @@ export function getTreeOptionsWithDef(options?: TreeOptions): TreeOptionDefaults
percentTerminated: options?.percentTerminated ?? 100,
alwaysGenMaxChildrenPerNode: options?.alwaysGenMaxChildrenPerNode ?? false,
ancestryArraySize: options?.ancestryArraySize ?? ANCESTRY_LIMIT,
eventsDataStream: options?.eventsDataStream ?? eventsDefaultDataStream,
alertsDataStream: options?.alertsDataStream ?? alertsDefaultDataStream,
};
}
const metadataDefaultDataStream = {
type: 'metrics',
dataset: 'endpoint.metadata',
namespace: 'default',
};
const policyDefaultDataStream = {
type: 'metrics',
dataset: 'endpoint.policy',
namespace: 'default',
};
const eventsDefaultDataStream = {
type: 'logs',
dataset: 'endpoint.events.process',
namespace: 'default',
};
const alertsDefaultDataStream = {
type: 'logs',
dataset: 'endpoint.alerts',
namespace: 'default',
};
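// Note (illustrative, not part of the patch): these defaults mirror the concrete endpoint index
// names, e.g. metrics-endpoint.metadata-default and logs-endpoint.events.process-default, which
// follow the <type>-<dataset>-<namespace> convention parsed by createDataStreamFromIndex below.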
export class EndpointDocGenerator {
commonInfo: HostInfo;
random: seedrandom.prng;
sequence: number = 0;
/**
* The EndpointDocGenerator parameters
*
* @param seed either a string to seed the random number generator or a random number generator function
*/
constructor(seed: string | seedrandom.prng = Math.random().toString()) {
if (typeof seed === 'string') {
this.random = seedrandom(seed);
} else {
this.random = seed;
}
this.commonInfo = this.createHostData();
}
@ -383,6 +419,21 @@ export class EndpointDocGenerator {
this.commonInfo.Endpoint.policy.applied.status = this.randomChoice(POLICY_RESPONSE_STATUSES);
}
/**
* Parses an index and returns the data stream fields extracted from the index.
*
* @param index the index name to parse into the data stream parts
*/
public static createDataStreamFromIndex(index: string): DataStream {
// e.g. logs-endpoint.events.network-default
const parts = index.split('-');
return {
type: parts[0], // logs
dataset: parts[1], // endpoint.events.network
namespace: parts[2], // default
};
}
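  // Illustrative usage (not part of the patch), following the <type>-<dataset>-<namespace> convention:
  //   EndpointDocGenerator.createDataStreamFromIndex('logs-endpoint.events.network-default');
  //   // -> { type: 'logs', dataset: 'endpoint.events.network', namespace: 'default' }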
private createHostData(): HostInfo {
const hostName = this.randomHostname();
return {
@ -417,8 +468,12 @@ export class EndpointDocGenerator {
/**
* Creates a host metadata document
* @param ts - Timestamp to put in the event
* @param metadataDataStream the values to populate the data_stream fields when generating metadata documents
*/
public generateHostMetadata(ts = new Date().getTime()): HostMetadata {
public generateHostMetadata(
ts = new Date().getTime(),
metadataDataStream = metadataDefaultDataStream
): HostMetadata {
return {
'@timestamp': ts,
event: {
@ -432,6 +487,7 @@ export class EndpointDocGenerator {
dataset: 'endpoint.metadata',
},
...this.commonInfo,
data_stream: metadataDataStream,
};
}
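  // Illustrative usage (not part of the patch): callers that write to a specific index can derive the
  // matching data_stream values from that index name, as index_data.ts does further down:
  //   generator.generateHostMetadata(
  //     Date.now(),
  //     EndpointDocGenerator.createDataStreamFromIndex(metadataIndex) // metadataIndex assumed in scope
  //   );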
@ -441,15 +497,24 @@ export class EndpointDocGenerator {
* @param entityID - entityID of the originating process
* @param parentEntityID - optional entityID of the parent process, if it exists
* @param ancestry - an array of ancestors for the generated alert
* @param alertsDataStream the values to populate the data_stream fields when generating alert documents
*/
public generateAlert(
public generateAlert({
ts = new Date().getTime(),
entityID = this.randomString(10),
parentEntityID?: string,
ancestry: string[] = []
): AlertEvent {
parentEntityID,
ancestry = [],
alertsDataStream = alertsDefaultDataStream,
}: {
ts?: number;
entityID?: string;
parentEntityID?: string;
ancestry?: string[];
alertsDataStream?: DataStream;
} = {}): AlertEvent {
return {
...this.commonInfo,
data_stream: alertsDataStream,
'@timestamp': ts,
ecs: {
version: '1.4.0',
@ -598,6 +663,7 @@ export class EndpointDocGenerator {
return {};
})(options.eventCategory);
return {
data_stream: options?.eventsDataStream ?? eventsDefaultDataStream,
'@timestamp': options.timestamp ? options.timestamp : new Date().getTime(),
agent: { ...this.commonInfo.agent, type: 'endpoint' },
ecs: {
@ -813,6 +879,7 @@ export class EndpointDocGenerator {
const startDate = new Date().getTime();
const root = this.generateEvent({
timestamp: startDate + 1000,
eventsDataStream: opts.eventsDataStream,
});
events.push(root);
let ancestor = root;
@ -824,18 +891,24 @@ export class EndpointDocGenerator {
secBeforeAlert: number,
eventList: Event[]
) => {
for (const relatedAlert of this.relatedAlertsGenerator(node, alertsPerNode, secBeforeAlert)) {
for (const relatedAlert of this.relatedAlertsGenerator({
node,
relatedAlerts: alertsPerNode,
alertCreationTime: secBeforeAlert,
alertsDataStream: opts.alertsDataStream,
})) {
eventList.push(relatedAlert);
}
};
const addRelatedEvents = (node: Event, secBeforeEvent: number, eventList: Event[]) => {
for (const relatedEvent of this.relatedEventsGenerator(
for (const relatedEvent of this.relatedEventsGenerator({
node,
opts.relatedEvents,
secBeforeEvent,
opts.relatedEventsOrdered
)) {
relatedEvents: opts.relatedEvents,
processDuration: secBeforeEvent,
ordered: opts.relatedEventsOrdered,
eventsDataStream: opts.eventsDataStream,
})) {
eventList.push(relatedEvent);
}
};
@ -857,6 +930,7 @@ export class EndpointDocGenerator {
parentEntityID: parentEntityIDSafeVersion(root),
eventCategory: ['process'],
eventType: ['end'],
eventsDataStream: opts.eventsDataStream,
})
);
}
@ -877,6 +951,7 @@ export class EndpointDocGenerator {
ancestryArrayLimit: opts.ancestryArraySize,
parentPid: firstNonNullValue(ancestor.process?.pid),
pid: this.randomN(5000),
eventsDataStream: opts.eventsDataStream,
});
events.push(ancestor);
timestamp = timestamp + 1000;
@ -892,6 +967,7 @@ export class EndpointDocGenerator {
eventType: ['end'],
ancestry: ancestryArray(ancestor),
ancestryArrayLimit: opts.ancestryArraySize,
eventsDataStream: opts.eventsDataStream,
})
);
}
@ -912,12 +988,13 @@ export class EndpointDocGenerator {
timestamp = timestamp + 1000;
events.push(
this.generateAlert(
timestamp,
entityIDSafeVersion(ancestor),
parentEntityIDSafeVersion(ancestor),
ancestryArray(ancestor)
)
this.generateAlert({
ts: timestamp,
entityID: entityIDSafeVersion(ancestor),
parentEntityID: parentEntityIDSafeVersion(ancestor),
ancestry: ancestryArray(ancestor),
alertsDataStream: opts.alertsDataStream,
})
);
return events;
}
@ -973,6 +1050,7 @@ export class EndpointDocGenerator {
parentEntityID: currentStateEntityID,
ancestry,
ancestryArrayLimit: opts.ancestryArraySize,
eventsDataStream: opts.eventsDataStream,
});
maxChildren = this.randomN(opts.children + 1);
@ -996,16 +1074,23 @@ export class EndpointDocGenerator {
eventType: ['end'],
ancestry,
ancestryArrayLimit: opts.ancestryArraySize,
eventsDataStream: opts.eventsDataStream,
});
}
if (this.randomN(100) < opts.percentWithRelated) {
yield* this.relatedEventsGenerator(
child,
opts.relatedEvents,
yield* this.relatedEventsGenerator({
node: child,
relatedEvents: opts.relatedEvents,
processDuration,
opts.relatedEventsOrdered
);
yield* this.relatedAlertsGenerator(child, opts.relatedAlerts, processDuration);
ordered: opts.relatedEventsOrdered,
eventsDataStream: opts.eventsDataStream,
});
yield* this.relatedAlertsGenerator({
node: child,
relatedAlerts: opts.relatedAlerts,
alertCreationTime: processDuration,
alertsDataStream: opts.alertsDataStream,
});
}
}
}
@ -1019,12 +1104,19 @@ export class EndpointDocGenerator {
* @param ordered - if true the events will have an increasing timestamp, otherwise their timestamp will be random but
* guaranteed to be greater than or equal to the originating event
*/
public *relatedEventsGenerator(
node: Event,
relatedEvents: RelatedEventInfo[] | number = 10,
processDuration: number = 6 * 3600,
ordered: boolean = false
) {
public *relatedEventsGenerator({
node,
relatedEvents = 10,
processDuration = 6 * 3600,
ordered = false,
eventsDataStream = eventsDefaultDataStream,
}: {
node: Event;
relatedEvents?: RelatedEventInfo[] | number;
processDuration?: number;
ordered?: boolean;
eventsDataStream?: DataStream;
}) {
let relatedEventsInfo: RelatedEventInfo[];
const nodeTimestamp = timestampSafeVersion(node) ?? 0;
let ts = nodeTimestamp + 1;
@ -1056,6 +1148,7 @@ export class EndpointDocGenerator {
eventCategory: eventInfo.category,
eventType: eventInfo.creationType,
ancestry: ancestryArray(node),
eventsDataStream,
});
}
}
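  // Illustrative usage (not part of the patch), mirroring the pagination test below:
  //   const related = Array.from(generator.relatedEventsGenerator({ node: root, relatedEvents: 5 }));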
@ -1067,19 +1160,26 @@ export class EndpointDocGenerator {
* @param relatedAlerts - number which defines the number of related alerts to create
* @param alertCreationTime - maximum number of seconds after process event that related alert timestamp can be
*/
public *relatedAlertsGenerator(
node: Event,
relatedAlerts: number = 3,
alertCreationTime: number = 6 * 3600
) {
public *relatedAlertsGenerator({
node,
relatedAlerts = 3,
alertCreationTime = 6 * 3600,
alertsDataStream = alertsDefaultDataStream,
}: {
node: Event;
relatedAlerts: number;
alertCreationTime: number;
alertsDataStream: DataStream;
}) {
for (let i = 0; i < relatedAlerts; i++) {
const ts = (timestampSafeVersion(node) ?? 0) + this.randomN(alertCreationTime) * 1000;
yield this.generateAlert(
yield this.generateAlert({
ts,
entityIDSafeVersion(node),
parentEntityIDSafeVersion(node),
ancestryArray(node)
);
entityID: entityIDSafeVersion(node),
parentEntityID: parentEntityIDSafeVersion(node),
ancestry: ancestryArray(node),
alertsDataStream,
});
}
}
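  // Illustrative usage (not part of the patch); `node` is assumed to be a previously generated event:
  //   const alerts = [
  //     ...generator.relatedAlertsGenerator({
  //       node,
  //       relatedAlerts: 3,
  //       alertCreationTime: 6 * 3600,
  //       alertsDataStream: { type: 'logs', dataset: 'endpoint.alerts', namespace: 'default' },
  //     }),
  //   ];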
@ -1227,15 +1327,21 @@ export class EndpointDocGenerator {
/**
* Generates a Host Policy response message
*/
public generatePolicyResponse(
public generatePolicyResponse({
ts = new Date().getTime(),
allStatus?: HostPolicyResponseActionStatus
): HostPolicyResponse {
allStatus,
policyDataStream = policyDefaultDataStream,
}: {
ts?: number;
allStatus?: HostPolicyResponseActionStatus;
policyDataStream?: DataStream;
} = {}): HostPolicyResponse {
const policyVersion = this.seededUUIDv4();
const status = () => {
return allStatus || this.randomHostPolicyResponseActionStatus();
};
return {
data_stream: policyDataStream,
'@timestamp': ts,
agent: {
id: this.commonInfo.agent.id,


@ -52,10 +52,9 @@ export async function indexHostsAndAlerts(
const epmEndpointPackage = await getEndpointPackageInfo(kbnClient);
// Keep a map of host applied policy ids (fake) to real ingest package configs (policy record)
const realPolicies: Record<string, CreatePackagePolicyResponse['item']> = {};
for (let i = 0; i < numHosts; i++) {
const generator = new EndpointDocGenerator(random);
await indexHostDocs(
await indexHostDocs({
numDocs,
client,
kbnClient,
@ -63,10 +62,17 @@ export async function indexHostsAndAlerts(
epmEndpointPackage,
metadataIndex,
policyResponseIndex,
fleet,
generator
);
await indexAlerts(client, eventIndex, alertIndex, generator, alertsPerHost, options);
enrollFleet: fleet,
generator,
});
await indexAlerts({
client,
eventIndex,
alertIndex,
generator,
numAlerts: alertsPerHost,
options,
});
}
await client.indices.refresh({
index: eventIndex,
@ -81,17 +87,27 @@ function delay(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
async function indexHostDocs(
numDocs: number,
client: Client,
kbnClient: KbnClientWithApiKeySupport,
realPolicies: Record<string, CreatePackagePolicyResponse['item']>,
epmEndpointPackage: GetPackagesResponse['response'][0],
metadataIndex: string,
policyResponseIndex: string,
enrollFleet: boolean,
generator: EndpointDocGenerator
) {
async function indexHostDocs({
numDocs,
client,
kbnClient,
realPolicies,
epmEndpointPackage,
metadataIndex,
policyResponseIndex,
enrollFleet,
generator,
}: {
numDocs: number;
client: Client;
kbnClient: KbnClientWithApiKeySupport;
realPolicies: Record<string, CreatePackagePolicyResponse['item']>;
epmEndpointPackage: GetPackagesResponse['response'][0];
metadataIndex: string;
policyResponseIndex: string;
enrollFleet: boolean;
generator: EndpointDocGenerator;
}) {
const timeBetweenDocs = 6 * 3600 * 1000; // 6 hours between metadata documents
const timestamp = new Date().getTime();
let hostMetadata: HostMetadata;
@ -102,7 +118,10 @@ async function indexHostDocs(
generator.updateHostData();
generator.updateHostPolicyData();
hostMetadata = generator.generateHostMetadata(timestamp - timeBetweenDocs * (numDocs - j - 1));
hostMetadata = generator.generateHostMetadata(
timestamp - timeBetweenDocs * (numDocs - j - 1),
EndpointDocGenerator.createDataStreamFromIndex(metadataIndex)
);
if (enrollFleet) {
const { id: appliedPolicyId, name: appliedPolicyName } = hostMetadata.Endpoint.policy.applied;
@ -156,20 +175,30 @@ async function indexHostDocs(
});
await client.index({
index: policyResponseIndex,
body: generator.generatePolicyResponse(timestamp - timeBetweenDocs * (numDocs - j - 1)),
body: generator.generatePolicyResponse({
ts: timestamp - timeBetweenDocs * (numDocs - j - 1),
policyDataStream: EndpointDocGenerator.createDataStreamFromIndex(policyResponseIndex),
}),
op_type: 'create',
});
}
}
async function indexAlerts(
client: Client,
eventIndex: string,
alertIndex: string,
generator: EndpointDocGenerator,
numAlerts: number,
options: TreeOptions = {}
) {
async function indexAlerts({
client,
eventIndex,
alertIndex,
generator,
numAlerts,
options = {},
}: {
client: Client;
eventIndex: string;
alertIndex: string;
generator: EndpointDocGenerator;
numAlerts: number;
options: TreeOptions;
}) {
const alertGenerator = generator.alertsGenerator(numAlerts, options);
let result = alertGenerator.next();
while (!result.done) {


@ -300,6 +300,15 @@ export interface HostResultList {
query_strategy_version: MetadataQueryStrategyVersions;
}
/**
* The data_stream fields in an elasticsearch document.
*/
export interface DataStream {
dataset: string;
namespace: string;
type: string;
}
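// Example value (illustrative): { type: 'logs', dataset: 'endpoint.events.process', namespace: 'default' },
// i.e. the parts of the backing index logs-endpoint.events.process-default.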
/**
* Operating System metadata.
*/
@ -556,6 +565,7 @@ export type HostMetadata = Immutable<{
version: string;
};
host: Host;
data_stream: DataStream;
}>;
export interface LegacyEndpointEvent {
@ -675,6 +685,11 @@ export type SafeEndpointEvent = Partial<{
version: ECSField<string>;
type: ECSField<string>;
}>;
data_stream: Partial<{
type: ECSField<string>;
dataset: ECSField<string>;
namespace: ECSField<string>;
}>;
ecs: Partial<{
version: ECSField<string>;
}>;
@ -1002,6 +1017,7 @@ interface HostPolicyResponseAppliedArtifact {
*/
export interface HostPolicyResponse {
'@timestamp': number;
data_stream: DataStream;
elastic: {
agent: {
id: string;


@ -702,10 +702,10 @@ describe('when on the list page', () => {
});
it('should not show any numbered badges if all actions are successful', () => {
const policyResponse = docGenerator.generatePolicyResponse(
new Date().getTime(),
HostPolicyResponseActionStatus.success
);
const policyResponse = docGenerator.generatePolicyResponse({
ts: new Date().getTime(),
allStatus: HostPolicyResponseActionStatus.success,
});
reactTestingLibrary.act(() => {
store.dispatch({
type: 'serverReturnedEndpointPolicyResponse',


@ -10,7 +10,7 @@ import { ResponseError } from '@elastic/elasticsearch/lib/errors';
import { KbnClient, ToolingLog } from '@kbn/dev-utils';
import { AxiosResponse } from 'axios';
import { indexHostsAndAlerts } from '../../common/endpoint/index_data';
import { ANCESTRY_LIMIT } from '../../common/endpoint/generate_data';
import { ANCESTRY_LIMIT, EndpointDocGenerator } from '../../common/endpoint/generate_data';
import { AGENTS_SETUP_API_ROUTES, SETUP_API_ROUTE } from '../../../ingest_manager/common/constants';
import {
CreateFleetSetupResponse,
@ -250,6 +250,8 @@ async function main() {
percentTerminated: argv.percentTerminated,
alwaysGenMaxChildrenPerNode: argv.maxChildrenPerNode,
ancestryArraySize: argv.ancestryArraySize,
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(argv.eventIndex),
alertsDataStream: EndpointDocGenerator.createDataStreamFromIndex(argv.alertIndex),
}
);
console.log(`Creating and indexing documents took: ${new Date().getTime() - startTime}ms`);
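With this wiring, documents produced by the resolver_generator script carry data_stream values that match whichever indices are passed to it; a sketch of the mapping (index name taken from the examples above):

    // argv.eventIndex = 'logs-endpoint.events.process-default'
    // -> eventsDataStream = { type: 'logs', dataset: 'endpoint.events.process', namespace: 'default' }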


@ -20,7 +20,7 @@ describe('Pagination', () => {
};
describe('cursor', () => {
const root = generator.generateEvent();
const events = Array.from(generator.relatedEventsGenerator(root, 5));
const events = Array.from(generator.relatedEventsGenerator({ node: root, relatedEvents: 5 }));
it('does build a cursor when received the same number of events as was requested', () => {
expect(PaginationBuilder.buildCursorRequestLimit(4, events)).not.toBeNull();


@ -74,7 +74,9 @@ export default function ({ getService }: FtrProviderContext) {
});
it('handles events without the `network.protocol` field being defined', async () => {
const eventWithoutNetworkObject = generator.generateEvent();
const eventWithoutNetworkObject = generator.generateEvent({
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(networkIndex),
});
// ensure that `network.protocol` does not exist in the event to test that the pipeline handles those type of events
delete eventWithoutNetworkObject.network;
@ -137,8 +139,10 @@ export default function ({ getService }: FtrProviderContext) {
let genData: InsertedEvents;
before(async () => {
event = generator.generateEvent();
genData = await resolver.insertEvents([event]);
event = generator.generateEvent({
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
genData = await resolver.insertEvents([event], processEventsIndex);
});
after(async () => {
@ -158,20 +162,29 @@ export default function ({ getService }: FtrProviderContext) {
before(async () => {
// 46.239.193.5 should be in Iceland
// 8.8.8.8 should be in the US
const eventWithBothIPs = generator.generateEvent({
const eventWithBothIPsNetwork = generator.generateEvent({
extensions: { source: { ip: '8.8.8.8' }, destination: { ip: '46.239.193.5' } },
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(networkIndex),
});
const eventWithSourceOnly = generator.generateEvent({
const eventWithSourceOnlyNetwork = generator.generateEvent({
extensions: { source: { ip: '8.8.8.8' } },
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(networkIndex),
});
networkIndexData = await resolver.insertEvents(
[eventWithBothIPs, eventWithSourceOnly],
[eventWithBothIPsNetwork, eventWithSourceOnlyNetwork],
networkIndex
);
processIndexData = await resolver.insertEvents([eventWithBothIPs], processEventsIndex);
const eventWithBothIPsProcess = generator.generateEvent({
extensions: { source: { ip: '8.8.8.8' }, destination: { ip: '46.239.193.5' } },
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
processIndexData = await resolver.insertEvents(
[eventWithBothIPsProcess],
processEventsIndex
);
});
after(async () => {


@ -22,7 +22,7 @@ import {
Event,
EndpointDocGenerator,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { InsertedEvents } from '../../services/resolver';
import { InsertedEvents, processEventsIndex } from '../../services/resolver';
import { createAncestryArray } from './common';
export default function resolverAPIIntegrationTests({ getService }: FtrProviderContext) {
@ -42,25 +42,33 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
before(async () => {
// Construct the following tree:
// Origin -> infoEvent -> startEvent -> execEvent
origin = generator.generateEvent();
origin = generator.generateEvent({
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
infoEvent = generator.generateEvent({
parentEntityID: entityIDSafeVersion(origin),
ancestry: createAncestryArray([origin]),
eventType: ['info'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
startEvent = generator.generateEvent({
parentEntityID: entityIDSafeVersion(infoEvent),
ancestry: createAncestryArray([infoEvent, origin]),
eventType: ['start'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
execEvent = generator.generateEvent({
parentEntityID: entityIDSafeVersion(startEvent),
ancestry: createAncestryArray([startEvent, infoEvent]),
eventType: ['change'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
genData = await resolver.insertEvents([origin, infoEvent, startEvent, execEvent]);
genData = await resolver.insertEvents(
[origin, infoEvent, startEvent, execEvent],
processEventsIndex
);
});
after(async () => {
@ -88,11 +96,14 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
before(async () => {
// Construct the following tree:
// Origin -> (infoEvent, startEvent, execEvent are all for the same node)
origin = generator.generateEvent();
origin = generator.generateEvent({
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
startEvent = generator.generateEvent({
parentEntityID: entityIDSafeVersion(origin),
ancestry: createAncestryArray([origin]),
eventType: ['start'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
infoEvent = generator.generateEvent({
@ -100,6 +111,7 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
ancestry: createAncestryArray([origin]),
entityID: entityIDSafeVersion(startEvent),
eventType: ['info'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
execEvent = generator.generateEvent({
@ -107,8 +119,12 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
ancestry: createAncestryArray([origin]),
eventType: ['change'],
entityID: entityIDSafeVersion(startEvent),
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
genData = await resolver.insertEvents([origin, infoEvent, startEvent, execEvent]);
genData = await resolver.insertEvents(
[origin, infoEvent, startEvent, execEvent],
processEventsIndex
);
});
after(async () => {
@ -141,11 +157,14 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
before(async () => {
// Construct the following tree:
// Origin -> (infoEvent, startEvent, execEvent are all for the same node)
origin = generator.generateEvent();
origin = generator.generateEvent({
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
startEvent = generator.generateEvent({
parentEntityID: entityIDSafeVersion(origin),
ancestry: createAncestryArray([origin]),
eventType: ['start'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
infoEvent = generator.generateEvent({
@ -154,6 +173,7 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
ancestry: createAncestryArray([origin]),
entityID: entityIDSafeVersion(startEvent),
eventType: ['info'],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
execEvent = generator.generateEvent({
@ -162,8 +182,12 @@ export default function resolverAPIIntegrationTests({ getService }: FtrProviderC
ancestry: createAncestryArray([origin]),
eventType: ['change'],
entityID: entityIDSafeVersion(startEvent),
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
genData = await resolver.insertEvents([origin, infoEvent, startEvent, execEvent]);
genData = await resolver.insertEvents(
[origin, infoEvent, startEvent, execEvent],
processEventsIndex
);
});
after(async () => {


@ -15,7 +15,7 @@ import {
EndpointDocGenerator,
Event,
} from '../../../../plugins/security_solution/common/endpoint/generate_data';
import { InsertedEvents } from '../../services/resolver';
import { InsertedEvents, processEventsIndex } from '../../services/resolver';
import { createAncestryArray } from './common';
export default function ({ getService }: FtrProviderContext) {
@ -34,9 +34,12 @@ export default function ({ getService }: FtrProviderContext) {
let origin: Event;
let genData: InsertedEvents;
before(async () => {
origin = generator.generateEvent({ parentEntityID: 'a' });
origin = generator.generateEvent({
parentEntityID: 'a',
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
setEntityIDEmptyString(origin);
genData = await resolver.insertEvents([origin]);
genData = await resolver.insertEvents([origin], processEventsIndex);
});
after(async () => {
@ -63,10 +66,14 @@ export default function ({ getService }: FtrProviderContext) {
before(async () => {
// construct a tree with an origin and two direct children. One child will not have an entity_id. That child
// should not be returned by the backend.
origin = generator.generateEvent({ entityID: 'a' });
origin = generator.generateEvent({
entityID: 'a',
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
childNoEntityID = generator.generateEvent({
parentEntityID: entityIDSafeVersion(origin),
ancestry: createAncestryArray([origin]),
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
// force it to be empty
setEntityIDEmptyString(childNoEntityID);
@ -75,9 +82,10 @@ export default function ({ getService }: FtrProviderContext) {
entityID: 'b',
parentEntityID: entityIDSafeVersion(origin),
ancestry: createAncestryArray([origin]),
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
events = [origin, childNoEntityID, childWithEntityID];
genData = await resolver.insertEvents(events);
genData = await resolver.insertEvents(events, processEventsIndex);
});
after(async () => {
@ -106,17 +114,20 @@ export default function ({ getService }: FtrProviderContext) {
// entity_ids in the ancestry array. This is to make sure that the backend will not query for that event.
ancestor2 = generator.generateEvent({
entityID: '2',
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
ancestor1 = generator.generateEvent({
entityID: '1',
parentEntityID: entityIDSafeVersion(ancestor2),
ancestry: createAncestryArray([ancestor2]),
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
// we'll insert an event that doesn't have an entity id so if the backend does search for it, it should be
// returned and our test should fail
ancestorNoEntityID = generator.generateEvent({
ancestry: createAncestryArray([ancestor2]),
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
setEntityIDEmptyString(ancestorNoEntityID);
@ -124,10 +135,11 @@ export default function ({ getService }: FtrProviderContext) {
entityID: 'a',
parentEntityID: entityIDSafeVersion(ancestor1),
ancestry: ['', ...createAncestryArray([ancestor2])],
eventsDataStream: EndpointDocGenerator.createDataStreamFromIndex(processEventsIndex),
});
events = [origin, ancestor1, ancestor2, ancestorNoEntityID];
genData = await resolver.insertEvents(events);
genData = await resolver.insertEvents(events, processEventsIndex);
});
after(async () => {