[Security Solution][Detections] Reduce detection engine reliance on _source (#89371) (#90287)

* First pass at switching rules to depend on fields instead of _source

* Fix tests

* Change operator: excluded logic so missing fields are allowlisted

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>

Committed by Marshall Main on 2021-02-04 12:45:29 -05:00 via GitHub
commit 7478b45ee6 (parent af940d518f)
20 changed files with 162 additions and 71 deletions
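
For context on the first bullet of the commit message above: when a search request uses the fields option, Elasticsearch returns each hit's values in a separate fields section keyed by the flattened field name, and every value comes back as an array even when the document stores a single value. A minimal sketch of the difference in hit shape (illustrative only, not code from this commit):

// Hit shape the detection engine previously read from, via _source:
const hitViaSource = {
  _source: {
    '@timestamp': '2020-04-20T21:27:45+0000',
    source: { ip: '127.0.0.1' },
  },
};

// Hit shape the rules read from after this change, via the fields option:
const hitViaFields = {
  fields: {
    '@timestamp': ['2020-04-20T21:27:45+0000'], // values always arrive as arrays
    'source.ip': ['127.0.0.1'], // nested fields arrive flattened under dotted keys
  },
};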


@ -166,6 +166,12 @@ export const sampleDocWithSortId = (
ip: destIp ?? '127.0.0.1',
},
},
fields: {
someKey: ['someValue'],
'@timestamp': ['2020-04-20T21:27:45+0000'],
'source.ip': ip ? (Array.isArray(ip) ? ip : [ip]) : ['127.0.0.1'],
'destination.ip': destIp ? (Array.isArray(destIp) ? destIp : [destIp]) : ['127.0.0.1'],
},
sort: ['1234567891111'],
});
@ -185,6 +191,11 @@ export const sampleDocNoSortId = (
ip: ip ?? '127.0.0.1',
},
},
fields: {
someKey: ['someValue'],
'@timestamp': ['2020-04-20T21:27:45+0000'],
'source.ip': [ip ?? '127.0.0.1'],
},
sort: [],
});


@ -56,7 +56,12 @@ describe('create_signals', () => {
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
sort: [
{
'@timestamp': {
@ -115,7 +120,12 @@ describe('create_signals', () => {
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
sort: [
{
'@timestamp': {
@ -175,7 +185,12 @@ describe('create_signals', () => {
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
sort: [
{
'@timestamp': {
@ -236,7 +251,12 @@ describe('create_signals', () => {
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
sort: [
{
'@timestamp': {
@ -296,7 +316,12 @@ describe('create_signals', () => {
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
sort: [
{
'@timestamp': {
@ -358,6 +383,12 @@ describe('create_signals', () => {
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
aggregations: {
tags: {
terms: {


@ -89,6 +89,12 @@ export const buildEventsSearchQuery = ({
],
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
...(aggregations ? { aggregations } : {}),
sort: [
{
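
The request bodies built here (and asserted in the tests above) now ask Elasticsearch to return every field, including unmapped ones, so each hit arrives with a fields section next to _source. A hedged sketch of the option and the kind of hit it produces (the custom field name is made up for illustration):

// Request body: return all fields per hit; include_unmapped also surfaces
// fields that have no mapping in the index.
const searchBody = {
  query: { match_all: {} },
  fields: [{ field: '*', include_unmapped: true }],
};

// Response hit, shape only: the unmapped field still comes back under fields,
// with its value wrapped in an array.
const exampleHit = {
  _source: { labels: { custom_flag: 'x' } },
  fields: { 'labels.custom_flag': ['x'] },
};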


@ -67,7 +67,7 @@ describe('transformThresholdResultsToEcs', () => {
_id,
_index: 'test',
_source: {
'@timestamp': '2020-04-20T21:27:45+0000',
'@timestamp': ['2020-04-20T21:27:45+0000'],
threshold_result: {
count: 1,
value: '127.0.0.1',


@ -75,7 +75,7 @@ const getTransformedHits = (
}
const source = {
'@timestamp': get(timestampOverride ?? '@timestamp', hit._source),
'@timestamp': get(timestampOverride ?? '@timestamp', hit.fields),
threshold_result: {
count: totalResults,
value: ruleId,
@ -104,10 +104,10 @@ const getTransformedHits = (
}
const source = {
'@timestamp': get(timestampOverride ?? '@timestamp', hit._source),
'@timestamp': get(timestampOverride ?? '@timestamp', hit.fields),
threshold_result: {
count: docCount,
value: get(threshold.field, hit._source),
value: key,
},
};
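
Because the transform now reads the timestamp out of hit.fields, the value it copies into the signal is an array rather than a scalar, which is why the expected value in the threshold test above changed to ['2020-04-20T21:27:45+0000']. A small sketch of the lookup, assuming a hit shaped like the mocks in this commit:

import { get } from 'lodash/fp';

const hit = { fields: { '@timestamp': ['2020-04-20T21:27:45+0000'] } };
// lodash get resolves the key and returns the whole array from fields.
const timestamp = get('@timestamp', hit.fields); // ['2020-04-20T21:27:45+0000']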


@ -120,7 +120,7 @@ describe('filterEventsAgainstList', () => {
exceptionItem,
buildRuleMessage,
});
expect([...matchedSet]).toEqual([JSON.stringify('1.1.1.1')]);
expect([...matchedSet]).toEqual([JSON.stringify(['1.1.1.1'])]);
});
test('it returns two matched sets as a JSON.stringify() set from the "events"', async () => {
@ -133,7 +133,7 @@ describe('filterEventsAgainstList', () => {
exceptionItem,
buildRuleMessage,
});
expect([...matchedSet]).toEqual([JSON.stringify('1.1.1.1'), JSON.stringify('2.2.2.2')]);
expect([...matchedSet]).toEqual([JSON.stringify(['1.1.1.1']), JSON.stringify(['2.2.2.2'])]);
});
test('it returns an array as a set as a JSON.stringify() array from the "events"', async () => {
@ -282,7 +282,7 @@ describe('filterEventsAgainstList', () => {
exceptionItem,
buildRuleMessage,
});
expect([...matchedSet1]).toEqual([JSON.stringify('1.1.1.1'), JSON.stringify('2.2.2.2')]);
expect([...matchedSet2]).toEqual([JSON.stringify('3.3.3.3'), JSON.stringify('5.5.5.5')]);
expect([...matchedSet1]).toEqual([JSON.stringify(['1.1.1.1']), JSON.stringify(['2.2.2.2'])]);
expect([...matchedSet2]).toEqual([JSON.stringify(['3.3.3.3']), JSON.stringify(['5.5.5.5'])]);
});
});


@ -62,9 +62,9 @@ describe('createSetToFilterAgainst', () => {
expect(listClient.searchListItemByValues).toHaveBeenCalledWith({
listId: 'list-123',
type: 'ip',
value: ['1.1.1.1'],
value: [['1.1.1.1']],
});
expect([...field]).toEqual([JSON.stringify('1.1.1.1')]);
expect([...field]).toEqual([JSON.stringify(['1.1.1.1'])]);
});
test('it returns 2 fields if the list returns 2 items', async () => {
@ -81,9 +81,9 @@ describe('createSetToFilterAgainst', () => {
expect(listClient.searchListItemByValues).toHaveBeenCalledWith({
listId: 'list-123',
type: 'ip',
value: ['1.1.1.1', '2.2.2.2'],
value: [['1.1.1.1'], ['2.2.2.2']],
});
expect([...field]).toEqual([JSON.stringify('1.1.1.1'), JSON.stringify('2.2.2.2')]);
expect([...field]).toEqual([JSON.stringify(['1.1.1.1']), JSON.stringify(['2.2.2.2'])]);
});
test('it returns 0 fields if the field does not match up to a valid field within the event', async () => {


@ -5,7 +5,6 @@
* 2.0.
*/
import { get } from 'lodash/fp';
import { CreateSetToFilterAgainstOptions } from './types';
/**
@ -31,7 +30,7 @@ export const createSetToFilterAgainst = async <T>({
buildRuleMessage,
}: CreateSetToFilterAgainstOptions<T>): Promise<Set<unknown>> => {
const valuesFromSearchResultField = events.reduce((acc, searchResultItem) => {
const valueField = get(field, searchResultItem._source);
const valueField = searchResultItem.fields ? searchResultItem.fields[field] : undefined;
if (valueField != null) {
acc.add(valueField);
}
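
With the switch to fields, the value pulled off each event is the whole array for that field, so the set and the call to searchListItemByValues now carry arrays, matching the updated expectations above (value: [['1.1.1.1']]). A minimal sketch of the accumulation, assuming events shaped like the mocks in this PR:

type EventHit = { fields?: Record<string, unknown[]> };

const events: EventHit[] = [
  { fields: { 'source.ip': ['1.1.1.1'] } },
  { fields: {} }, // event without the field contributes nothing to the set
];

const valuesFromField = events.reduce<Set<unknown>>((acc, event) => {
  const valueField = event.fields ? event.fields['source.ip'] : undefined;
  if (valueField != null) {
    acc.add(valueField); // the array itself, e.g. ['1.1.1.1'], is added
  }
  return acc;
}, new Set());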


@ -40,7 +40,7 @@ describe('filterEvents', () => {
{
field: 'source.ip',
operator: 'included',
matchedSet: new Set([JSON.stringify('1.1.1.1')]),
matchedSet: new Set([JSON.stringify(['1.1.1.1'])]),
},
];
const field = filterEvents({
@ -56,7 +56,7 @@ describe('filterEvents', () => {
{
field: 'source.ip',
operator: 'excluded',
matchedSet: new Set([JSON.stringify('1.1.1.1')]),
matchedSet: new Set([JSON.stringify(['1.1.1.1'])]),
},
];
const field = filterEvents({
@ -72,7 +72,7 @@ describe('filterEvents', () => {
{
field: 'madeup.nonexistent', // field does not exist
operator: 'included',
matchedSet: new Set([JSON.stringify('1.1.1.1')]),
matchedSet: new Set([JSON.stringify(['1.1.1.1'])]),
},
];
const field = filterEvents({
@ -88,12 +88,12 @@ describe('filterEvents', () => {
{
field: 'source.ip',
operator: 'included',
matchedSet: new Set([JSON.stringify('1.1.1.1')]),
matchedSet: new Set([JSON.stringify(['1.1.1.1'])]),
},
{
field: 'source.ip',
operator: 'excluded',
matchedSet: new Set([JSON.stringify('1.1.1.1')]),
matchedSet: new Set([JSON.stringify(['1.1.1.1'])]),
},
];


@ -5,7 +5,6 @@
* 2.0.
*/
import { get } from 'lodash/fp';
import { SearchResponse } from '../../../types';
import { FilterEventsOptions } from './types';
@ -22,13 +21,17 @@ export const filterEvents = <T>({
return events.filter((item) => {
return fieldAndSetTuples
.map((tuple) => {
const eventItem = get(tuple.field, item._source);
if (eventItem == null) {
return true;
} else if (tuple.operator === 'included') {
const eventItem = item.fields ? item.fields[tuple.field] : undefined;
if (tuple.operator === 'included') {
if (eventItem == null) {
return true;
}
// only create a signal if the event is not in the value list
return !tuple.matchedSet.has(JSON.stringify(eventItem));
} else if (tuple.operator === 'excluded') {
if (eventItem == null) {
return false;
}
// only create a signal if the event is in the value list
return tuple.matchedSet.has(JSON.stringify(eventItem));
} else {
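
The reworked branches above implement the behavior called out in the third commit bullet: an event that lacks the exception's field can never appear in a value list, so under operator included it still produces a signal, while under operator excluded it is treated as matching the exception and is allowlisted. A condensed sketch of the decision (the helper name is hypothetical; the set holds JSON.stringify'd field arrays as in the tests above):

const keepEvent = (
  operator: 'included' | 'excluded',
  matchedSet: Set<string>,
  eventValue: unknown[] | undefined
): boolean => {
  if (operator === 'included') {
    // A hit without the field cannot be in the value list, so the exception does not apply.
    if (eventValue == null) {
      return true; // keep the event: a signal is still created
    }
    return !matchedSet.has(JSON.stringify(eventValue));
  }
  // 'excluded': a missing field trivially satisfies "not in the list",
  // so the exception applies and the event is allowlisted (no signal).
  if (eventValue == null) {
    return false;
  }
  return matchedSet.has(JSON.stringify(eventValue));
};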


@ -162,12 +162,12 @@ describe('filterEventsAgainstList', () => {
// this call represents an exception list with a value list containing ['2.2.2.2', '4.4.4.4']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValueOnce([
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: '4.4.4.4' },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
{ ...getSearchListItemResponseMock(), value: ['4.4.4.4'] },
]);
// this call represents an exception list with a value list containing ['6.6.6.6']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValueOnce([
{ ...getSearchListItemResponseMock(), value: '6.6.6.6' },
{ ...getSearchListItemResponseMock(), value: ['6.6.6.6'] },
]);
const res = await filterEventsAgainstList({
@ -224,11 +224,11 @@ describe('filterEventsAgainstList', () => {
// this call represents an exception list with a value list containing ['2.2.2.2', '4.4.4.4']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValueOnce([
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
]);
// this call represents an exception list with a value list containing ['6.6.6.6']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValueOnce([
{ ...getSearchListItemResponseMock(), value: '6.6.6.6' },
{ ...getSearchListItemResponseMock(), value: ['6.6.6.6'] },
]);
const res = await filterEventsAgainstList({
@ -283,11 +283,11 @@ describe('filterEventsAgainstList', () => {
// this call represents an exception list with a value list containing ['2.2.2.2']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValueOnce([
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
]);
// this call represents an exception list with a value list containing ['4.4.4.4']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValueOnce([
{ ...getSearchListItemResponseMock(), value: '4.4.4.4' },
{ ...getSearchListItemResponseMock(), value: ['4.4.4.4'] },
]);
const res = await filterEventsAgainstList({
@ -365,7 +365,7 @@ describe('filterEventsAgainstList', () => {
// this call represents an exception list with a value list containing ['2.2.2.2', '4.4.4.4']
(listClient.searchListItemByValues as jest.Mock).mockResolvedValue([
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
]);
const res = await filterEventsAgainstList({


@ -69,6 +69,12 @@ export const findThresholdSignals = async ({
},
},
],
fields: [
{
field: '*',
include_unmapped: true,
},
],
size: 1,
},
},


@ -310,9 +310,9 @@ describe('searchAfterAndBulkCreate', () => {
test('should return success when all search results are in the allowlist and with sortId present', async () => {
const searchListItems: SearchListItemArraySchema = [
{ ...getSearchListItemResponseMock(), value: '1.1.1.1' },
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: '3.3.3.3' },
{ ...getSearchListItemResponseMock(), value: ['1.1.1.1'] },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
{ ...getSearchListItemResponseMock(), value: ['3.3.3.3'] },
];
listClient.searchListItemByValues = jest.fn().mockResolvedValue(searchListItems);
const sampleParams = sampleRuleAlertParams(30);
@ -374,10 +374,10 @@ describe('searchAfterAndBulkCreate', () => {
test('should return success when all search results are in the allowlist and no sortId present', async () => {
const searchListItems: SearchListItemArraySchema = [
{ ...getSearchListItemResponseMock(), value: '1.1.1.1' },
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: '2.2.2.2' },
{ ...getSearchListItemResponseMock(), value: ['1.1.1.1'] },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
{ ...getSearchListItemResponseMock(), value: ['2.2.2.2'] },
];
listClient.searchListItemByValues = jest.fn().mockResolvedValue(searchListItems);


@ -316,19 +316,9 @@ describe('singleBulkCreate', () => {
});
test('filter duplicate rules will return back search responses if they do not have a signal and will NOT filter the source out', () => {
const ancestors = sampleDocWithAncestors();
ancestors.hits.hits[0]._source = { '@timestamp': '2020-04-20T21:27:45+0000' };
const ancestors = sampleDocSearchResultsNoSortId();
const filtered = filterDuplicateRules('04128c15-0d1b-4716-a4c5-46997ac7f3bd', ancestors);
expect(filtered).toEqual([
{
_index: 'myFakeSignalIndex',
_type: 'doc',
_score: 100,
_version: 1,
_id: 'e1e08ddc-5e37-49ff-a258-5393aa44435a',
_source: { '@timestamp': '2020-04-20T21:27:45+0000' },
},
]);
expect(filtered).toEqual(ancestors.hits.hits);
});
test('filter duplicate rules does not attempt filters when the signal is not an event type of signal but rather a "clash" from the source index having its own numeric signal type', () => {


@ -81,6 +81,7 @@ export const getThreatListSearchResponseMock = (): SearchResponse<ThreatListItem
_id: '123',
_score: 0,
_source: getThreatListItemMock(),
fields: getThreatListItemFieldsMock(),
},
],
},
@ -102,6 +103,16 @@ export const getThreatListItemMock = (): ThreatListItem => ({
},
});
export const getThreatListItemFieldsMock = () => ({
'@timestamp': ['2020-09-09T21:59:13Z'],
'host.name': ['host-1'],
'host.ip': ['192.168.0.0.1'],
'source.ip': ['127.0.0.1'],
'source.port': [1],
'destination.ip': ['127.0.0.1'],
'destination.port': [1],
});
export const getFilterThreatMapping = (): ThreatMapping => [
{
entries: [


@ -133,10 +133,16 @@ describe('build_threat_mapping_filter', () => {
},
],
threatListItem: {
'@timestamp': '2020-09-09T21:59:13Z',
host: {
name: 'host-1',
// since ip is missing this entire AND clause should be dropped
_source: {
'@timestamp': '2020-09-09T21:59:13Z',
host: {
name: 'host-1',
// since ip is missing this entire AND clause should be dropped
},
},
fields: {
'@timestamp': ['2020-09-09T21:59:13Z'],
'host.name': ['host-1'],
},
},
});
@ -177,6 +183,10 @@ describe('build_threat_mapping_filter', () => {
name: 'host-1',
},
},
fields: {
'@timestamp': ['2020-09-09T21:59:13Z'],
'host.name': ['host-1'],
},
},
});
expect(item).toEqual([


@ -55,7 +55,8 @@ export const filterThreatMapping = ({
threatMapping
.map((threatMap) => {
const atLeastOneItemMissingInThreatList = threatMap.entries.some((entry) => {
return get(entry.value, threatListItem._source) == null;
const itemValue = get(entry.value, threatListItem.fields);
return itemValue == null || itemValue.length !== 1;
});
if (atLeastOneItemMissingInThreatList) {
return { ...threatMap, entries: [] };
@ -70,15 +71,15 @@ export const createInnerAndClauses = ({
threatListItem,
}: CreateInnerAndClausesOptions): BooleanFilter[] => {
return threatMappingEntries.reduce<BooleanFilter[]>((accum, threatMappingEntry) => {
const value = get(threatMappingEntry.value, threatListItem._source);
if (value != null) {
const value = get(threatMappingEntry.value, threatListItem.fields);
if (value != null && value.length === 1) {
// These values could be potentially 10k+ large so mutating the array intentionally
accum.push({
bool: {
should: [
{
match: {
[threatMappingEntry.field]: value,
[threatMappingEntry.field]: value[0],
},
},
],
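
Because fields values are arrays, the clause builder above only emits a match when exactly one value came back for the mapped threat field, and it unwraps that single element before putting it into the query. A hedged sketch of the resulting clause for one entry, reusing field names from the mocks in this commit:

const threatListItemFields: Record<string, unknown[]> = { 'host.name': ['host-1'] };
const value = threatListItemFields['host.name'];

// Only a single-valued field produces a clause; missing or multi-valued fields are skipped.
const innerClause =
  value != null && value.length === 1
    ? { bool: { should: [{ match: { 'host.name': value[0] } }] } }
    : undefined;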


@ -55,6 +55,12 @@ export const getThreatList = async ({
const response: SearchResponse<ThreatListItem> = await callCluster('search', {
body: {
query: queryFilter,
fields: [
{
field: '*',
include_unmapped: true,
},
],
search_after: searchAfter,
sort: getSortWithTieBreaker({
sortField,


@ -1166,6 +1166,9 @@ describe('utils', () => {
test('It will not set an invalid date time stamp from a non-existent @timestamp when the index is not 100% ECS compliant', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = undefined;
if (searchResult.hits.hits[0].fields != null) {
(searchResult.hits.hits[0].fields['@timestamp'] as unknown) = undefined;
}
const { lastLookBackDate } = createSearchAfterReturnTypeFromResponse({
searchResult,
timestampOverride: undefined,
@ -1176,6 +1179,9 @@ describe('utils', () => {
test('It will not set an invalid date time stamp from a null @timestamp when the index is not 100% ECS compliant', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = null;
if (searchResult.hits.hits[0].fields != null) {
(searchResult.hits.hits[0].fields['@timestamp'] as unknown) = null;
}
const { lastLookBackDate } = createSearchAfterReturnTypeFromResponse({
searchResult,
timestampOverride: undefined,
@ -1186,6 +1192,9 @@ describe('utils', () => {
test('It will not set an invalid date time stamp from an invalid @timestamp string', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = 'invalid';
if (searchResult.hits.hits[0].fields != null) {
(searchResult.hits.hits[0].fields['@timestamp'] as unknown) = ['invalid'];
}
const { lastLookBackDate } = createSearchAfterReturnTypeFromResponse({
searchResult,
timestampOverride: undefined,
@ -1198,6 +1207,9 @@ describe('utils', () => {
test('It returns undefined if the search result contains a null timestamp', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = null;
if (searchResult.hits.hits[0].fields != null) {
(searchResult.hits.hits[0].fields['@timestamp'] as unknown) = null;
}
const date = lastValidDate({ searchResult, timestampOverride: undefined });
expect(date).toEqual(undefined);
});
@ -1205,6 +1217,9 @@ describe('utils', () => {
test('It returns undefined if the search result contains a undefined timestamp', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = undefined;
if (searchResult.hits.hits[0].fields != null) {
(searchResult.hits.hits[0].fields['@timestamp'] as unknown) = undefined;
}
const date = lastValidDate({ searchResult, timestampOverride: undefined });
expect(date).toEqual(undefined);
});
@ -1212,13 +1227,9 @@ describe('utils', () => {
test('It returns undefined if the search result contains an invalid string value', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = 'invalid value';
const date = lastValidDate({ searchResult, timestampOverride: undefined });
expect(date).toEqual(undefined);
});
test('It returns correct date time stamp if the search result contains an invalid string value', () => {
const searchResult = sampleDocSearchResultsNoSortId();
(searchResult.hits.hits[0]._source['@timestamp'] as unknown) = 'invalid value';
if (searchResult.hits.hits[0].fields != null) {
(searchResult.hits.hits[0].fields['@timestamp'] as unknown) = ['invalid value'];
}
const date = lastValidDate({ searchResult, timestampOverride: undefined });
expect(date).toEqual(undefined);
});
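
These tests now invalidate the timestamp in both places a rule could read it from, since the mock hits carry _source and fields side by side. A small hypothetical helper (not part of this commit) that captures the repeated pattern:

type MockHit = {
  _source: Record<string, unknown>;
  fields?: Record<string, unknown>;
};

// Hypothetical test utility: override @timestamp in both the _source and fields
// copies of a mock hit, wrapping non-null values in an array on the fields side.
const overrideTimestamp = (hit: MockHit, value: string | null | undefined) => {
  hit._source['@timestamp'] = value;
  if (hit.fields != null) {
    hit.fields['@timestamp'] = value == null ? value : [value];
  }
};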


@ -60,6 +60,12 @@ export const getAnomalies = async (
})?.query,
},
},
fields: [
{
field: '*',
include_unmapped: true,
},
],
sort: [{ record_score: { order: 'desc' } }],
},
},