[ML] Converts utils Mocha tests to Jest (#63132)
* [ML] Convert utils Mocha tests to Jest
* [ML] Remove unused imports
* [ML] Switch out enzyme mount for react testing library render

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
parent
c2f2a79acb
commit
3e26654416
|
@ -1,443 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import {
|
||||
getSeverity,
|
||||
getSeverityWithLow,
|
||||
getSeverityColor,
|
||||
getMultiBucketImpactLabel,
|
||||
getEntityFieldName,
|
||||
getEntityFieldValue,
|
||||
getEntityFieldList,
|
||||
showActualForFunction,
|
||||
showTypicalForFunction,
|
||||
isRuleSupported,
|
||||
aggregationTypeTransform,
|
||||
} from '../anomaly_utils';
|
||||
|
||||
// Tests for the ML anomaly utility helpers: severity banding, entity-field
// extraction, actual/typical display rules, rule support and ML<->ES
// aggregation-type transforms.
describe('ML - anomaly utils', () => {
  // Anomaly record fixtures, one per entity-field combination:
  // partition, by, over, none, metric (no entity) and a population/rare record.
  const partitionEntityRecord = {
    job_id: 'farequote',
    result_type: 'record',
    probability: 0.012818,
    record_score: 0.0162059,
    bucket_span: 300,
    detector_index: 0,
    timestamp: 1455047400000,
    partition_field_name: 'airline',
    partition_field_value: 'AAL',
    function: 'mean',
    function_description: 'mean',
    field_name: 'responsetime',
  };

  const byEntityRecord = {
    job_id: 'farequote',
    result_type: 'record',
    probability: 0.012818,
    record_score: 0.0162059,
    bucket_span: 300,
    detector_index: 0,
    timestamp: 1455047400000,
    by_field_name: 'airline',
    by_field_value: 'JZA',
    function: 'mean',
    function_description: 'mean',
    field_name: 'responsetime',
  };

  const overEntityRecord = {
    job_id: 'gallery',
    result_type: 'record',
    probability: 2.81806e-9,
    record_score: 59.055,
    bucket_span: 3600,
    detector_index: 4,
    timestamp: 1420552800000,
    function: 'sum',
    function_description: 'sum',
    field_name: 'bytes',
    by_field_name: 'method',
    over_field_name: 'clientip',
    over_field_value: '37.157.32.164',
  };

  const noEntityRecord = {
    job_id: 'farequote_no_by',
    result_type: 'record',
    probability: 0.0191711,
    record_score: 4.38431,
    initial_record_score: 19.654,
    bucket_span: 300,
    detector_index: 0,
    timestamp: 1454890500000,
    function: 'mean',
    function_description: 'mean',
    field_name: 'responsetime',
  };

  const metricNoEntityRecord = {
    job_id: 'farequote_metric',
    result_type: 'record',
    probability: 0.030133495093182184,
    record_score: 0.024881740359975164,
    initial_record_score: 0.024881740359975164,
    bucket_span: 900,
    detector_index: 0,
    is_interim: false,
    timestamp: 1486845000000,
    function: 'metric',
    function_description: 'mean',
    typical: [545.7764658569108],
    actual: [758.8220213274412],
    field_name: 'responsetime',
    influencers: [
      {
        influencer_field_name: 'airline',
        influencer_field_values: ['NKS'],
      },
    ],
    airline: ['NKS'],
  };

  const rareEntityRecord = {
    job_id: 'gallery',
    result_type: 'record',
    probability: 0.02277014211908481,
    record_score: 4.545378107075983,
    initial_record_score: 4.545378107075983,
    bucket_span: 3600,
    detector_index: 0,
    is_interim: false,
    timestamp: 1495879200000,
    by_field_name: 'status',
    function: 'rare',
    function_description: 'rare',
    over_field_name: 'clientip',
    over_field_value: '173.252.74.112',
    causes: [
      {
        probability: 0.02277014211908481,
        by_field_name: 'status',
        by_field_value: '206',
        function: 'rare',
        function_description: 'rare',
        typical: [0.00014832458182211878],
        actual: [1],
        over_field_name: 'clientip',
        over_field_value: '173.252.74.112',
      },
    ],
    influencers: [
      {
        influencer_field_name: 'uri',
        influencer_field_values: [
          '/wp-content/uploads/2013/06/dune_house_oil_on_canvas_24x20-298x298.jpg',
          '/wp-content/uploads/2013/10/Case-dAste-1-11-298x298.png',
        ],
      },
      {
        influencer_field_name: 'status',
        influencer_field_values: ['206'],
      },
      {
        influencer_field_name: 'clientip',
        influencer_field_values: ['173.252.74.112'],
      },
    ],
    clientip: ['173.252.74.112'],
    uri: [
      '/wp-content/uploads/2013/06/dune_house_oil_on_canvas_24x20-298x298.jpg',
      '/wp-content/uploads/2013/10/Case-dAste-1-11-298x298.png',
    ],
    status: ['206'],
  };

  describe('getSeverity', () => {
    it('returns warning for 0 <= score < 25', () => {
      expect(getSeverity(0).id).to.be('warning');
      expect(getSeverity(0.001).id).to.be('warning');
      expect(getSeverity(24.99).id).to.be('warning');
    });

    it('returns minor for 25 <= score < 50', () => {
      expect(getSeverity(25).id).to.be('minor');
      expect(getSeverity(49.99).id).to.be('minor');
    });

    // Description fixed: this band asserts 'major', not 'minor'.
    it('returns major for 50 <= score < 75', () => {
      expect(getSeverity(50).id).to.be('major');
      expect(getSeverity(74.99).id).to.be('major');
    });

    it('returns critical for score >= 75', () => {
      expect(getSeverity(75).id).to.be('critical');
      expect(getSeverity(100).id).to.be('critical');
      expect(getSeverity(1000).id).to.be('critical');
    });

    it('returns unknown for scores less than 0 or string input', () => {
      expect(getSeverity(-10).id).to.be('unknown');
      expect(getSeverity('value').id).to.be('unknown');
    });
  });

  describe('getSeverityWithLow', () => {
    it('returns low for 0 <= score < 3', () => {
      expect(getSeverityWithLow(0).id).to.be('low');
      expect(getSeverityWithLow(0.001).id).to.be('low');
      expect(getSeverityWithLow(2.99).id).to.be('low');
    });

    it('returns warning for 3 <= score < 25', () => {
      expect(getSeverityWithLow(3).id).to.be('warning');
      expect(getSeverityWithLow(24.99).id).to.be('warning');
    });

    it('returns minor for 25 <= score < 50', () => {
      expect(getSeverityWithLow(25).id).to.be('minor');
      expect(getSeverityWithLow(49.99).id).to.be('minor');
    });

    // Description fixed: this band asserts 'major', not 'minor'.
    it('returns major for 50 <= score < 75', () => {
      expect(getSeverityWithLow(50).id).to.be('major');
      expect(getSeverityWithLow(74.99).id).to.be('major');
    });

    it('returns critical for score >= 75', () => {
      expect(getSeverityWithLow(75).id).to.be('critical');
      expect(getSeverityWithLow(100).id).to.be('critical');
      expect(getSeverityWithLow(1000).id).to.be('critical');
    });

    it('returns unknown for scores less than 0 or string input', () => {
      expect(getSeverityWithLow(-10).id).to.be('unknown');
      expect(getSeverityWithLow('value').id).to.be('unknown');
    });
  });

  describe('getSeverityColor', () => {
    it('returns correct hex code for low for 0 <= score < 3', () => {
      expect(getSeverityColor(0)).to.be('#d2e9f7');
      expect(getSeverityColor(0.001)).to.be('#d2e9f7');
      expect(getSeverityColor(2.99)).to.be('#d2e9f7');
    });

    it('returns correct hex code for warning for 3 <= score < 25', () => {
      expect(getSeverityColor(3)).to.be('#8bc8fb');
      expect(getSeverityColor(24.99)).to.be('#8bc8fb');
    });

    it('returns correct hex code for minor for 25 <= score < 50', () => {
      expect(getSeverityColor(25)).to.be('#fdec25');
      expect(getSeverityColor(49.99)).to.be('#fdec25');
    });

    it('returns correct hex code for major for 50 <= score < 75', () => {
      expect(getSeverityColor(50)).to.be('#fba740');
      expect(getSeverityColor(74.99)).to.be('#fba740');
    });

    it('returns correct hex code for critical for score >= 75', () => {
      expect(getSeverityColor(75)).to.be('#fe5050');
      expect(getSeverityColor(100)).to.be('#fe5050');
      expect(getSeverityColor(1000)).to.be('#fe5050');
    });

    it('returns correct hex code for unknown for scores less than 0 or string input', () => {
      expect(getSeverityColor(-10)).to.be('#ffffff');
      expect(getSeverityColor('value')).to.be('#ffffff');
    });
  });

  describe('getMultiBucketImpactLabel', () => {
    it('returns high for 3 <= score <= 5', () => {
      expect(getMultiBucketImpactLabel(3)).to.be('high');
      expect(getMultiBucketImpactLabel(5)).to.be('high');
    });

    it('returns medium for 2 <= score < 3', () => {
      expect(getMultiBucketImpactLabel(2)).to.be('medium');
      expect(getMultiBucketImpactLabel(2.99)).to.be('medium');
    });

    it('returns low for 1 <= score < 2', () => {
      expect(getMultiBucketImpactLabel(1)).to.be('low');
      expect(getMultiBucketImpactLabel(1.99)).to.be('low');
    });

    it('returns none for -5 <= score < 1', () => {
      expect(getMultiBucketImpactLabel(-5)).to.be('none');
      expect(getMultiBucketImpactLabel(0.99)).to.be('none');
    });

    it('returns expected label when impact outside normal bounds', () => {
      expect(getMultiBucketImpactLabel(10)).to.be('high');
      expect(getMultiBucketImpactLabel(-10)).to.be('none');
    });
  });

  describe('getEntityFieldName', () => {
    it('returns the by field name', () => {
      expect(getEntityFieldName(byEntityRecord)).to.be('airline');
    });

    it('returns the partition field name', () => {
      expect(getEntityFieldName(partitionEntityRecord)).to.be('airline');
    });

    it('returns the over field name', () => {
      expect(getEntityFieldName(overEntityRecord)).to.be('clientip');
    });

    it('returns undefined if no by, over or partition fields', () => {
      expect(getEntityFieldName(noEntityRecord)).to.be(undefined);
    });
  });

  describe('getEntityFieldValue', () => {
    it('returns the by field value', () => {
      expect(getEntityFieldValue(byEntityRecord)).to.be('JZA');
    });

    it('returns the partition field value', () => {
      expect(getEntityFieldValue(partitionEntityRecord)).to.be('AAL');
    });

    it('returns the over field value', () => {
      expect(getEntityFieldValue(overEntityRecord)).to.be('37.157.32.164');
    });

    it('returns undefined if no by, over or partition fields', () => {
      expect(getEntityFieldValue(noEntityRecord)).to.be(undefined);
    });
  });

  describe('getEntityFieldList', () => {
    it('returns an empty list for a record with no by, over or partition fields', () => {
      expect(getEntityFieldList(noEntityRecord)).to.be.empty();
    });

    it('returns correct list for a record with a by field', () => {
      expect(getEntityFieldList(byEntityRecord)).to.eql([
        {
          fieldName: 'airline',
          fieldValue: 'JZA',
          fieldType: 'by',
        },
      ]);
    });

    it('returns correct list for a record with a partition field', () => {
      expect(getEntityFieldList(partitionEntityRecord)).to.eql([
        {
          fieldName: 'airline',
          fieldValue: 'AAL',
          fieldType: 'partition',
        },
      ]);
    });

    it('returns correct list for a record with an over field', () => {
      expect(getEntityFieldList(overEntityRecord)).to.eql([
        {
          fieldName: 'clientip',
          fieldValue: '37.157.32.164',
          fieldType: 'over',
        },
      ]);
    });

    // Population (rare) records report only the over field in the list.
    it('returns correct list for a record with a by and over field', () => {
      expect(getEntityFieldList(rareEntityRecord)).to.eql([
        {
          fieldName: 'clientip',
          fieldValue: '173.252.74.112',
          fieldType: 'over',
        },
      ]);
    });
  });

  describe('showActualForFunction', () => {
    it('returns true for expected function descriptions', () => {
      expect(showActualForFunction('count')).to.be(true);
      expect(showActualForFunction('distinct_count')).to.be(true);
      expect(showActualForFunction('lat_long')).to.be(true);
      expect(showActualForFunction('mean')).to.be(true);
      expect(showActualForFunction('max')).to.be(true);
      expect(showActualForFunction('min')).to.be(true);
      expect(showActualForFunction('sum')).to.be(true);
      expect(showActualForFunction('median')).to.be(true);
      expect(showActualForFunction('varp')).to.be(true);
      expect(showActualForFunction('info_content')).to.be(true);
      expect(showActualForFunction('time')).to.be(true);
    });

    it('returns false for expected function descriptions', () => {
      expect(showActualForFunction('rare')).to.be(false);
    });
  });

  describe('showTypicalForFunction', () => {
    it('returns true for expected function descriptions', () => {
      expect(showTypicalForFunction('count')).to.be(true);
      expect(showTypicalForFunction('distinct_count')).to.be(true);
      expect(showTypicalForFunction('lat_long')).to.be(true);
      expect(showTypicalForFunction('mean')).to.be(true);
      expect(showTypicalForFunction('max')).to.be(true);
      expect(showTypicalForFunction('min')).to.be(true);
      expect(showTypicalForFunction('sum')).to.be(true);
      expect(showTypicalForFunction('median')).to.be(true);
      expect(showTypicalForFunction('varp')).to.be(true);
      expect(showTypicalForFunction('info_content')).to.be(true);
      expect(showTypicalForFunction('time')).to.be(true);
    });

    it('returns false for expected function descriptions', () => {
      expect(showTypicalForFunction('rare')).to.be(false);
    });
  });

  describe('isRuleSupported', () => {
    it('returns true for anomalies supporting rules', () => {
      expect(isRuleSupported(partitionEntityRecord)).to.be(true);
      expect(isRuleSupported(byEntityRecord)).to.be(true);
      expect(isRuleSupported(overEntityRecord)).to.be(true);
      expect(isRuleSupported(rareEntityRecord)).to.be(true);
      expect(isRuleSupported(noEntityRecord)).to.be(true);
    });

    it('returns false for anomaly not supporting rules', () => {
      expect(isRuleSupported(metricNoEntityRecord)).to.be(false);
    });
  });

  describe('aggregationTypeTransform', () => {
    it('returns correct ES aggregation type for ML function description', () => {
      expect(aggregationTypeTransform.toES('count')).to.be('count');
      expect(aggregationTypeTransform.toES('distinct_count')).to.be('cardinality');
      expect(aggregationTypeTransform.toES('distinct_count')).to.not.be('distinct_count');
      expect(aggregationTypeTransform.toES('mean')).to.be('avg');
      expect(aggregationTypeTransform.toES('mean')).to.not.be('mean');
      expect(aggregationTypeTransform.toES('max')).to.be('max');
      expect(aggregationTypeTransform.toES('min')).to.be('min');
      expect(aggregationTypeTransform.toES('sum')).to.be('sum');
    });

    it('returns correct ML function description for ES aggregation type', () => {
      expect(aggregationTypeTransform.toML('count')).to.be('count');
      expect(aggregationTypeTransform.toML('cardinality')).to.be('distinct_count');
      expect(aggregationTypeTransform.toML('cardinality')).to.not.be('cardinality');
      expect(aggregationTypeTransform.toML('avg')).to.be('mean');
      expect(aggregationTypeTransform.toML('avg')).to.not.be('avg');
      expect(aggregationTypeTransform.toML('max')).to.be('max');
      expect(aggregationTypeTransform.toML('min')).to.be('min');
      expect(aggregationTypeTransform.toML('sum')).to.be('sum');
    });
  });
});
|
11
x-pack/plugins/ml/common/util/anomaly_utils.d.ts
vendored
11
x-pack/plugins/ml/common/util/anomaly_utils.d.ts
vendored
|
@ -1,11 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { ANOMALY_SEVERITY } from '../constants/anomalies';
|
||||
|
||||
export function getSeverity(normalizedScore: number): string;
|
||||
export function getSeverityType(normalizedScore: number): ANOMALY_SEVERITY;
|
||||
export function getSeverityColor(normalizedScore: number): string;
|
444
x-pack/plugins/ml/common/util/anomaly_utils.test.ts
Normal file
444
x-pack/plugins/ml/common/util/anomaly_utils.test.ts
Normal file
|
@ -0,0 +1,444 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { AnomalyRecordDoc } from '../types/anomalies';
|
||||
|
||||
import {
|
||||
aggregationTypeTransform,
|
||||
getEntityFieldList,
|
||||
getEntityFieldName,
|
||||
getEntityFieldValue,
|
||||
getMultiBucketImpactLabel,
|
||||
getSeverity,
|
||||
getSeverityWithLow,
|
||||
getSeverityColor,
|
||||
isRuleSupported,
|
||||
showActualForFunction,
|
||||
showTypicalForFunction,
|
||||
} from './anomaly_utils';
|
||||
|
||||
describe('ML - anomaly utils', () => {
|
||||
const partitionEntityRecord: AnomalyRecordDoc = {
|
||||
job_id: 'farequote',
|
||||
result_type: 'record',
|
||||
probability: 0.012818,
|
||||
record_score: 0.0162059,
|
||||
initial_record_score: 0.0162059,
|
||||
bucket_span: 300,
|
||||
detector_index: 0,
|
||||
is_interim: false,
|
||||
timestamp: 1455047400000,
|
||||
partition_field_name: 'airline',
|
||||
partition_field_value: 'AAL',
|
||||
function: 'mean',
|
||||
function_description: 'mean',
|
||||
field_name: 'responsetime',
|
||||
};
|
||||
|
||||
const byEntityRecord: AnomalyRecordDoc = {
|
||||
job_id: 'farequote',
|
||||
result_type: 'record',
|
||||
probability: 0.012818,
|
||||
record_score: 0.0162059,
|
||||
initial_record_score: 0.0162059,
|
||||
bucket_span: 300,
|
||||
detector_index: 0,
|
||||
is_interim: false,
|
||||
timestamp: 1455047400000,
|
||||
by_field_name: 'airline',
|
||||
by_field_value: 'JZA',
|
||||
function: 'mean',
|
||||
function_description: 'mean',
|
||||
field_name: 'responsetime',
|
||||
};
|
||||
|
||||
const overEntityRecord: AnomalyRecordDoc = {
|
||||
job_id: 'gallery',
|
||||
result_type: 'record',
|
||||
probability: 2.81806e-9,
|
||||
record_score: 59.055,
|
||||
initial_record_score: 59.055,
|
||||
bucket_span: 3600,
|
||||
detector_index: 4,
|
||||
is_interim: false,
|
||||
timestamp: 1420552800000,
|
||||
function: 'sum',
|
||||
function_description: 'sum',
|
||||
field_name: 'bytes',
|
||||
by_field_name: 'method',
|
||||
over_field_name: 'clientip',
|
||||
over_field_value: '37.157.32.164',
|
||||
};
|
||||
|
||||
const noEntityRecord: AnomalyRecordDoc = {
|
||||
job_id: 'farequote_no_by',
|
||||
result_type: 'record',
|
||||
probability: 0.0191711,
|
||||
record_score: 4.38431,
|
||||
initial_record_score: 19.654,
|
||||
bucket_span: 300,
|
||||
detector_index: 0,
|
||||
is_interim: false,
|
||||
timestamp: 1454890500000,
|
||||
function: 'mean',
|
||||
function_description: 'mean',
|
||||
field_name: 'responsetime',
|
||||
};
|
||||
|
||||
const metricNoEntityRecord: AnomalyRecordDoc = {
|
||||
job_id: 'farequote_metric',
|
||||
result_type: 'record',
|
||||
probability: 0.030133495093182184,
|
||||
record_score: 0.024881740359975164,
|
||||
initial_record_score: 0.024881740359975164,
|
||||
bucket_span: 900,
|
||||
detector_index: 0,
|
||||
is_interim: false,
|
||||
timestamp: 1486845000000,
|
||||
function: 'metric',
|
||||
function_description: 'mean',
|
||||
typical: [545.7764658569108],
|
||||
actual: [758.8220213274412],
|
||||
field_name: 'responsetime',
|
||||
influencers: [
|
||||
{
|
||||
influencer_field_name: 'airline',
|
||||
influencer_field_values: ['NKS'],
|
||||
},
|
||||
],
|
||||
airline: ['NKS'],
|
||||
};
|
||||
|
||||
const rareEntityRecord: AnomalyRecordDoc = {
|
||||
job_id: 'gallery',
|
||||
result_type: 'record',
|
||||
probability: 0.02277014211908481,
|
||||
record_score: 4.545378107075983,
|
||||
initial_record_score: 4.545378107075983,
|
||||
bucket_span: 3600,
|
||||
detector_index: 0,
|
||||
is_interim: false,
|
||||
timestamp: 1495879200000,
|
||||
by_field_name: 'status',
|
||||
function: 'rare',
|
||||
function_description: 'rare',
|
||||
over_field_name: 'clientip',
|
||||
over_field_value: '173.252.74.112',
|
||||
causes: [
|
||||
{
|
||||
probability: 0.02277014211908481,
|
||||
by_field_name: 'status',
|
||||
by_field_value: '206',
|
||||
function: 'rare',
|
||||
function_description: 'rare',
|
||||
typical: [0.00014832458182211878],
|
||||
actual: [1],
|
||||
over_field_name: 'clientip',
|
||||
over_field_value: '173.252.74.112',
|
||||
},
|
||||
],
|
||||
influencers: [
|
||||
{
|
||||
influencer_field_name: 'uri',
|
||||
influencer_field_values: [
|
||||
'/wp-content/uploads/2013/06/dune_house_oil_on_canvas_24x20-298x298.jpg',
|
||||
'/wp-content/uploads/2013/10/Case-dAste-1-11-298x298.png',
|
||||
],
|
||||
},
|
||||
{
|
||||
influencer_field_name: 'status',
|
||||
influencer_field_values: ['206'],
|
||||
},
|
||||
{
|
||||
influencer_field_name: 'clientip',
|
||||
influencer_field_values: ['173.252.74.112'],
|
||||
},
|
||||
],
|
||||
clientip: ['173.252.74.112'],
|
||||
uri: [
|
||||
'/wp-content/uploads/2013/06/dune_house_oil_on_canvas_24x20-298x298.jpg',
|
||||
'/wp-content/uploads/2013/10/Case-dAste-1-11-298x298.png',
|
||||
],
|
||||
status: ['206'],
|
||||
};
|
||||
|
||||
describe('getSeverity', () => {
|
||||
test('returns warning for 0 <= score < 25', () => {
|
||||
expect(getSeverity(0).id).toBe('warning');
|
||||
expect(getSeverity(0.001).id).toBe('warning');
|
||||
expect(getSeverity(24.99).id).toBe('warning');
|
||||
});
|
||||
|
||||
test('returns minor for 25 <= score < 50', () => {
|
||||
expect(getSeverity(25).id).toBe('minor');
|
||||
expect(getSeverity(49.99).id).toBe('minor');
|
||||
});
|
||||
|
||||
test('returns minor for 50 <= score < 75', () => {
|
||||
expect(getSeverity(50).id).toBe('major');
|
||||
expect(getSeverity(74.99).id).toBe('major');
|
||||
});
|
||||
|
||||
test('returns critical for score >= 75', () => {
|
||||
expect(getSeverity(75).id).toBe('critical');
|
||||
expect(getSeverity(100).id).toBe('critical');
|
||||
expect(getSeverity(1000).id).toBe('critical');
|
||||
});
|
||||
|
||||
test('returns unknown for scores less than 0', () => {
|
||||
expect(getSeverity(-10).id).toBe('unknown');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSeverityWithLow', () => {
|
||||
test('returns low for 0 <= score < 3', () => {
|
||||
expect(getSeverityWithLow(0).id).toBe('low');
|
||||
expect(getSeverityWithLow(0.001).id).toBe('low');
|
||||
expect(getSeverityWithLow(2.99).id).toBe('low');
|
||||
});
|
||||
|
||||
test('returns warning for 3 <= score < 25', () => {
|
||||
expect(getSeverityWithLow(3).id).toBe('warning');
|
||||
expect(getSeverityWithLow(24.99).id).toBe('warning');
|
||||
});
|
||||
|
||||
test('returns minor for 25 <= score < 50', () => {
|
||||
expect(getSeverityWithLow(25).id).toBe('minor');
|
||||
expect(getSeverityWithLow(49.99).id).toBe('minor');
|
||||
});
|
||||
|
||||
test('returns minor for 50 <= score < 75', () => {
|
||||
expect(getSeverityWithLow(50).id).toBe('major');
|
||||
expect(getSeverityWithLow(74.99).id).toBe('major');
|
||||
});
|
||||
|
||||
test('returns critical for score >= 75', () => {
|
||||
expect(getSeverityWithLow(75).id).toBe('critical');
|
||||
expect(getSeverityWithLow(100).id).toBe('critical');
|
||||
expect(getSeverityWithLow(1000).id).toBe('critical');
|
||||
});
|
||||
|
||||
test('returns unknown for scores less than 0 ', () => {
|
||||
expect(getSeverityWithLow(-10).id).toBe('unknown');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSeverityColor', () => {
|
||||
test('returns correct hex code for low for 0 <= score < 3', () => {
|
||||
expect(getSeverityColor(0)).toBe('#d2e9f7');
|
||||
expect(getSeverityColor(0.001)).toBe('#d2e9f7');
|
||||
expect(getSeverityColor(2.99)).toBe('#d2e9f7');
|
||||
});
|
||||
|
||||
test('returns correct hex code for warning for 3 <= score < 25', () => {
|
||||
expect(getSeverityColor(3)).toBe('#8bc8fb');
|
||||
expect(getSeverityColor(24.99)).toBe('#8bc8fb');
|
||||
});
|
||||
|
||||
test('returns correct hex code for minor for 25 <= score < 50', () => {
|
||||
expect(getSeverityColor(25)).toBe('#fdec25');
|
||||
expect(getSeverityColor(49.99)).toBe('#fdec25');
|
||||
});
|
||||
|
||||
test('returns correct hex code for major for 50 <= score < 75', () => {
|
||||
expect(getSeverityColor(50)).toBe('#fba740');
|
||||
expect(getSeverityColor(74.99)).toBe('#fba740');
|
||||
});
|
||||
|
||||
test('returns correct hex code for critical for score >= 75', () => {
|
||||
expect(getSeverityColor(75)).toBe('#fe5050');
|
||||
expect(getSeverityColor(100)).toBe('#fe5050');
|
||||
expect(getSeverityColor(1000)).toBe('#fe5050');
|
||||
});
|
||||
|
||||
test('returns correct hex code for unknown for scores less than 0', () => {
|
||||
expect(getSeverityColor(-10)).toBe('#ffffff');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMultiBucketImpactLabel', () => {
|
||||
test('returns high for 3 <= score <= 5', () => {
|
||||
expect(getMultiBucketImpactLabel(3)).toBe('high');
|
||||
expect(getMultiBucketImpactLabel(5)).toBe('high');
|
||||
});
|
||||
|
||||
test('returns medium for 2 <= score < 3', () => {
|
||||
expect(getMultiBucketImpactLabel(2)).toBe('medium');
|
||||
expect(getMultiBucketImpactLabel(2.99)).toBe('medium');
|
||||
});
|
||||
|
||||
test('returns low for 1 <= score < 2', () => {
|
||||
expect(getMultiBucketImpactLabel(1)).toBe('low');
|
||||
expect(getMultiBucketImpactLabel(1.99)).toBe('low');
|
||||
});
|
||||
|
||||
test('returns none for -5 <= score < 1', () => {
|
||||
expect(getMultiBucketImpactLabel(-5)).toBe('none');
|
||||
expect(getMultiBucketImpactLabel(0.99)).toBe('none');
|
||||
});
|
||||
|
||||
test('returns expected label when impact outside normal bounds', () => {
|
||||
expect(getMultiBucketImpactLabel(10)).toBe('high');
|
||||
expect(getMultiBucketImpactLabel(-10)).toBe('none');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEntityFieldName', () => {
|
||||
it('returns the by field name', () => {
|
||||
expect(getEntityFieldName(byEntityRecord)).toBe('airline');
|
||||
});
|
||||
|
||||
it('returns the partition field name', () => {
|
||||
expect(getEntityFieldName(partitionEntityRecord)).toBe('airline');
|
||||
});
|
||||
|
||||
it('returns the over field name', () => {
|
||||
expect(getEntityFieldName(overEntityRecord)).toBe('clientip');
|
||||
});
|
||||
|
||||
it('returns undefined if no by, over or partition fields', () => {
|
||||
expect(getEntityFieldName(noEntityRecord)).toBe(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEntityFieldValue', () => {
|
||||
test('returns the by field value', () => {
|
||||
expect(getEntityFieldValue(byEntityRecord)).toBe('JZA');
|
||||
});
|
||||
|
||||
test('returns the partition field value', () => {
|
||||
expect(getEntityFieldValue(partitionEntityRecord)).toBe('AAL');
|
||||
});
|
||||
|
||||
test('returns the over field value', () => {
|
||||
expect(getEntityFieldValue(overEntityRecord)).toBe('37.157.32.164');
|
||||
});
|
||||
|
||||
test('returns undefined if no by, over or partition fields', () => {
|
||||
expect(getEntityFieldValue(noEntityRecord)).toBe(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEntityFieldList', () => {
|
||||
test('returns an empty list for a record with no by, over or partition fields', () => {
|
||||
expect(getEntityFieldList(noEntityRecord)).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('returns correct list for a record with a by field', () => {
|
||||
expect(getEntityFieldList(byEntityRecord)).toEqual([
|
||||
{
|
||||
fieldName: 'airline',
|
||||
fieldValue: 'JZA',
|
||||
fieldType: 'by',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('returns correct list for a record with a partition field', () => {
|
||||
expect(getEntityFieldList(partitionEntityRecord)).toEqual([
|
||||
{
|
||||
fieldName: 'airline',
|
||||
fieldValue: 'AAL',
|
||||
fieldType: 'partition',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('returns correct list for a record with an over field', () => {
|
||||
expect(getEntityFieldList(overEntityRecord)).toEqual([
|
||||
{
|
||||
fieldName: 'clientip',
|
||||
fieldValue: '37.157.32.164',
|
||||
fieldType: 'over',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('returns correct list for a record with a by and over field', () => {
|
||||
expect(getEntityFieldList(rareEntityRecord)).toEqual([
|
||||
{
|
||||
fieldName: 'clientip',
|
||||
fieldValue: '173.252.74.112',
|
||||
fieldType: 'over',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('showActualForFunction', () => {
|
||||
test('returns true for expected function descriptions', () => {
|
||||
expect(showActualForFunction('count')).toBe(true);
|
||||
expect(showActualForFunction('distinct_count')).toBe(true);
|
||||
expect(showActualForFunction('lat_long')).toBe(true);
|
||||
expect(showActualForFunction('mean')).toBe(true);
|
||||
expect(showActualForFunction('max')).toBe(true);
|
||||
expect(showActualForFunction('min')).toBe(true);
|
||||
expect(showActualForFunction('sum')).toBe(true);
|
||||
expect(showActualForFunction('median')).toBe(true);
|
||||
expect(showActualForFunction('varp')).toBe(true);
|
||||
expect(showActualForFunction('info_content')).toBe(true);
|
||||
expect(showActualForFunction('time')).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false for expected function descriptions', () => {
|
||||
expect(showActualForFunction('rare')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('showTypicalForFunction', () => {
|
||||
test('returns true for expected function descriptions', () => {
|
||||
expect(showTypicalForFunction('count')).toBe(true);
|
||||
expect(showTypicalForFunction('distinct_count')).toBe(true);
|
||||
expect(showTypicalForFunction('lat_long')).toBe(true);
|
||||
expect(showTypicalForFunction('mean')).toBe(true);
|
||||
expect(showTypicalForFunction('max')).toBe(true);
|
||||
expect(showTypicalForFunction('min')).toBe(true);
|
||||
expect(showTypicalForFunction('sum')).toBe(true);
|
||||
expect(showTypicalForFunction('median')).toBe(true);
|
||||
expect(showTypicalForFunction('varp')).toBe(true);
|
||||
expect(showTypicalForFunction('info_content')).toBe(true);
|
||||
expect(showTypicalForFunction('time')).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false for expected function descriptions', () => {
|
||||
expect(showTypicalForFunction('rare')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isRuleSupported', () => {
|
||||
test('returns true for anomalies supporting rules', () => {
|
||||
expect(isRuleSupported(partitionEntityRecord)).toBe(true);
|
||||
expect(isRuleSupported(byEntityRecord)).toBe(true);
|
||||
expect(isRuleSupported(overEntityRecord)).toBe(true);
|
||||
expect(isRuleSupported(rareEntityRecord)).toBe(true);
|
||||
expect(isRuleSupported(noEntityRecord)).toBe(true);
|
||||
});
|
||||
|
||||
it('returns false for anomaly not supporting rules', () => {
|
||||
expect(isRuleSupported(metricNoEntityRecord)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('aggregationTypeTransform', () => {
|
||||
test('returns correct ES aggregation type for ML function description', () => {
|
||||
expect(aggregationTypeTransform.toES('count')).toBe('count');
|
||||
expect(aggregationTypeTransform.toES('distinct_count')).toBe('cardinality');
|
||||
expect(aggregationTypeTransform.toES('mean')).toBe('avg');
|
||||
expect(aggregationTypeTransform.toES('max')).toBe('max');
|
||||
expect(aggregationTypeTransform.toES('min')).toBe('min');
|
||||
expect(aggregationTypeTransform.toES('sum')).toBe('sum');
|
||||
});
|
||||
|
||||
test('returns correct ML function description for ES aggregation type', () => {
|
||||
expect(aggregationTypeTransform.toML('count')).toBe('count');
|
||||
expect(aggregationTypeTransform.toML('cardinality')).toBe('distinct_count');
|
||||
expect(aggregationTypeTransform.toML('avg')).toBe('mean');
|
||||
expect(aggregationTypeTransform.toML('max')).toBe('max');
|
||||
expect(aggregationTypeTransform.toML('min')).toBe('min');
|
||||
expect(aggregationTypeTransform.toML('sum')).toBe('sum');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -13,6 +13,24 @@ import { i18n } from '@kbn/i18n';
|
|||
import { CONDITIONS_NOT_SUPPORTED_FUNCTIONS } from '../constants/detector_rule';
|
||||
import { MULTI_BUCKET_IMPACT } from '../constants/multi_bucket_impact';
|
||||
import { ANOMALY_SEVERITY, ANOMALY_THRESHOLD } from '../constants/anomalies';
|
||||
import { AnomalyRecordDoc } from '../types/anomalies';
|
||||
|
||||
export interface SeverityType {
|
||||
id: ANOMALY_SEVERITY;
|
||||
label: string;
|
||||
}
|
||||
|
||||
export enum ENTITY_FIELD_TYPE {
|
||||
BY = 'by',
|
||||
OVER = 'over',
|
||||
PARTITON = 'partition',
|
||||
}
|
||||
|
||||
export interface EntityField {
|
||||
fieldName: string;
|
||||
fieldValue: string | number | undefined;
|
||||
fieldType: ENTITY_FIELD_TYPE;
|
||||
}
|
||||
|
||||
// List of function descriptions for which actual values from record level results should be displayed.
|
||||
const DISPLAY_ACTUAL_FUNCTIONS = [
|
||||
|
@ -44,7 +62,7 @@ const DISPLAY_TYPICAL_FUNCTIONS = [
|
|||
'time',
|
||||
];
|
||||
|
||||
let severityTypes;
|
||||
let severityTypes: Record<string, SeverityType>;
|
||||
|
||||
function getSeverityTypes() {
|
||||
if (severityTypes) {
|
||||
|
@ -93,7 +111,7 @@ function getSeverityTypes() {
|
|||
|
||||
// Returns a severity label (one of critical, major, minor, warning or unknown)
|
||||
// for the supplied normalized anomaly score (a value between 0 and 100).
|
||||
export function getSeverity(normalizedScore) {
|
||||
export function getSeverity(normalizedScore: number): SeverityType {
|
||||
const severityTypesList = getSeverityTypes();
|
||||
|
||||
if (normalizedScore >= ANOMALY_THRESHOLD.CRITICAL) {
|
||||
|
@ -109,7 +127,7 @@ export function getSeverity(normalizedScore) {
|
|||
}
|
||||
}
|
||||
|
||||
export function getSeverityType(normalizedScore) {
|
||||
export function getSeverityType(normalizedScore: number): ANOMALY_SEVERITY {
|
||||
if (normalizedScore >= 75) {
|
||||
return ANOMALY_SEVERITY.CRITICAL;
|
||||
} else if (normalizedScore >= 50) {
|
||||
|
@ -128,7 +146,7 @@ export function getSeverityType(normalizedScore) {
|
|||
// Returns a severity label (one of critical, major, minor, warning, low or unknown)
|
||||
// for the supplied normalized anomaly score (a value between 0 and 100), where scores
|
||||
// less than 3 are assigned a severity of 'low'.
|
||||
export function getSeverityWithLow(normalizedScore) {
|
||||
export function getSeverityWithLow(normalizedScore: number): SeverityType {
|
||||
const severityTypesList = getSeverityTypes();
|
||||
|
||||
if (normalizedScore >= ANOMALY_THRESHOLD.CRITICAL) {
|
||||
|
@ -148,7 +166,7 @@ export function getSeverityWithLow(normalizedScore) {
|
|||
|
||||
// Returns a severity RGB color (one of critical, major, minor, warning, low_warning or unknown)
|
||||
// for the supplied normalized anomaly score (a value between 0 and 100).
|
||||
export function getSeverityColor(normalizedScore) {
|
||||
export function getSeverityColor(normalizedScore: number): string {
|
||||
if (normalizedScore >= ANOMALY_THRESHOLD.CRITICAL) {
|
||||
return '#fe5050';
|
||||
} else if (normalizedScore >= ANOMALY_THRESHOLD.MAJOR) {
|
||||
|
@ -167,7 +185,7 @@ export function getSeverityColor(normalizedScore) {
|
|||
// Returns a label to use for the multi-bucket impact of an anomaly
|
||||
// according to the value of the multi_bucket_impact field of a record,
|
||||
// which ranges from -5 to +5.
|
||||
export function getMultiBucketImpactLabel(multiBucketImpact) {
|
||||
export function getMultiBucketImpactLabel(multiBucketImpact: number): string {
|
||||
if (multiBucketImpact >= MULTI_BUCKET_IMPACT.HIGH) {
|
||||
return i18n.translate('xpack.ml.anomalyUtils.multiBucketImpact.highLabel', {
|
||||
defaultMessage: 'high',
|
||||
|
@ -190,7 +208,7 @@ export function getMultiBucketImpactLabel(multiBucketImpact) {
|
|||
// Returns the name of the field to use as the entity name from the source record
|
||||
// obtained from Elasticsearch. The function looks first for a by_field, then over_field,
|
||||
// then partition_field, returning undefined if none of these fields are present.
|
||||
export function getEntityFieldName(record) {
|
||||
export function getEntityFieldName(record: AnomalyRecordDoc): string | undefined {
|
||||
// Analyses with by and over fields, will have a top-level by_field_name, but
|
||||
// the by_field_value(s) will be in the nested causes array.
|
||||
if (record.by_field_name !== undefined && record.by_field_value !== undefined) {
|
||||
|
@ -211,7 +229,7 @@ export function getEntityFieldName(record) {
|
|||
// Returns the value of the field to use as the entity value from the source record
|
||||
// obtained from Elasticsearch. The function looks first for a by_field, then over_field,
|
||||
// then partition_field, returning undefined if none of these fields are present.
|
||||
export function getEntityFieldValue(record) {
|
||||
export function getEntityFieldValue(record: AnomalyRecordDoc): string | number | undefined {
|
||||
if (record.by_field_value !== undefined) {
|
||||
return record.by_field_value;
|
||||
}
|
||||
|
@ -229,13 +247,13 @@ export function getEntityFieldValue(record) {
|
|||
|
||||
// Returns the list of partitioning entity fields for the source record as a list
|
||||
// of objects in the form { fieldName: airline, fieldValue: AAL, fieldType: partition }
|
||||
export function getEntityFieldList(record) {
|
||||
const entityFields = [];
|
||||
export function getEntityFieldList(record: AnomalyRecordDoc): EntityField[] {
|
||||
const entityFields: EntityField[] = [];
|
||||
if (record.partition_field_name !== undefined) {
|
||||
entityFields.push({
|
||||
fieldName: record.partition_field_name,
|
||||
fieldValue: record.partition_field_value,
|
||||
fieldType: 'partition',
|
||||
fieldType: ENTITY_FIELD_TYPE.PARTITON,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -243,7 +261,7 @@ export function getEntityFieldList(record) {
|
|||
entityFields.push({
|
||||
fieldName: record.over_field_name,
|
||||
fieldValue: record.over_field_value,
|
||||
fieldType: 'over',
|
||||
fieldType: ENTITY_FIELD_TYPE.OVER,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -254,7 +272,7 @@ export function getEntityFieldList(record) {
|
|||
entityFields.push({
|
||||
fieldName: record.by_field_name,
|
||||
fieldValue: record.by_field_value,
|
||||
fieldType: 'by',
|
||||
fieldType: ENTITY_FIELD_TYPE.BY,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -264,19 +282,19 @@ export function getEntityFieldList(record) {
|
|||
// Returns whether actual values should be displayed for a record with the specified function description.
|
||||
// Note that the 'function' field in a record contains what the user entered e.g. 'high_count',
|
||||
// whereas the 'function_description' field holds a ML-built display hint for function e.g. 'count'.
|
||||
export function showActualForFunction(functionDescription) {
|
||||
export function showActualForFunction(functionDescription: string): boolean {
|
||||
return DISPLAY_ACTUAL_FUNCTIONS.indexOf(functionDescription) > -1;
|
||||
}
|
||||
|
||||
// Returns whether typical values should be displayed for a record with the specified function description.
|
||||
// Note that the 'function' field in a record contains what the user entered e.g. 'high_count',
|
||||
// whereas the 'function_description' field holds a ML-built display hint for function e.g. 'count'.
|
||||
export function showTypicalForFunction(functionDescription) {
|
||||
export function showTypicalForFunction(functionDescription: string): boolean {
|
||||
return DISPLAY_TYPICAL_FUNCTIONS.indexOf(functionDescription) > -1;
|
||||
}
|
||||
|
||||
// Returns whether a rule can be configured against the specified anomaly.
|
||||
export function isRuleSupported(record) {
|
||||
export function isRuleSupported(record: AnomalyRecordDoc): boolean {
|
||||
// A rule can be configured with a numeric condition if the function supports it,
|
||||
// and/or with scope if there is a partitioning fields.
|
||||
return (
|
||||
|
@ -303,7 +321,7 @@ export function isRuleSupported(record) {
|
|||
// The input to toES and the output from toML correspond to the value of the
|
||||
// function_description field of anomaly records.
|
||||
export const aggregationTypeTransform = {
|
||||
toES: function(oldAggType) {
|
||||
toES(oldAggType: string): string {
|
||||
let newAggType = oldAggType;
|
||||
|
||||
if (newAggType === 'mean') {
|
||||
|
@ -316,7 +334,7 @@ export const aggregationTypeTransform = {
|
|||
|
||||
return newAggType;
|
||||
},
|
||||
toML: function(oldAggType) {
|
||||
toML(oldAggType: string): string {
|
||||
let newAggType = oldAggType;
|
||||
|
||||
if (newAggType === 'avg') {
|
|
@ -4,14 +4,13 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import { isValidRule, buildRuleDescription, getAppliesToValueFromAnomaly } from '../utils';
|
||||
import { isValidRule, buildRuleDescription, getAppliesToValueFromAnomaly } from './utils';
|
||||
import {
|
||||
ACTION,
|
||||
APPLIES_TO,
|
||||
OPERATOR,
|
||||
FILTER_TYPE,
|
||||
} from '../../../../../common/constants/detector_rule';
|
||||
} from '../../../../common/constants/detector_rule';
|
||||
|
||||
describe('ML - rule editor utils', () => {
|
||||
const ruleWithCondition = {
|
||||
|
@ -55,19 +54,19 @@ describe('ML - rule editor utils', () => {
|
|||
};
|
||||
|
||||
describe('isValidRule', () => {
|
||||
it('returns true for a rule with an action and a condition', () => {
|
||||
expect(isValidRule(ruleWithCondition)).to.be(true);
|
||||
test('returns true for a rule with an action and a condition', () => {
|
||||
expect(isValidRule(ruleWithCondition)).toBe(true);
|
||||
});
|
||||
|
||||
it('returns true for a rule with an action and scope', () => {
|
||||
expect(isValidRule(ruleWithScope)).to.be(true);
|
||||
test('returns true for a rule with an action and scope', () => {
|
||||
expect(isValidRule(ruleWithScope)).toBe(true);
|
||||
});
|
||||
|
||||
it('returns true for a rule with an action, scope and condition', () => {
|
||||
expect(isValidRule(ruleWithConditionAndScope)).to.be(true);
|
||||
test('returns true for a rule with an action, scope and condition', () => {
|
||||
expect(isValidRule(ruleWithConditionAndScope)).toBe(true);
|
||||
});
|
||||
|
||||
it('returns false for a rule with no action', () => {
|
||||
test('returns false for a rule with no action', () => {
|
||||
const ruleWithNoAction = {
|
||||
actions: [],
|
||||
conditions: [
|
||||
|
@ -79,27 +78,27 @@ describe('ML - rule editor utils', () => {
|
|||
],
|
||||
};
|
||||
|
||||
expect(isValidRule(ruleWithNoAction)).to.be(false);
|
||||
expect(isValidRule(ruleWithNoAction)).toBe(false);
|
||||
});
|
||||
|
||||
it('returns false for a rule with no scope or conditions', () => {
|
||||
test('returns false for a rule with no scope or conditions', () => {
|
||||
const ruleWithNoScopeOrCondition = {
|
||||
actions: [ACTION.SKIP_RESULT],
|
||||
};
|
||||
|
||||
expect(isValidRule(ruleWithNoScopeOrCondition)).to.be(false);
|
||||
expect(isValidRule(ruleWithNoScopeOrCondition)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildRuleDescription', () => {
|
||||
it('returns expected rule descriptions', () => {
|
||||
expect(buildRuleDescription(ruleWithCondition)).to.be(
|
||||
test('returns expected rule descriptions', () => {
|
||||
expect(buildRuleDescription(ruleWithCondition)).toBe(
|
||||
'skip result when actual is greater than 10'
|
||||
);
|
||||
expect(buildRuleDescription(ruleWithScope)).to.be(
|
||||
expect(buildRuleDescription(ruleWithScope)).toBe(
|
||||
'skip result when instance is in test_aws_instances'
|
||||
);
|
||||
expect(buildRuleDescription(ruleWithConditionAndScope)).to.be(
|
||||
expect(buildRuleDescription(ruleWithConditionAndScope)).toBe(
|
||||
'skip result when typical is less than 100 AND instance is not in test_aws_instances'
|
||||
);
|
||||
});
|
||||
|
@ -111,16 +110,16 @@ describe('ML - rule editor utils', () => {
|
|||
typical: [1.23],
|
||||
};
|
||||
|
||||
it('returns expected actual value from an anomaly', () => {
|
||||
expect(getAppliesToValueFromAnomaly(anomaly, APPLIES_TO.ACTUAL)).to.be(210);
|
||||
test('returns expected actual value from an anomaly', () => {
|
||||
expect(getAppliesToValueFromAnomaly(anomaly, APPLIES_TO.ACTUAL)).toBe(210);
|
||||
});
|
||||
|
||||
it('returns expected typical value from an anomaly', () => {
|
||||
expect(getAppliesToValueFromAnomaly(anomaly, APPLIES_TO.TYPICAL)).to.be(1.23);
|
||||
test('returns expected typical value from an anomaly', () => {
|
||||
expect(getAppliesToValueFromAnomaly(anomaly, APPLIES_TO.TYPICAL)).toBe(1.23);
|
||||
});
|
||||
|
||||
it('returns expected diff from typical value from an anomaly', () => {
|
||||
expect(getAppliesToValueFromAnomaly(anomaly, APPLIES_TO.DIFF_FROM_TYPICAL)).to.be(208.77);
|
||||
test('returns expected diff from typical value from an anomaly', () => {
|
||||
expect(getAppliesToValueFromAnomaly(anomaly, APPLIES_TO.DIFF_FROM_TYPICAL)).toBe(208.77);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,140 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import moment from 'moment';
|
||||
|
||||
import { timeBucketsCalcAutoIntervalProvider } from '../calc_auto_interval';
|
||||
|
||||
describe('ML - calc auto intervals', () => {
|
||||
const calcAuto = timeBucketsCalcAutoIntervalProvider();
|
||||
|
||||
describe('near interval', () => {
|
||||
it('returns 0ms buckets for undefined / 0 bars', () => {
|
||||
const interval = calcAuto.near(0, undefined);
|
||||
expect(interval.asMilliseconds()).to.be(0);
|
||||
});
|
||||
|
||||
it('returns 1000ms buckets for 60s / 100 bars', () => {
|
||||
const interval = calcAuto.near(100, moment.duration(60, 's'));
|
||||
expect(interval.asMilliseconds()).to.be(1000);
|
||||
});
|
||||
|
||||
it('returns 5m buckets for 8h / 100 bars', () => {
|
||||
const interval = calcAuto.near(100, moment.duration(8, 'h'));
|
||||
expect(interval.asMinutes()).to.be(5);
|
||||
});
|
||||
|
||||
it('returns 15m buckets for 1d / 100 bars', () => {
|
||||
const interval = calcAuto.near(100, moment.duration(1, 'd'));
|
||||
expect(interval.asMinutes()).to.be(15);
|
||||
});
|
||||
|
||||
it('returns 1h buckets for 20d / 500 bars', () => {
|
||||
const interval = calcAuto.near(500, moment.duration(20, 'd'));
|
||||
expect(interval.asHours()).to.be(1);
|
||||
});
|
||||
|
||||
it('returns 6h buckets for 100d / 500 bars', () => {
|
||||
const interval = calcAuto.near(500, moment.duration(100, 'd'));
|
||||
expect(interval.asHours()).to.be(6);
|
||||
});
|
||||
|
||||
it('returns 24h buckets for 1y / 500 bars', () => {
|
||||
const interval = calcAuto.near(500, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).to.be(24);
|
||||
});
|
||||
|
||||
it('returns 12h buckets for 1y / 1000 bars', () => {
|
||||
const interval = calcAuto.near(1000, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).to.be(12);
|
||||
});
|
||||
});
|
||||
|
||||
describe('lessThan interval', () => {
|
||||
it('returns 0ms buckets for undefined / 0 bars', () => {
|
||||
const interval = calcAuto.lessThan(0, undefined);
|
||||
expect(interval.asMilliseconds()).to.be(0);
|
||||
});
|
||||
|
||||
it('returns 500ms buckets for 60s / 100 bars', () => {
|
||||
const interval = calcAuto.lessThan(100, moment.duration(60, 's'));
|
||||
expect(interval.asMilliseconds()).to.be(500);
|
||||
});
|
||||
|
||||
it('returns 5m buckets for 8h / 100 bars', () => {
|
||||
const interval = calcAuto.lessThan(100, moment.duration(8, 'h'));
|
||||
expect(interval.asMinutes()).to.be(5);
|
||||
});
|
||||
|
||||
it('returns 30m buckets for 1d / 100 bars', () => {
|
||||
const interval = calcAuto.lessThan(100, moment.duration(1, 'd'));
|
||||
expect(interval.asMinutes()).to.be(30);
|
||||
});
|
||||
|
||||
it('returns 1h buckets for 20d / 500 bars', () => {
|
||||
const interval = calcAuto.lessThan(500, moment.duration(20, 'd'));
|
||||
expect(interval.asHours()).to.be(1);
|
||||
});
|
||||
|
||||
it('returns 6h buckets for 100d / 500 bars', () => {
|
||||
const interval = calcAuto.lessThan(500, moment.duration(100, 'd'));
|
||||
expect(interval.asHours()).to.be(6);
|
||||
});
|
||||
|
||||
it('returns 24h buckets for 1y / 500 bars', () => {
|
||||
const interval = calcAuto.lessThan(500, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).to.be(24);
|
||||
});
|
||||
|
||||
it('returns 12h buckets for 1y / 1000 bars', () => {
|
||||
const interval = calcAuto.lessThan(1000, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).to.be(12);
|
||||
});
|
||||
});
|
||||
|
||||
describe('atLeast interval', () => {
|
||||
it('returns 0ms buckets for undefined / 0 bars', () => {
|
||||
const interval = calcAuto.atLeast(0, undefined);
|
||||
expect(interval.asMilliseconds()).to.be(0);
|
||||
});
|
||||
|
||||
it('returns 100ms buckets for 60s / 100 bars', () => {
|
||||
const interval = calcAuto.atLeast(100, moment.duration(60, 's'));
|
||||
expect(interval.asMilliseconds()).to.be(100);
|
||||
});
|
||||
|
||||
it('returns 1m buckets for 8h / 100 bars', () => {
|
||||
const interval = calcAuto.atLeast(100, moment.duration(8, 'h'));
|
||||
expect(interval.asMinutes()).to.be(1);
|
||||
});
|
||||
|
||||
it('returns 10m buckets for 1d / 100 bars', () => {
|
||||
const interval = calcAuto.atLeast(100, moment.duration(1, 'd'));
|
||||
expect(interval.asMinutes()).to.be(10);
|
||||
});
|
||||
|
||||
it('returns 30m buckets for 20d / 500 bars', () => {
|
||||
const interval = calcAuto.atLeast(500, moment.duration(20, 'd'));
|
||||
expect(interval.asMinutes()).to.be(30);
|
||||
});
|
||||
|
||||
it('returns 4h buckets for 100d / 500 bars', () => {
|
||||
const interval = calcAuto.atLeast(500, moment.duration(100, 'd'));
|
||||
expect(interval.asHours()).to.be(4);
|
||||
});
|
||||
|
||||
it('returns 12h buckets for 1y / 500 bars', () => {
|
||||
const interval = calcAuto.atLeast(500, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).to.be(12);
|
||||
});
|
||||
|
||||
it('returns 8h buckets for 1y / 1000 bars', () => {
|
||||
const interval = calcAuto.atLeast(1000, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).to.be(8);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,297 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import $ from 'jquery';
|
||||
import d3 from 'd3';
|
||||
import expect from '@kbn/expect';
|
||||
import {
|
||||
chartLimits,
|
||||
filterAxisLabels,
|
||||
getChartType,
|
||||
numTicks,
|
||||
showMultiBucketAnomalyMarker,
|
||||
showMultiBucketAnomalyTooltip,
|
||||
} from '../chart_utils';
|
||||
import { MULTI_BUCKET_IMPACT } from '../../../../common/constants/multi_bucket_impact';
|
||||
import { CHART_TYPE } from '../../explorer/explorer_constants';
|
||||
|
||||
describe('ML - chart utils', () => {
|
||||
describe('chartLimits', () => {
|
||||
it('returns NaN when called without data', () => {
|
||||
const limits = chartLimits();
|
||||
expect(limits.min).to.be.NaN;
|
||||
expect(limits.max).to.be.NaN;
|
||||
});
|
||||
|
||||
it('returns {max: 625736376, min: 201039318} for some test data', () => {
|
||||
const data = [
|
||||
{
|
||||
date: new Date('2017-02-23T08:00:00.000Z'),
|
||||
value: 228243469,
|
||||
anomalyScore: 63.32916,
|
||||
numberOfCauses: 1,
|
||||
actual: [228243469],
|
||||
typical: [133107.7703441773],
|
||||
},
|
||||
{ date: new Date('2017-02-23T09:00:00.000Z'), value: null },
|
||||
{ date: new Date('2017-02-23T10:00:00.000Z'), value: null },
|
||||
{ date: new Date('2017-02-23T11:00:00.000Z'), value: null },
|
||||
{
|
||||
date: new Date('2017-02-23T12:00:00.000Z'),
|
||||
value: 625736376,
|
||||
anomalyScore: 97.32085,
|
||||
numberOfCauses: 1,
|
||||
actual: [625736376],
|
||||
typical: [132830.424736973],
|
||||
},
|
||||
{
|
||||
date: new Date('2017-02-23T13:00:00.000Z'),
|
||||
value: 201039318,
|
||||
anomalyScore: 59.83488,
|
||||
numberOfCauses: 1,
|
||||
actual: [201039318],
|
||||
typical: [132739.5267403542],
|
||||
},
|
||||
];
|
||||
|
||||
const limits = chartLimits(data);
|
||||
|
||||
// {max: 625736376, min: 201039318}
|
||||
expect(limits.min).to.be(201039318);
|
||||
expect(limits.max).to.be(625736376);
|
||||
});
|
||||
|
||||
it("adds 5% padding when min/max are the same, e.g. when there's only one data point", () => {
|
||||
const data = [
|
||||
{
|
||||
date: new Date('2017-02-23T08:00:00.000Z'),
|
||||
value: 100,
|
||||
anomalyScore: 50,
|
||||
numberOfCauses: 1,
|
||||
actual: [100],
|
||||
typical: [100],
|
||||
},
|
||||
];
|
||||
|
||||
const limits = chartLimits(data);
|
||||
expect(limits.min).to.be(95);
|
||||
expect(limits.max).to.be(105);
|
||||
});
|
||||
|
||||
it('returns minimum of 0 when data includes an anomaly for missing data', () => {
|
||||
const data = [
|
||||
{ date: new Date('2017-02-23T09:00:00.000Z'), value: 22.2 },
|
||||
{ date: new Date('2017-02-23T10:00:00.000Z'), value: 23.3 },
|
||||
{ date: new Date('2017-02-23T11:00:00.000Z'), value: 24.4 },
|
||||
{
|
||||
date: new Date('2017-02-23T12:00:00.000Z'),
|
||||
value: null,
|
||||
anomalyScore: 97.32085,
|
||||
actual: [0],
|
||||
typical: [22.2],
|
||||
},
|
||||
{ date: new Date('2017-02-23T13:00:00.000Z'), value: 21.3 },
|
||||
{ date: new Date('2017-02-23T14:00:00.000Z'), value: 21.2 },
|
||||
{ date: new Date('2017-02-23T15:00:00.000Z'), value: 21.1 },
|
||||
];
|
||||
|
||||
const limits = chartLimits(data);
|
||||
expect(limits.min).to.be(0);
|
||||
expect(limits.max).to.be(24.4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('filterAxisLabels', () => {
|
||||
it('throws an error when called without arguments', () => {
|
||||
expect(() => filterAxisLabels()).to.throwError();
|
||||
});
|
||||
|
||||
it('filters axis labels', () => {
|
||||
// this provides a dummy structure of axis labels.
|
||||
// the first one should always be filtered because it overflows on the
|
||||
// left side of the axis. the last one should be filtered based on the
|
||||
// given width parameter when doing the test calls.
|
||||
$('body').append(`
|
||||
<svg id="filterAxisLabels">
|
||||
<g class="x axis">
|
||||
<g class="tick" transform="translate(5,0)">
|
||||
<text dy=".71em" y="10" x="0" style="text-anchor: middle;">06:00</text>
|
||||
</g>
|
||||
<g class="tick" transform="translate(187.24137931034485,0)">
|
||||
<text dy=".71em" y="10" x="0" style="text-anchor: middle;">12:00</text>
|
||||
</g>
|
||||
<g class="tick" transform="translate(486.82758620689657,0)">
|
||||
<text dy=".71em" y="10" x="0" style="text-anchor: middle;">18:00</text>
|
||||
</g>
|
||||
<g class="tick" transform="translate(786.4137931034483,0)">
|
||||
<text dy=".71em" y="10" x="0" style="text-anchor: middle;">00:00</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
`);
|
||||
|
||||
const selector = '#filterAxisLabels .x.axis';
|
||||
|
||||
// given this width, the last tick should not be removed
|
||||
filterAxisLabels(d3.selectAll(selector), 1000);
|
||||
expect(d3.selectAll(selector + ' .tick text').size()).to.be(3);
|
||||
|
||||
// given this width, the last tick should be removed
|
||||
filterAxisLabels(d3.selectAll(selector), 790);
|
||||
expect(d3.selectAll(selector + ' .tick text').size()).to.be(2);
|
||||
|
||||
// clean up
|
||||
$('#filterAxisLabels').remove();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getChartType', () => {
|
||||
const singleMetricConfig = {
|
||||
metricFunction: 'avg',
|
||||
functionDescription: 'mean',
|
||||
fieldName: 'responsetime',
|
||||
entityFields: [],
|
||||
};
|
||||
|
||||
const multiMetricConfig = {
|
||||
metricFunction: 'avg',
|
||||
functionDescription: 'mean',
|
||||
fieldName: 'responsetime',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'airline',
|
||||
fieldValue: 'AAL',
|
||||
fieldType: 'partition',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const populationConfig = {
|
||||
metricFunction: 'avg',
|
||||
functionDescription: 'mean',
|
||||
fieldName: 'http.response.body.bytes',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'source.ip',
|
||||
fieldValue: '10.11.12.13',
|
||||
fieldType: 'over',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const rareConfig = {
|
||||
metricFunction: 'count',
|
||||
functionDescription: 'rare',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'http.response.status_code',
|
||||
fieldValue: '404',
|
||||
fieldType: 'by',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const varpModelPlotConfig = {
|
||||
metricFunction: null,
|
||||
functionDescription: 'varp',
|
||||
fieldName: 'NetworkOut',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'instance',
|
||||
fieldValue: 'i-ef74d410',
|
||||
fieldType: 'over',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const overScriptFieldModelPlotConfig = {
|
||||
metricFunction: 'count',
|
||||
functionDescription: 'count',
|
||||
fieldName: 'highest_registered_domain',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'highest_registered_domain',
|
||||
fieldValue: 'elastic.co',
|
||||
fieldType: 'over',
|
||||
},
|
||||
],
|
||||
datafeedConfig: {
|
||||
script_fields: {
|
||||
highest_registered_domain: {
|
||||
script: {
|
||||
source: "return domainSplit(doc['query'].value, params).get(1);",
|
||||
lang: 'painless',
|
||||
},
|
||||
ignore_failure: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
it('returns single metric chart type as expected for configs', () => {
|
||||
expect(getChartType(singleMetricConfig)).to.be(CHART_TYPE.SINGLE_METRIC);
|
||||
expect(getChartType(multiMetricConfig)).to.be(CHART_TYPE.SINGLE_METRIC);
|
||||
expect(getChartType(varpModelPlotConfig)).to.be(CHART_TYPE.SINGLE_METRIC);
|
||||
expect(getChartType(overScriptFieldModelPlotConfig)).to.be(CHART_TYPE.SINGLE_METRIC);
|
||||
});
|
||||
|
||||
it('returns event distribution chart type as expected for configs', () => {
|
||||
expect(getChartType(rareConfig)).to.be(CHART_TYPE.EVENT_DISTRIBUTION);
|
||||
});
|
||||
|
||||
it('returns population distribution chart type as expected for configs', () => {
|
||||
expect(getChartType(populationConfig)).to.be(CHART_TYPE.POPULATION_DISTRIBUTION);
|
||||
});
|
||||
});
|
||||
|
||||
describe('numTicks', () => {
|
||||
it('returns 10 for 1000', () => {
|
||||
expect(numTicks(1000)).to.be(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('showMultiBucketAnomalyMarker', () => {
|
||||
it('returns true for points with multiBucketImpact at or above medium impact', () => {
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.HIGH })).to.be(
|
||||
true
|
||||
);
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.MEDIUM })).to.be(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('returns false for points with multiBucketImpact missing or below medium impact', () => {
|
||||
expect(showMultiBucketAnomalyMarker({})).to.be(false);
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.LOW })).to.be(
|
||||
false
|
||||
);
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.NONE })).to.be(
|
||||
false
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('showMultiBucketAnomalyTooltip', () => {
|
||||
it('returns true for points with multiBucketImpact at or above low impact', () => {
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.HIGH })).to.be(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.MEDIUM })
|
||||
).to.be(true);
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.LOW })).to.be(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('returns false for points with multiBucketImpact missing or below medium impact', () => {
|
||||
expect(showMultiBucketAnomalyTooltip({})).to.be(false);
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.NONE })).to.be(
|
||||
false
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,229 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import {
|
||||
replaceStringTokens,
|
||||
detectorToString,
|
||||
sortByKey,
|
||||
guessTimeFormat,
|
||||
toLocaleString,
|
||||
mlEscape,
|
||||
escapeForElasticsearchQuery,
|
||||
} from '../string_utils';
|
||||
|
||||
describe('ML - string utils', () => {
|
||||
describe('replaceStringTokens', () => {
|
||||
const testRecord = {
|
||||
job_id: 'test_job',
|
||||
result_type: 'record',
|
||||
probability: 0.0191711,
|
||||
record_score: 4.3,
|
||||
bucket_span: 300,
|
||||
detector_index: 0,
|
||||
timestamp: 1454890500000,
|
||||
function: 'mean',
|
||||
function_description: 'mean',
|
||||
field_name: 'responsetime',
|
||||
user: "Des O'Connor",
|
||||
testfield1: 'test$tring=[+-?]',
|
||||
testfield2: '{<()>}',
|
||||
testfield3: 'host=\\\\test@uk.dev',
|
||||
};
|
||||
|
||||
it('returns correct values without URI encoding', () => {
|
||||
const result = replaceStringTokens('user=$user$,time=$timestamp$', testRecord, false);
|
||||
expect(result).to.be("user=Des O'Connor,time=1454890500000");
|
||||
});
|
||||
|
||||
it('returns correct values for missing token without URI encoding', () => {
|
||||
const result = replaceStringTokens('user=$username$,time=$timestamp$', testRecord, false);
|
||||
expect(result).to.be('user=$username$,time=1454890500000');
|
||||
});
|
||||
|
||||
it('returns correct values with URI encoding', () => {
|
||||
const testString1 = 'https://www.google.co.uk/webhp#q=$testfield1$';
|
||||
const testString2 = 'https://www.google.co.uk/webhp#q=$testfield2$';
|
||||
const testString3 = 'https://www.google.co.uk/webhp#q=$testfield3$';
|
||||
const testString4 = 'https://www.google.co.uk/webhp#q=$user$';
|
||||
|
||||
const result1 = replaceStringTokens(testString1, testRecord, true);
|
||||
const result2 = replaceStringTokens(testString2, testRecord, true);
|
||||
const result3 = replaceStringTokens(testString3, testRecord, true);
|
||||
const result4 = replaceStringTokens(testString4, testRecord, true);
|
||||
|
||||
expect(result1).to.be('https://www.google.co.uk/webhp#q=test%24tring%3D%5B%2B-%3F%5D');
|
||||
expect(result2).to.be('https://www.google.co.uk/webhp#q=%7B%3C()%3E%7D');
|
||||
expect(result3).to.be('https://www.google.co.uk/webhp#q=host%3D%5C%5Ctest%40uk.dev');
|
||||
expect(result4).to.be("https://www.google.co.uk/webhp#q=Des%20O'Connor");
|
||||
});
|
||||
|
||||
it('returns correct values for missing token with URI encoding', () => {
|
||||
const testString = 'https://www.google.co.uk/webhp#q=$username$&time=$timestamp$';
|
||||
const result = replaceStringTokens(testString, testRecord, true);
|
||||
expect(result).to.be('https://www.google.co.uk/webhp#q=$username$&time=1454890500000');
|
||||
});
|
||||
});
|
||||
|
||||
describe('detectorToString', () => {
|
||||
it('returns the correct descriptions for detectors', () => {
|
||||
const detector1 = {
|
||||
function: 'count',
|
||||
};
|
||||
|
||||
const detector2 = {
|
||||
function: 'count',
|
||||
by_field_name: 'airline',
|
||||
use_null: false,
|
||||
};
|
||||
|
||||
const detector3 = {
|
||||
function: 'mean',
|
||||
field_name: 'CPUUtilization',
|
||||
partition_field_name: 'region',
|
||||
by_field_name: 'host',
|
||||
over_field_name: 'user',
|
||||
exclude_frequent: 'all',
|
||||
};
|
||||
|
||||
expect(detectorToString(detector1)).to.be('count');
|
||||
expect(detectorToString(detector2)).to.be('count by airline use_null=false');
|
||||
expect(detectorToString(detector3)).to.be(
|
||||
'mean(CPUUtilization) by host over user partition_field_name=region exclude_frequent=all'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sortByKey', () => {
|
||||
const obj = {
|
||||
zebra: 'stripes',
|
||||
giraffe: 'neck',
|
||||
elephant: 'trunk',
|
||||
};
|
||||
|
||||
const valueComparator = function(value) {
|
||||
return value;
|
||||
};
|
||||
|
||||
it('returns correct ordering with default comparator', () => {
|
||||
const result = sortByKey(obj, false);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).to.be('elephant');
|
||||
expect(keys[1]).to.be('giraffe');
|
||||
expect(keys[2]).to.be('zebra');
|
||||
});
|
||||
|
||||
it('returns correct ordering with default comparator and order reversed', () => {
|
||||
const result = sortByKey(obj, true);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).to.be('zebra');
|
||||
expect(keys[1]).to.be('giraffe');
|
||||
expect(keys[2]).to.be('elephant');
|
||||
});
|
||||
|
||||
it('returns correct ordering with comparator', () => {
|
||||
const result = sortByKey(obj, false, valueComparator);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).to.be('giraffe');
|
||||
expect(keys[1]).to.be('zebra');
|
||||
expect(keys[2]).to.be('elephant');
|
||||
});
|
||||
|
||||
it('returns correct ordering with comparator and order reversed', () => {
|
||||
const result = sortByKey(obj, true, valueComparator);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).to.be('elephant');
|
||||
expect(keys[1]).to.be('zebra');
|
||||
expect(keys[2]).to.be('giraffe');
|
||||
});
|
||||
});
|
||||
|
||||
describe('guessTimeFormat', () => {
|
||||
it('returns correct format for various dates', () => {
|
||||
expect(guessTimeFormat('2017-03-24T00:00')).to.be("yyyy-MM-dd'T'HH:mm");
|
||||
expect(guessTimeFormat('2017-03-24 00:00')).to.be('yyyy-MM-dd HH:mm');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00')).to.be('yyyy-MM-dd HH:mm:ss');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00Z')).to.be('yyyy-MM-dd HH:mm:ssX');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000')).to.be('yyyy-MM-dd HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00:000')).to.be('yyyy-MM-dd HH:mm:ss:SSS');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000+00:00:00')).to.be(
|
||||
'yyyy-MM-dd HH:mm:ss.SSSXXXXX'
|
||||
);
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000+00:00')).to.be('yyyy-MM-dd HH:mm:ss.SSSXXX');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000+000000')).to.be(
|
||||
'yyyy-MM-dd HH:mm:ss.SSSXXXX'
|
||||
);
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000+0000')).to.be('yyyy-MM-dd HH:mm:ss.SSSZ');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000+00')).to.be('yyyy-MM-dd HH:mm:ss.SSSX');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000Z')).to.be('yyyy-MM-dd HH:mm:ss.SSSX');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00.000 GMT')).to.be('yyyy-MM-dd HH:mm:ss.SSS zzz');
|
||||
expect(guessTimeFormat('2017-03-24 00:00:00 GMT')).to.be('yyyy-MM-dd HH:mm:ss zzz');
|
||||
expect(guessTimeFormat('2017 03 24 00:00:00.000')).to.be('yyyy MM dd HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('2017.03.24 00:00:00.000')).to.be('yyyy.MM.dd HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('2017/03/24 00:00:00.000')).to.be('yyyy/MM/dd HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('24/03/2017 00:00:00.000')).to.be('dd/MM/yyyy HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('03 24 2017 00:00:00.000')).to.be('MM dd yyyy HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('03/24/2017 00:00:00.000')).to.be('MM/dd/yyyy HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('2017 Mar 24 00:00:00.000')).to.be('yyyy MMM dd HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('Mar 24 2017 00:00:00.000')).to.be('MMM dd yyyy HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('24 Mar 2017 00:00:00.000')).to.be('dd MMM yyyy HH:mm:ss.SSS');
|
||||
expect(guessTimeFormat('1490313600')).to.be('epoch');
|
||||
expect(guessTimeFormat('1490313600000')).to.be('epoch_ms');
|
||||
});
|
||||
});
|
||||
|
||||
describe('toLocaleString', () => {
|
||||
it('returns correct comma placement for large numbers', () => {
|
||||
expect(toLocaleString(1)).to.be('1');
|
||||
expect(toLocaleString(10)).to.be('10');
|
||||
expect(toLocaleString(100)).to.be('100');
|
||||
expect(toLocaleString(1000)).to.be('1,000');
|
||||
expect(toLocaleString(10000)).to.be('10,000');
|
||||
expect(toLocaleString(100000)).to.be('100,000');
|
||||
expect(toLocaleString(1000000)).to.be('1,000,000');
|
||||
expect(toLocaleString(10000000)).to.be('10,000,000');
|
||||
expect(toLocaleString(100000000)).to.be('100,000,000');
|
||||
expect(toLocaleString(1000000000)).to.be('1,000,000,000');
|
||||
});
|
||||
});
|
||||
|
||||
describe('mlEscape', () => {
|
||||
it('returns correct escaping of characters', () => {
|
||||
expect(mlEscape('foo&bar')).to.be('foo&bar');
|
||||
expect(mlEscape('foo<bar')).to.be('foo<bar');
|
||||
expect(mlEscape('foo>bar')).to.be('foo>bar');
|
||||
expect(mlEscape('foo"bar')).to.be('foo"bar');
|
||||
expect(mlEscape("foo'bar")).to.be('foo'bar');
|
||||
expect(mlEscape('foo/bar')).to.be('foo/bar');
|
||||
});
|
||||
});
|
||||
|
||||
describe('escapeForElasticsearchQuery', () => {
|
||||
it('returns correct escaping of reserved elasticsearch characters', () => {
|
||||
expect(escapeForElasticsearchQuery('foo+bar')).to.be('foo\\+bar');
|
||||
expect(escapeForElasticsearchQuery('foo-bar')).to.be('foo\\-bar');
|
||||
expect(escapeForElasticsearchQuery('foo=bar')).to.be('foo\\=bar');
|
||||
expect(escapeForElasticsearchQuery('foo&&bar')).to.be('foo\\&\\&bar');
|
||||
expect(escapeForElasticsearchQuery('foo||bar')).to.be('foo\\|\\|bar');
|
||||
expect(escapeForElasticsearchQuery('foo>bar')).to.be('foo\\>bar');
|
||||
expect(escapeForElasticsearchQuery('foo<bar')).to.be('foo\\<bar');
|
||||
expect(escapeForElasticsearchQuery('foo!bar')).to.be('foo\\!bar');
|
||||
expect(escapeForElasticsearchQuery('foo(bar')).to.be('foo\\(bar');
|
||||
expect(escapeForElasticsearchQuery('foo)bar')).to.be('foo\\)bar');
|
||||
expect(escapeForElasticsearchQuery('foo{bar')).to.be('foo\\{bar');
|
||||
expect(escapeForElasticsearchQuery('foo[bar')).to.be('foo\\[bar');
|
||||
expect(escapeForElasticsearchQuery('foo]bar')).to.be('foo\\]bar');
|
||||
expect(escapeForElasticsearchQuery('foo^bar')).to.be('foo\\^bar');
|
||||
expect(escapeForElasticsearchQuery('foo"bar')).to.be('foo\\"bar');
|
||||
expect(escapeForElasticsearchQuery('foo~bar')).to.be('foo\\~bar');
|
||||
expect(escapeForElasticsearchQuery('foo*bar')).to.be('foo\\*bar');
|
||||
expect(escapeForElasticsearchQuery('foo?bar')).to.be('foo\\?bar');
|
||||
expect(escapeForElasticsearchQuery('foo:bar')).to.be('foo\\:bar');
|
||||
expect(escapeForElasticsearchQuery('foo\\bar')).to.be('foo\\\\bar');
|
||||
expect(escapeForElasticsearchQuery('foo/bar')).to.be('foo\\/bar');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,139 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import moment from 'moment';
|
||||
|
||||
import { timeBucketsCalcAutoIntervalProvider } from './calc_auto_interval';
|
||||
|
||||
describe('ML - calc auto intervals', () => {
|
||||
const calcAuto = timeBucketsCalcAutoIntervalProvider();
|
||||
|
||||
describe('near interval', () => {
|
||||
test('returns 0ms buckets for undefined / 0 bars', () => {
|
||||
const interval = calcAuto.near(0, undefined);
|
||||
expect(interval.asMilliseconds()).toBe(0);
|
||||
});
|
||||
|
||||
test('returns 1000ms buckets for 60s / 100 bars', () => {
|
||||
const interval = calcAuto.near(100, moment.duration(60, 's'));
|
||||
expect(interval.asMilliseconds()).toBe(1000);
|
||||
});
|
||||
|
||||
test('returns 5m buckets for 8h / 100 bars', () => {
|
||||
const interval = calcAuto.near(100, moment.duration(8, 'h'));
|
||||
expect(interval.asMinutes()).toBe(5);
|
||||
});
|
||||
|
||||
test('returns 15m buckets for 1d / 100 bars', () => {
|
||||
const interval = calcAuto.near(100, moment.duration(1, 'd'));
|
||||
expect(interval.asMinutes()).toBe(15);
|
||||
});
|
||||
|
||||
test('returns 1h buckets for 20d / 500 bars', () => {
|
||||
const interval = calcAuto.near(500, moment.duration(20, 'd'));
|
||||
expect(interval.asHours()).toBe(1);
|
||||
});
|
||||
|
||||
test('returns 6h buckets for 100d / 500 bars', () => {
|
||||
const interval = calcAuto.near(500, moment.duration(100, 'd'));
|
||||
expect(interval.asHours()).toBe(6);
|
||||
});
|
||||
|
||||
test('returns 24h buckets for 1y / 500 bars', () => {
|
||||
const interval = calcAuto.near(500, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).toBe(24);
|
||||
});
|
||||
|
||||
test('returns 12h buckets for 1y / 1000 bars', () => {
|
||||
const interval = calcAuto.near(1000, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).toBe(12);
|
||||
});
|
||||
});
|
||||
|
||||
describe('lessThan interval', () => {
|
||||
test('returns 0ms buckets for undefined / 0 bars', () => {
|
||||
const interval = calcAuto.lessThan(0, undefined);
|
||||
expect(interval.asMilliseconds()).toBe(0);
|
||||
});
|
||||
|
||||
test('returns 500ms buckets for 60s / 100 bars', () => {
|
||||
const interval = calcAuto.lessThan(100, moment.duration(60, 's'));
|
||||
expect(interval.asMilliseconds()).toBe(500);
|
||||
});
|
||||
|
||||
test('returns 5m buckets for 8h / 100 bars', () => {
|
||||
const interval = calcAuto.lessThan(100, moment.duration(8, 'h'));
|
||||
expect(interval.asMinutes()).toBe(5);
|
||||
});
|
||||
|
||||
test('returns 30m buckets for 1d / 100 bars', () => {
|
||||
const interval = calcAuto.lessThan(100, moment.duration(1, 'd'));
|
||||
expect(interval.asMinutes()).toBe(30);
|
||||
});
|
||||
|
||||
test('returns 1h buckets for 20d / 500 bars', () => {
|
||||
const interval = calcAuto.lessThan(500, moment.duration(20, 'd'));
|
||||
expect(interval.asHours()).toBe(1);
|
||||
});
|
||||
|
||||
test('returns 6h buckets for 100d / 500 bars', () => {
|
||||
const interval = calcAuto.lessThan(500, moment.duration(100, 'd'));
|
||||
expect(interval.asHours()).toBe(6);
|
||||
});
|
||||
|
||||
test('returns 24h buckets for 1y / 500 bars', () => {
|
||||
const interval = calcAuto.lessThan(500, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).toBe(24);
|
||||
});
|
||||
|
||||
test('returns 12h buckets for 1y / 1000 bars', () => {
|
||||
const interval = calcAuto.lessThan(1000, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).toBe(12);
|
||||
});
|
||||
});
|
||||
|
||||
describe('atLeast interval', () => {
|
||||
test('returns 0ms buckets for undefined / 0 bars', () => {
|
||||
const interval = calcAuto.atLeast(0, undefined);
|
||||
expect(interval.asMilliseconds()).toBe(0);
|
||||
});
|
||||
|
||||
test('returns 100ms buckets for 60s / 100 bars', () => {
|
||||
const interval = calcAuto.atLeast(100, moment.duration(60, 's'));
|
||||
expect(interval.asMilliseconds()).toBe(100);
|
||||
});
|
||||
|
||||
test('returns 1m buckets for 8h / 100 bars', () => {
|
||||
const interval = calcAuto.atLeast(100, moment.duration(8, 'h'));
|
||||
expect(interval.asMinutes()).toBe(1);
|
||||
});
|
||||
|
||||
test('returns 10m buckets for 1d / 100 bars', () => {
|
||||
const interval = calcAuto.atLeast(100, moment.duration(1, 'd'));
|
||||
expect(interval.asMinutes()).toBe(10);
|
||||
});
|
||||
|
||||
test('returns 30m buckets for 20d / 500 bars', () => {
|
||||
const interval = calcAuto.atLeast(500, moment.duration(20, 'd'));
|
||||
expect(interval.asMinutes()).toBe(30);
|
||||
});
|
||||
|
||||
test('returns 4h buckets for 100d / 500 bars', () => {
|
||||
const interval = calcAuto.atLeast(500, moment.duration(100, 'd'));
|
||||
expect(interval.asHours()).toBe(4);
|
||||
});
|
||||
|
||||
test('returns 12h buckets for 1y / 500 bars', () => {
|
||||
const interval = calcAuto.atLeast(500, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).toBe(12);
|
||||
});
|
||||
|
||||
test('returns 8h buckets for 1y / 1000 bars', () => {
|
||||
const interval = calcAuto.atLeast(1000, moment.duration(1, 'y'));
|
||||
expect(interval.asHours()).toBe(8);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -29,246 +29,488 @@ const timefilter = getTimefilter();
|
|||
|
||||
import d3 from 'd3';
|
||||
import moment from 'moment';
|
||||
import { mount } from 'enzyme';
|
||||
import React from 'react';
|
||||
import { render } from '@testing-library/react';
|
||||
|
||||
import {
|
||||
chartLimits,
|
||||
getChartType,
|
||||
getExploreSeriesLink,
|
||||
getTickValues,
|
||||
isLabelLengthAboveThreshold,
|
||||
getXTransform,
|
||||
isLabelLengthAboveThreshold,
|
||||
numTicks,
|
||||
removeLabelOverlap,
|
||||
showMultiBucketAnomalyMarker,
|
||||
showMultiBucketAnomalyTooltip,
|
||||
} from './chart_utils';
|
||||
|
||||
import { MULTI_BUCKET_IMPACT } from '../../../common/constants/multi_bucket_impact';
|
||||
import { CHART_TYPE } from '../explorer/explorer_constants';
|
||||
|
||||
timefilter.setTime({
|
||||
from: moment(seriesConfig.selectedEarliest).toISOString(),
|
||||
to: moment(seriesConfig.selectedLatest).toISOString(),
|
||||
});
|
||||
|
||||
describe('getExploreSeriesLink', () => {
|
||||
test('get timeseriesexplorer link', () => {
|
||||
const link = getExploreSeriesLink(seriesConfig);
|
||||
const expectedLink =
|
||||
`#/timeseriesexplorer?_g=(ml:(jobIds:!(population-03)),` +
|
||||
`refreshInterval:(display:Off,pause:!f,value:0),time:(from:'2017-02-23T00:00:00.000Z',mode:absolute,` +
|
||||
`to:'2017-02-23T23:59:59.999Z'))&_a=(mlTimeSeriesExplorer%3A(detectorIndex%3A0%2Centities%3A` +
|
||||
`(nginx.access.remote_ip%3A'72.57.0.53')%2Czoom%3A(from%3A'2017-02-19T20%3A00%3A00.000Z'%2Cto%3A'2017-02-27T04%3A00%3A00.000Z'))` +
|
||||
`%2Cquery%3A(query_string%3A(analyze_wildcard%3A!t%2Cquery%3A'*')))`;
|
||||
|
||||
expect(link).toBe(expectedLink);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTickValues', () => {
|
||||
test('farequote sample data', () => {
|
||||
const tickValues = getTickValues(1486656000000, 14400000, 1486606500000, 1486719900000);
|
||||
|
||||
expect(tickValues).toEqual([
|
||||
1486612800000,
|
||||
1486627200000,
|
||||
1486641600000,
|
||||
1486656000000,
|
||||
1486670400000,
|
||||
1486684800000,
|
||||
1486699200000,
|
||||
1486713600000,
|
||||
]);
|
||||
});
|
||||
|
||||
test('filebeat sample data', () => {
|
||||
const tickValues = getTickValues(1486080000000, 14400000, 1485860400000, 1486314000000);
|
||||
expect(tickValues).toEqual([
|
||||
1485864000000,
|
||||
1485878400000,
|
||||
1485892800000,
|
||||
1485907200000,
|
||||
1485921600000,
|
||||
1485936000000,
|
||||
1485950400000,
|
||||
1485964800000,
|
||||
1485979200000,
|
||||
1485993600000,
|
||||
1486008000000,
|
||||
1486022400000,
|
||||
1486036800000,
|
||||
1486051200000,
|
||||
1486065600000,
|
||||
1486080000000,
|
||||
1486094400000,
|
||||
1486108800000,
|
||||
1486123200000,
|
||||
1486137600000,
|
||||
1486152000000,
|
||||
1486166400000,
|
||||
1486180800000,
|
||||
1486195200000,
|
||||
1486209600000,
|
||||
1486224000000,
|
||||
1486238400000,
|
||||
1486252800000,
|
||||
1486267200000,
|
||||
1486281600000,
|
||||
1486296000000,
|
||||
1486310400000,
|
||||
]);
|
||||
});
|
||||
|
||||
test('gallery sample data', () => {
|
||||
const tickValues = getTickValues(1518652800000, 604800000, 1518274800000, 1519635600000);
|
||||
expect(tickValues).toEqual([1518652800000, 1519257600000]);
|
||||
});
|
||||
|
||||
test('invalid tickIntervals trigger an error', () => {
|
||||
expect(() => {
|
||||
getTickValues(1518652800000, 0, 1518274800000, 1519635600000);
|
||||
}).toThrow();
|
||||
expect(() => {
|
||||
getTickValues(1518652800000, -604800000, 1518274800000, 1519635600000);
|
||||
}).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isLabelLengthAboveThreshold', () => {
|
||||
test('short label', () => {
|
||||
const isLongLabel = isLabelLengthAboveThreshold({
|
||||
detectorLabel: 'count',
|
||||
entityFields: seriesConfig.entityFields,
|
||||
describe('ML - chart utils', () => {
|
||||
describe('chartLimits', () => {
|
||||
test('returns NaN when called without data', () => {
|
||||
const limits = chartLimits();
|
||||
expect(limits.min).toBeNaN();
|
||||
expect(limits.max).toBeNaN();
|
||||
});
|
||||
|
||||
test('returns {max: 625736376, min: 201039318} for some test data', () => {
|
||||
const data = [
|
||||
{
|
||||
date: new Date('2017-02-23T08:00:00.000Z'),
|
||||
value: 228243469,
|
||||
anomalyScore: 63.32916,
|
||||
numberOfCauses: 1,
|
||||
actual: [228243469],
|
||||
typical: [133107.7703441773],
|
||||
},
|
||||
{ date: new Date('2017-02-23T09:00:00.000Z'), value: null },
|
||||
{ date: new Date('2017-02-23T10:00:00.000Z'), value: null },
|
||||
{ date: new Date('2017-02-23T11:00:00.000Z'), value: null },
|
||||
{
|
||||
date: new Date('2017-02-23T12:00:00.000Z'),
|
||||
value: 625736376,
|
||||
anomalyScore: 97.32085,
|
||||
numberOfCauses: 1,
|
||||
actual: [625736376],
|
||||
typical: [132830.424736973],
|
||||
},
|
||||
{
|
||||
date: new Date('2017-02-23T13:00:00.000Z'),
|
||||
value: 201039318,
|
||||
anomalyScore: 59.83488,
|
||||
numberOfCauses: 1,
|
||||
actual: [201039318],
|
||||
typical: [132739.5267403542],
|
||||
},
|
||||
];
|
||||
|
||||
const limits = chartLimits(data);
|
||||
|
||||
// {max: 625736376, min: 201039318}
|
||||
expect(limits.min).toBe(201039318);
|
||||
expect(limits.max).toBe(625736376);
|
||||
});
|
||||
|
||||
test("adds 5% padding when min/max are the same, e.g. when there's only one data point", () => {
|
||||
const data = [
|
||||
{
|
||||
date: new Date('2017-02-23T08:00:00.000Z'),
|
||||
value: 100,
|
||||
anomalyScore: 50,
|
||||
numberOfCauses: 1,
|
||||
actual: [100],
|
||||
typical: [100],
|
||||
},
|
||||
];
|
||||
|
||||
const limits = chartLimits(data);
|
||||
expect(limits.min).toBe(95);
|
||||
expect(limits.max).toBe(105);
|
||||
});
|
||||
|
||||
test('returns minimum of 0 when data includes an anomaly for missing data', () => {
|
||||
const data = [
|
||||
{ date: new Date('2017-02-23T09:00:00.000Z'), value: 22.2 },
|
||||
{ date: new Date('2017-02-23T10:00:00.000Z'), value: 23.3 },
|
||||
{ date: new Date('2017-02-23T11:00:00.000Z'), value: 24.4 },
|
||||
{
|
||||
date: new Date('2017-02-23T12:00:00.000Z'),
|
||||
value: null,
|
||||
anomalyScore: 97.32085,
|
||||
actual: [0],
|
||||
typical: [22.2],
|
||||
},
|
||||
{ date: new Date('2017-02-23T13:00:00.000Z'), value: 21.3 },
|
||||
{ date: new Date('2017-02-23T14:00:00.000Z'), value: 21.2 },
|
||||
{ date: new Date('2017-02-23T15:00:00.000Z'), value: 21.1 },
|
||||
];
|
||||
|
||||
const limits = chartLimits(data);
|
||||
expect(limits.min).toBe(0);
|
||||
expect(limits.max).toBe(24.4);
|
||||
});
|
||||
expect(isLongLabel).toBeFalsy();
|
||||
});
|
||||
|
||||
test('long label', () => {
|
||||
const isLongLabel = isLabelLengthAboveThreshold(seriesConfig);
|
||||
expect(isLongLabel).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getXTransform', () => {
|
||||
const expectedXTransform = 0.007167499999999999;
|
||||
|
||||
test('Chrome/Safari/Firefox String variant.', () => {
|
||||
const transformStr = 'translate(0.007167499999999999,0)';
|
||||
const xTransform = getXTransform(transformStr);
|
||||
expect(xTransform).toEqual(expectedXTransform);
|
||||
});
|
||||
|
||||
test('IE11 String variant.', () => {
|
||||
const transformStr = 'translate(0.007167499999999999)';
|
||||
const xTransform = getXTransform(transformStr);
|
||||
expect(xTransform).toEqual(expectedXTransform);
|
||||
});
|
||||
|
||||
test('Invalid String.', () => {
|
||||
const transformStr = 'translate()';
|
||||
const xTransform = getXTransform(transformStr);
|
||||
expect(xTransform).toEqual(NaN);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeLabelOverlap', () => {
|
||||
const originalGetBBox = SVGElement.prototype.getBBox;
|
||||
|
||||
// This resembles how ExplorerChart renders its x axis.
|
||||
// We set up this boilerplate so we can then run removeLabelOverlap()
|
||||
// on some "real" structure.
|
||||
function axisSetup({ interval, plotEarliest, plotLatest, startTimeMs, xAxisTickFormat }) {
|
||||
const wrapper = mount(<div className="content-wrapper" />);
|
||||
const node = wrapper.getDOMNode();
|
||||
|
||||
const chartHeight = 170;
|
||||
const margin = { top: 10, right: 0, bottom: 30, left: 60 };
|
||||
const svgWidth = 500;
|
||||
const svgHeight = chartHeight + margin.top + margin.bottom;
|
||||
const vizWidth = 500;
|
||||
|
||||
const chartElement = d3.select(node);
|
||||
|
||||
const lineChartXScale = d3.time
|
||||
.scale()
|
||||
.range([0, vizWidth])
|
||||
.domain([plotEarliest, plotLatest]);
|
||||
|
||||
const xAxis = d3.svg
|
||||
.axis()
|
||||
.scale(lineChartXScale)
|
||||
.orient('bottom')
|
||||
.innerTickSize(-chartHeight)
|
||||
.outerTickSize(0)
|
||||
.tickPadding(10)
|
||||
.tickFormat(d => moment(d).format(xAxisTickFormat));
|
||||
|
||||
const tickValues = getTickValues(startTimeMs, interval, plotEarliest, plotLatest);
|
||||
xAxis.tickValues(tickValues);
|
||||
|
||||
const svg = chartElement
|
||||
.append('svg')
|
||||
.attr('width', svgWidth)
|
||||
.attr('height', svgHeight);
|
||||
|
||||
const axes = svg.append('g');
|
||||
|
||||
const gAxis = axes
|
||||
.append('g')
|
||||
.attr('class', 'x axis')
|
||||
.attr('transform', 'translate(0,' + chartHeight + ')')
|
||||
.call(xAxis);
|
||||
|
||||
return {
|
||||
gAxis,
|
||||
node,
|
||||
vizWidth,
|
||||
describe('getChartType', () => {
|
||||
const singleMetricConfig = {
|
||||
metricFunction: 'avg',
|
||||
functionDescription: 'mean',
|
||||
fieldName: 'responsetime',
|
||||
entityFields: [],
|
||||
};
|
||||
}
|
||||
|
||||
test('farequote sample data', () => {
|
||||
const mockedGetBBox = { width: 27.21875 };
|
||||
SVGElement.prototype.getBBox = () => mockedGetBBox;
|
||||
const multiMetricConfig = {
|
||||
metricFunction: 'avg',
|
||||
functionDescription: 'mean',
|
||||
fieldName: 'responsetime',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'airline',
|
||||
fieldValue: 'AAL',
|
||||
fieldType: 'partition',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const startTimeMs = 1486656000000;
|
||||
const interval = 14400000;
|
||||
const populationConfig = {
|
||||
metricFunction: 'avg',
|
||||
functionDescription: 'mean',
|
||||
fieldName: 'http.response.body.bytes',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'source.ip',
|
||||
fieldValue: '10.11.12.13',
|
||||
fieldType: 'over',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const { gAxis, node, vizWidth } = axisSetup({
|
||||
interval,
|
||||
plotEarliest: 1486606500000,
|
||||
plotLatest: 1486719900000,
|
||||
startTimeMs,
|
||||
xAxisTickFormat: 'HH:mm',
|
||||
const rareConfig = {
|
||||
metricFunction: 'count',
|
||||
functionDescription: 'rare',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'http.response.status_code',
|
||||
fieldValue: '404',
|
||||
fieldType: 'by',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const varpModelPlotConfig = {
|
||||
metricFunction: null,
|
||||
functionDescription: 'varp',
|
||||
fieldName: 'NetworkOut',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'instance',
|
||||
fieldValue: 'i-ef74d410',
|
||||
fieldType: 'over',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const overScriptFieldModelPlotConfig = {
|
||||
metricFunction: 'count',
|
||||
functionDescription: 'count',
|
||||
fieldName: 'highest_registered_domain',
|
||||
entityFields: [
|
||||
{
|
||||
fieldName: 'highest_registered_domain',
|
||||
fieldValue: 'elastic.co',
|
||||
fieldType: 'over',
|
||||
},
|
||||
],
|
||||
datafeedConfig: {
|
||||
script_fields: {
|
||||
highest_registered_domain: {
|
||||
script: {
|
||||
source: "return domainSplit(doc['query'].value, params).get(1);",
|
||||
lang: 'painless',
|
||||
},
|
||||
ignore_failure: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test('returns single metric chart type as expected for configs', () => {
|
||||
expect(getChartType(singleMetricConfig)).toBe(CHART_TYPE.SINGLE_METRIC);
|
||||
expect(getChartType(multiMetricConfig)).toBe(CHART_TYPE.SINGLE_METRIC);
|
||||
expect(getChartType(varpModelPlotConfig)).toBe(CHART_TYPE.SINGLE_METRIC);
|
||||
expect(getChartType(overScriptFieldModelPlotConfig)).toBe(CHART_TYPE.SINGLE_METRIC);
|
||||
});
|
||||
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(8);
|
||||
test('returns event distribution chart type as expected for configs', () => {
|
||||
expect(getChartType(rareConfig)).toBe(CHART_TYPE.EVENT_DISTRIBUTION);
|
||||
});
|
||||
|
||||
removeLabelOverlap(gAxis, startTimeMs, interval, vizWidth);
|
||||
|
||||
// at the vizWidth of 500, the most left and right tick label
|
||||
// will get removed because it overflows the chart area
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(6);
|
||||
|
||||
SVGElement.prototype.getBBox = originalGetBBox;
|
||||
test('returns population distribution chart type as expected for configs', () => {
|
||||
expect(getChartType(populationConfig)).toBe(CHART_TYPE.POPULATION_DISTRIBUTION);
|
||||
});
|
||||
});
|
||||
|
||||
test('filebeat sample data', () => {
|
||||
const mockedGetBBox = { width: 85.640625 };
|
||||
SVGElement.prototype.getBBox = () => mockedGetBBox;
|
||||
describe('getExploreSeriesLink', () => {
|
||||
test('get timeseriesexplorer link', () => {
|
||||
const link = getExploreSeriesLink(seriesConfig);
|
||||
const expectedLink =
|
||||
`#/timeseriesexplorer?_g=(ml:(jobIds:!(population-03)),` +
|
||||
`refreshInterval:(display:Off,pause:!f,value:0),time:(from:'2017-02-23T00:00:00.000Z',mode:absolute,` +
|
||||
`to:'2017-02-23T23:59:59.999Z'))&_a=(mlTimeSeriesExplorer%3A(detectorIndex%3A0%2Centities%3A` +
|
||||
`(nginx.access.remote_ip%3A'72.57.0.53')%2Czoom%3A(from%3A'2017-02-19T20%3A00%3A00.000Z'%2Cto%3A'2017-02-27T04%3A00%3A00.000Z'))` +
|
||||
`%2Cquery%3A(query_string%3A(analyze_wildcard%3A!t%2Cquery%3A'*')))`;
|
||||
|
||||
const startTimeMs = 1486080000000;
|
||||
const interval = 14400000;
|
||||
expect(link).toBe(expectedLink);
|
||||
});
|
||||
});
|
||||
|
||||
const { gAxis, node, vizWidth } = axisSetup({
|
||||
interval,
|
||||
plotEarliest: 1485860400000,
|
||||
plotLatest: 1486314000000,
|
||||
startTimeMs,
|
||||
xAxisTickFormat: 'YYYY-MM-DD HH:mm',
|
||||
describe('numTicks', () => {
|
||||
test('returns 10 for 1000', () => {
|
||||
expect(numTicks(1000)).toBe(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('showMultiBucketAnomalyMarker', () => {
|
||||
test('returns true for points with multiBucketImpact at or above medium impact', () => {
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.HIGH })).toBe(
|
||||
true
|
||||
);
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.MEDIUM })).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(32);
|
||||
test('returns false for points with multiBucketImpact missing or below medium impact', () => {
|
||||
expect(showMultiBucketAnomalyMarker({})).toBe(false);
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.LOW })).toBe(
|
||||
false
|
||||
);
|
||||
expect(showMultiBucketAnomalyMarker({ multiBucketImpact: MULTI_BUCKET_IMPACT.NONE })).toBe(
|
||||
false
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
removeLabelOverlap(gAxis, startTimeMs, interval, vizWidth);
|
||||
describe('showMultiBucketAnomalyTooltip', () => {
|
||||
test('returns true for points with multiBucketImpact at or above low impact', () => {
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.HIGH })).toBe(
|
||||
true
|
||||
);
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.MEDIUM })).toBe(
|
||||
true
|
||||
);
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.LOW })).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
// In this case labels get reduced significantly because of the wider
|
||||
// labels (full dates + time) and the narrow interval.
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(3);
|
||||
test('returns false for points with multiBucketImpact missing or below medium impact', () => {
|
||||
expect(showMultiBucketAnomalyTooltip({})).toBe(false);
|
||||
expect(showMultiBucketAnomalyTooltip({ multiBucketImpact: MULTI_BUCKET_IMPACT.NONE })).toBe(
|
||||
false
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
SVGElement.prototype.getBBox = originalGetBBox;
|
||||
describe('getTickValues', () => {
|
||||
test('farequote sample data', () => {
|
||||
const tickValues = getTickValues(1486656000000, 14400000, 1486606500000, 1486719900000);
|
||||
|
||||
expect(tickValues).toEqual([
|
||||
1486612800000,
|
||||
1486627200000,
|
||||
1486641600000,
|
||||
1486656000000,
|
||||
1486670400000,
|
||||
1486684800000,
|
||||
1486699200000,
|
||||
1486713600000,
|
||||
]);
|
||||
});
|
||||
|
||||
test('filebeat sample data', () => {
|
||||
const tickValues = getTickValues(1486080000000, 14400000, 1485860400000, 1486314000000);
|
||||
expect(tickValues).toEqual([
|
||||
1485864000000,
|
||||
1485878400000,
|
||||
1485892800000,
|
||||
1485907200000,
|
||||
1485921600000,
|
||||
1485936000000,
|
||||
1485950400000,
|
||||
1485964800000,
|
||||
1485979200000,
|
||||
1485993600000,
|
||||
1486008000000,
|
||||
1486022400000,
|
||||
1486036800000,
|
||||
1486051200000,
|
||||
1486065600000,
|
||||
1486080000000,
|
||||
1486094400000,
|
||||
1486108800000,
|
||||
1486123200000,
|
||||
1486137600000,
|
||||
1486152000000,
|
||||
1486166400000,
|
||||
1486180800000,
|
||||
1486195200000,
|
||||
1486209600000,
|
||||
1486224000000,
|
||||
1486238400000,
|
||||
1486252800000,
|
||||
1486267200000,
|
||||
1486281600000,
|
||||
1486296000000,
|
||||
1486310400000,
|
||||
]);
|
||||
});
|
||||
|
||||
test('gallery sample data', () => {
|
||||
const tickValues = getTickValues(1518652800000, 604800000, 1518274800000, 1519635600000);
|
||||
expect(tickValues).toEqual([1518652800000, 1519257600000]);
|
||||
});
|
||||
|
||||
test('invalid tickIntervals trigger an error', () => {
|
||||
expect(() => {
|
||||
getTickValues(1518652800000, 0, 1518274800000, 1519635600000);
|
||||
}).toThrow();
|
||||
expect(() => {
|
||||
getTickValues(1518652800000, -604800000, 1518274800000, 1519635600000);
|
||||
}).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isLabelLengthAboveThreshold', () => {
|
||||
test('short label', () => {
|
||||
const isLongLabel = isLabelLengthAboveThreshold({
|
||||
detectorLabel: 'count',
|
||||
entityFields: seriesConfig.entityFields,
|
||||
});
|
||||
expect(isLongLabel).toBeFalsy();
|
||||
});
|
||||
|
||||
test('long label', () => {
|
||||
const isLongLabel = isLabelLengthAboveThreshold(seriesConfig);
|
||||
expect(isLongLabel).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getXTransform', () => {
|
||||
const expectedXTransform = 0.007167499999999999;
|
||||
|
||||
test('Chrome/Safari/Firefox String variant.', () => {
|
||||
const transformStr = 'translate(0.007167499999999999,0)';
|
||||
const xTransform = getXTransform(transformStr);
|
||||
expect(xTransform).toEqual(expectedXTransform);
|
||||
});
|
||||
|
||||
test('IE11 String variant.', () => {
|
||||
const transformStr = 'translate(0.007167499999999999)';
|
||||
const xTransform = getXTransform(transformStr);
|
||||
expect(xTransform).toEqual(expectedXTransform);
|
||||
});
|
||||
|
||||
test('Invalid String.', () => {
|
||||
const transformStr = 'translate()';
|
||||
const xTransform = getXTransform(transformStr);
|
||||
expect(xTransform).toEqual(NaN);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeLabelOverlap', () => {
|
||||
const originalGetBBox = SVGElement.prototype.getBBox;
|
||||
|
||||
// This resembles how ExplorerChart renders its x axis.
|
||||
// We set up this boilerplate so we can then run removeLabelOverlap()
|
||||
// on some "real" structure.
|
||||
function axisSetup({ interval, plotEarliest, plotLatest, startTimeMs, xAxisTickFormat }) {
|
||||
const { container } = render(<div className="content-wrapper" />);
|
||||
const node = container.querySelector('.content-wrapper');
|
||||
|
||||
const chartHeight = 170;
|
||||
const margin = { top: 10, right: 0, bottom: 30, left: 60 };
|
||||
const svgWidth = 500;
|
||||
const svgHeight = chartHeight + margin.top + margin.bottom;
|
||||
const vizWidth = 500;
|
||||
|
||||
const chartElement = d3.select(node);
|
||||
|
||||
const lineChartXScale = d3.time
|
||||
.scale()
|
||||
.range([0, vizWidth])
|
||||
.domain([plotEarliest, plotLatest]);
|
||||
|
||||
const xAxis = d3.svg
|
||||
.axis()
|
||||
.scale(lineChartXScale)
|
||||
.orient('bottom')
|
||||
.innerTickSize(-chartHeight)
|
||||
.outerTickSize(0)
|
||||
.tickPadding(10)
|
||||
.tickFormat(d => moment(d).format(xAxisTickFormat));
|
||||
|
||||
const tickValues = getTickValues(startTimeMs, interval, plotEarliest, plotLatest);
|
||||
xAxis.tickValues(tickValues);
|
||||
|
||||
const svg = chartElement
|
||||
.append('svg')
|
||||
.attr('width', svgWidth)
|
||||
.attr('height', svgHeight);
|
||||
|
||||
const axes = svg.append('g');
|
||||
|
||||
const gAxis = axes
|
||||
.append('g')
|
||||
.attr('class', 'x axis')
|
||||
.attr('transform', 'translate(0,' + chartHeight + ')')
|
||||
.call(xAxis);
|
||||
|
||||
return {
|
||||
gAxis,
|
||||
node,
|
||||
vizWidth,
|
||||
};
|
||||
}
|
||||
|
||||
test('farequote sample data', () => {
|
||||
const mockedGetBBox = { width: 27.21875 };
|
||||
SVGElement.prototype.getBBox = () => mockedGetBBox;
|
||||
|
||||
const startTimeMs = 1486656000000;
|
||||
const interval = 14400000;
|
||||
|
||||
const { gAxis, node, vizWidth } = axisSetup({
|
||||
interval,
|
||||
plotEarliest: 1486606500000,
|
||||
plotLatest: 1486719900000,
|
||||
startTimeMs,
|
||||
xAxisTickFormat: 'HH:mm',
|
||||
});
|
||||
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(8);
|
||||
|
||||
removeLabelOverlap(gAxis, startTimeMs, interval, vizWidth);
|
||||
|
||||
// at the vizWidth of 500, the most left and right tick label
|
||||
// will get removed because it overflows the chart area
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(6);
|
||||
|
||||
SVGElement.prototype.getBBox = originalGetBBox;
|
||||
});
|
||||
|
||||
test('filebeat sample data', () => {
|
||||
const mockedGetBBox = { width: 85.640625 };
|
||||
SVGElement.prototype.getBBox = () => mockedGetBBox;
|
||||
|
||||
const startTimeMs = 1486080000000;
|
||||
const interval = 14400000;
|
||||
|
||||
const { gAxis, node, vizWidth } = axisSetup({
|
||||
interval,
|
||||
plotEarliest: 1485860400000,
|
||||
plotLatest: 1486314000000,
|
||||
startTimeMs,
|
||||
xAxisTickFormat: 'YYYY-MM-DD HH:mm',
|
||||
});
|
||||
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(32);
|
||||
|
||||
removeLabelOverlap(gAxis, startTimeMs, interval, vizWidth);
|
||||
|
||||
// In this case labels get reduced significantly because of the wider
|
||||
// labels (full dates + time) and the narrow interval.
|
||||
expect(node.getElementsByTagName('text')).toHaveLength(3);
|
||||
|
||||
SVGElement.prototype.getBBox = originalGetBBox;
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -14,4 +14,8 @@ export function replaceStringTokens(
|
|||
|
||||
export function detectorToString(dtr: any): string;
|
||||
|
||||
export function sortByKey(list: any, reverse: boolean, comparator?: any): any;
|
||||
|
||||
export function toLocaleString(x: number): string;
|
||||
|
||||
export function mlEscape(str: string): string;
|
||||
|
|
|
@ -99,211 +99,6 @@ export function sortByKey(list, reverse, comparator) {
|
|||
);
|
||||
}
|
||||
|
||||
// guess the time format for a given time string
|
||||
export function guessTimeFormat(time) {
|
||||
let format = '';
|
||||
let matched = false;
|
||||
if (isNaN(time)) {
|
||||
let match;
|
||||
|
||||
// match date format
|
||||
if (!matched) {
|
||||
let reg = '';
|
||||
|
||||
reg += '('; // 1 ( date
|
||||
|
||||
reg += '('; // 2 ( yyyy-MM-dd
|
||||
reg += '(\\d{4})'; // 3 yyyy
|
||||
reg += '([-/.\\s])'; // 4 - or . or \s
|
||||
reg += '('; // 5 ( month
|
||||
reg += '([01]\\d)'; // 6 MM
|
||||
reg += '|'; // or
|
||||
reg += '(\\w{3})'; // 7 MMM
|
||||
reg += ')'; // ) end month
|
||||
reg += '([-/.\\s])'; // 8 - or . or \s
|
||||
reg += '([0-3]\\d)'; // 9 dd 0-3 and 0-9
|
||||
reg += ')'; // ) end yyyy-MM-dd
|
||||
|
||||
reg += '|'; // or
|
||||
|
||||
reg += '('; // 10 ( d[d]-MM[M]-yyyy or MM[M]-d[d]-yyyy
|
||||
|
||||
reg += '('; // 11 ( day or month
|
||||
reg += '(\\d{1,2})'; // 12 d or M or dd or MM
|
||||
reg += '|'; // or
|
||||
reg += '(\\w{3})'; // 13 MMM
|
||||
reg += ')'; // ) end day or month
|
||||
|
||||
reg += '([-/.\\s])'; // 14 - or . or \s
|
||||
|
||||
reg += '('; // 15 ( day or month
|
||||
reg += '(\\d{1,2})'; // 12 d or M or dd or MM
|
||||
reg += '|'; // or
|
||||
reg += '(\\w{3})'; // 17 MMM
|
||||
reg += ')'; // ) end day or month
|
||||
|
||||
reg += '([-/.\\s])'; // 18 - or . or \s
|
||||
reg += '(\\d{4})'; // 19 yyyy
|
||||
reg += ')'; // ) end d[d]-MM[M]-yyyy or MM[M]-d[d]-yyyy
|
||||
|
||||
reg += ')'; // ) end date
|
||||
|
||||
reg += '([T\\s])?'; // 20 T or space
|
||||
|
||||
reg += '([0-2]\\d)'; // 21 HH 0-2 and 0-9
|
||||
reg += '([:.])'; // 22 :.
|
||||
reg += '([0-5]\\d)'; // 23 mm 0-5 and 0-9
|
||||
reg += '('; // 24 ( optional secs
|
||||
reg += '([:.])'; // 25 :.
|
||||
reg += '([0-5]\\d)'; // 26 ss 0-5 and 0-9
|
||||
reg += ')?'; // ) end optional secs
|
||||
reg += '('; // 27 ( optional millisecs
|
||||
reg += '([:.])'; // 28 :.
|
||||
reg += '(\\d{3})'; // 29 3 * 0-9
|
||||
reg += ')?'; // ) end optional millisecs
|
||||
reg += '('; // 30 ( optional timezone matches
|
||||
reg += '([+-]\\d{2}[:.]\\d{2}[:.]\\d{2})'; // 31 +- 0-9 0-9 :. 0-9 0-9 :. 0-9 0-9 e.g. +00:00:00
|
||||
reg += '|'; // or
|
||||
reg += '([+-]\\d{2}[:.]\\d{2})'; // 32 +- 0-9 0-9 :. 0-9 0-9 e.g. +00:00
|
||||
reg += '|'; // or
|
||||
reg += '([+-]\\d{6})'; // 33 +- 6 * 0-9 e.g. +000000
|
||||
reg += '|'; // or
|
||||
reg += '([+-]\\d{4})'; // 34 +- 4 * 0-9 e.g. +0000
|
||||
reg += '|'; // or
|
||||
reg += '(Z)'; // 35 Z
|
||||
reg += '|'; // or
|
||||
reg += '([+-]\\d{2})'; // 36 +- 0-9 0-9 e.g. +00
|
||||
reg += '|'; // or
|
||||
reg += '('; // 37 ( string timezone
|
||||
reg += '(\\s)'; // 38 optional space
|
||||
reg += '(\\w{1,4})'; // 39 1-4 letters e.g UTC
|
||||
reg += ')'; // ) end string timezone
|
||||
reg += ')?'; // ) end optional timezone
|
||||
|
||||
console.log('guessTimeFormat: time format regex: ' + reg);
|
||||
|
||||
match = time.match(new RegExp(reg));
|
||||
// console.log(match);
|
||||
if (match) {
|
||||
// add the standard data and time
|
||||
if (match[2] !== undefined) {
|
||||
// match yyyy-[MM MMM]-dd
|
||||
format += 'yyyy';
|
||||
format += match[4];
|
||||
if (match[6] !== undefined) {
|
||||
format += 'MM';
|
||||
} else if (match[7] !== undefined) {
|
||||
format += 'MMM';
|
||||
}
|
||||
format += match[8];
|
||||
format += 'dd';
|
||||
} else if (match[10] !== undefined) {
|
||||
// match dd-MM[M]-yyyy or MM[M]-dd-yyyy
|
||||
|
||||
if (match[13] !== undefined) {
|
||||
// found a word as the first part
|
||||
// e.g., Jan 01 2000
|
||||
format += 'MMM';
|
||||
format += match[14];
|
||||
format += 'dd';
|
||||
} else if (match[17] !== undefined) {
|
||||
// found a word as the second part
|
||||
// e.g., 01 Jan 2000
|
||||
format += 'dd';
|
||||
format += match[14];
|
||||
format += 'MMM';
|
||||
} else {
|
||||
// check to see if the first number is greater than 12
|
||||
// e.g., 24/03/1981
|
||||
// this is a guess, but is only thing we can do
|
||||
// with one line from the data set
|
||||
if (match[12] !== undefined && +match[12] > 12) {
|
||||
format += 'dd';
|
||||
format += match[14];
|
||||
format += 'MM';
|
||||
} else {
|
||||
// default to US format.
|
||||
format += 'MM';
|
||||
format += match[14];
|
||||
format += 'dd';
|
||||
}
|
||||
}
|
||||
|
||||
format += match[18];
|
||||
format += 'yyyy';
|
||||
}
|
||||
|
||||
// optional T or space splitter
|
||||
// wrap T in single quotes
|
||||
format += match[20] === 'T' ? "'" + match[20] + "'" : match[20];
|
||||
format += 'HH';
|
||||
format += match[22];
|
||||
format += 'mm';
|
||||
|
||||
// add optional secs
|
||||
if (match[24] !== undefined) {
|
||||
format += match[25];
|
||||
format += 'ss';
|
||||
}
|
||||
|
||||
// add optional millisecs
|
||||
if (match[27] !== undefined) {
|
||||
// .000
|
||||
format += match[28];
|
||||
format += 'SSS';
|
||||
}
|
||||
|
||||
// add optional time zone
|
||||
if (match[31] !== undefined) {
|
||||
// +00:00:00
|
||||
format += 'XXXXX';
|
||||
} else if (match[32] !== undefined) {
|
||||
// +00:00
|
||||
format += 'XXX';
|
||||
} else if (match[33] !== undefined) {
|
||||
// +000000
|
||||
format += 'XXXX';
|
||||
} else if (match[34] !== undefined) {
|
||||
// +0000
|
||||
format += 'Z';
|
||||
} else if (match[35] !== undefined || match[36] !== undefined) {
|
||||
// Z or +00
|
||||
format += 'X';
|
||||
} else if (match[37] !== undefined) {
|
||||
// UTC
|
||||
if (match[38] !== undefined) {
|
||||
// add optional space char
|
||||
format += match[38];
|
||||
}
|
||||
// add time zone name, up to 4 chars
|
||||
for (let i = 0; i < match[39].length; i++) {
|
||||
format += 'z';
|
||||
}
|
||||
}
|
||||
matched = true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// time field is a number, so probably epoch or epoch_ms
|
||||
if (time > 10000000000) {
|
||||
// probably milliseconds
|
||||
format = 'epoch_ms';
|
||||
} else {
|
||||
// probably seconds
|
||||
format = 'epoch';
|
||||
}
|
||||
matched = true;
|
||||
}
|
||||
|
||||
if (matched) {
|
||||
console.log('guessTimeFormat: guessed time format: ', format);
|
||||
} else {
|
||||
console.log('guessTimeFormat: time format could not be guessed from: ' + time);
|
||||
}
|
||||
|
||||
return format;
|
||||
}
|
||||
|
||||
// add commas to large numbers
|
||||
// Number.toLocaleString is not supported on safari
|
||||
export function toLocaleString(x) {
|
||||
|
|
193
x-pack/plugins/ml/public/application/util/string_utils.test.ts
Normal file
193
x-pack/plugins/ml/public/application/util/string_utils.test.ts
Normal file
|
@ -0,0 +1,193 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import {
|
||||
replaceStringTokens,
|
||||
detectorToString,
|
||||
sortByKey,
|
||||
toLocaleString,
|
||||
mlEscape,
|
||||
escapeForElasticsearchQuery,
|
||||
} from './string_utils';
|
||||
|
||||
describe('ML - string utils', () => {
|
||||
describe('replaceStringTokens', () => {
|
||||
const testRecord = {
|
||||
job_id: 'test_job',
|
||||
result_type: 'record',
|
||||
probability: 0.0191711,
|
||||
record_score: 4.3,
|
||||
bucket_span: 300,
|
||||
detector_index: 0,
|
||||
timestamp: 1454890500000,
|
||||
function: 'mean',
|
||||
function_description: 'mean',
|
||||
field_name: 'responsetime',
|
||||
user: "Des O'Connor",
|
||||
testfield1: 'test$tring=[+-?]',
|
||||
testfield2: '{<()>}',
|
||||
testfield3: 'host=\\\\test@uk.dev',
|
||||
};
|
||||
|
||||
test('returns correct values without URI encoding', () => {
|
||||
const result = replaceStringTokens('user=$user$,time=$timestamp$', testRecord, false);
|
||||
expect(result).toBe("user=Des O'Connor,time=1454890500000");
|
||||
});
|
||||
|
||||
test('returns correct values for missing token without URI encoding', () => {
|
||||
const result = replaceStringTokens('user=$username$,time=$timestamp$', testRecord, false);
|
||||
expect(result).toBe('user=$username$,time=1454890500000');
|
||||
});
|
||||
|
||||
test('returns correct values with URI encoding', () => {
|
||||
const testString1 = 'https://www.google.co.uk/webhp#q=$testfield1$';
|
||||
const testString2 = 'https://www.google.co.uk/webhp#q=$testfield2$';
|
||||
const testString3 = 'https://www.google.co.uk/webhp#q=$testfield3$';
|
||||
const testString4 = 'https://www.google.co.uk/webhp#q=$user$';
|
||||
|
||||
const result1 = replaceStringTokens(testString1, testRecord, true);
|
||||
const result2 = replaceStringTokens(testString2, testRecord, true);
|
||||
const result3 = replaceStringTokens(testString3, testRecord, true);
|
||||
const result4 = replaceStringTokens(testString4, testRecord, true);
|
||||
|
||||
expect(result1).toBe('https://www.google.co.uk/webhp#q=test%24tring%3D%5B%2B-%3F%5D');
|
||||
expect(result2).toBe('https://www.google.co.uk/webhp#q=%7B%3C()%3E%7D');
|
||||
expect(result3).toBe('https://www.google.co.uk/webhp#q=host%3D%5C%5Ctest%40uk.dev');
|
||||
expect(result4).toBe("https://www.google.co.uk/webhp#q=Des%20O'Connor");
|
||||
});
|
||||
|
||||
test('returns correct values for missing token with URI encoding', () => {
|
||||
const testString = 'https://www.google.co.uk/webhp#q=$username$&time=$timestamp$';
|
||||
const result = replaceStringTokens(testString, testRecord, true);
|
||||
expect(result).toBe('https://www.google.co.uk/webhp#q=$username$&time=1454890500000');
|
||||
});
|
||||
});
|
||||
|
||||
describe('detectorToString', () => {
|
||||
test('returns the correct descriptions for detectors', () => {
|
||||
const detector1 = {
|
||||
function: 'count',
|
||||
};
|
||||
|
||||
const detector2 = {
|
||||
function: 'count',
|
||||
by_field_name: 'airline',
|
||||
use_null: false,
|
||||
};
|
||||
|
||||
const detector3 = {
|
||||
function: 'mean',
|
||||
field_name: 'CPUUtilization',
|
||||
partition_field_name: 'region',
|
||||
by_field_name: 'host',
|
||||
over_field_name: 'user',
|
||||
exclude_frequent: 'all',
|
||||
};
|
||||
|
||||
expect(detectorToString(detector1)).toBe('count');
|
||||
expect(detectorToString(detector2)).toBe('count by airline use_null=false');
|
||||
expect(detectorToString(detector3)).toBe(
|
||||
'mean(CPUUtilization) by host over user partition_field_name=region exclude_frequent=all'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sortByKey', () => {
|
||||
const obj = {
|
||||
zebra: 'stripes',
|
||||
giraffe: 'neck',
|
||||
elephant: 'trunk',
|
||||
};
|
||||
|
||||
const valueComparator = function(value: string) {
|
||||
return value;
|
||||
};
|
||||
|
||||
test('returns correct ordering with default comparator', () => {
|
||||
const result = sortByKey(obj, false);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).toBe('elephant');
|
||||
expect(keys[1]).toBe('giraffe');
|
||||
expect(keys[2]).toBe('zebra');
|
||||
});
|
||||
|
||||
test('returns correct ordering with default comparator and order reversed', () => {
|
||||
const result = sortByKey(obj, true);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).toBe('zebra');
|
||||
expect(keys[1]).toBe('giraffe');
|
||||
expect(keys[2]).toBe('elephant');
|
||||
});
|
||||
|
||||
test('returns correct ordering with comparator', () => {
|
||||
const result = sortByKey(obj, false, valueComparator);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).toBe('giraffe');
|
||||
expect(keys[1]).toBe('zebra');
|
||||
expect(keys[2]).toBe('elephant');
|
||||
});
|
||||
|
||||
test('returns correct ordering with comparator and order reversed', () => {
|
||||
const result = sortByKey(obj, true, valueComparator);
|
||||
const keys = Object.keys(result);
|
||||
expect(keys[0]).toBe('elephant');
|
||||
expect(keys[1]).toBe('zebra');
|
||||
expect(keys[2]).toBe('giraffe');
|
||||
});
|
||||
});
|
||||
|
||||
describe('toLocaleString', () => {
|
||||
test('returns correct comma placement for large numbers', () => {
|
||||
expect(toLocaleString(1)).toBe('1');
|
||||
expect(toLocaleString(10)).toBe('10');
|
||||
expect(toLocaleString(100)).toBe('100');
|
||||
expect(toLocaleString(1000)).toBe('1,000');
|
||||
expect(toLocaleString(10000)).toBe('10,000');
|
||||
expect(toLocaleString(100000)).toBe('100,000');
|
||||
expect(toLocaleString(1000000)).toBe('1,000,000');
|
||||
expect(toLocaleString(10000000)).toBe('10,000,000');
|
||||
expect(toLocaleString(100000000)).toBe('100,000,000');
|
||||
expect(toLocaleString(1000000000)).toBe('1,000,000,000');
|
||||
});
|
||||
});
|
||||
|
||||
describe('mlEscape', () => {
|
||||
test('returns correct escaping of characters', () => {
|
||||
expect(mlEscape('foo&bar')).toBe('foo&bar');
|
||||
expect(mlEscape('foo<bar')).toBe('foo<bar');
|
||||
expect(mlEscape('foo>bar')).toBe('foo>bar');
|
||||
expect(mlEscape('foo"bar')).toBe('foo"bar');
|
||||
expect(mlEscape("foo'bar")).toBe('foo'bar');
|
||||
expect(mlEscape('foo/bar')).toBe('foo/bar');
|
||||
});
|
||||
});
|
||||
|
||||
describe('escapeForElasticsearchQuery', () => {
|
||||
test('returns correct escaping of reserved elasticsearch characters', () => {
|
||||
expect(escapeForElasticsearchQuery('foo+bar')).toBe('foo\\+bar');
|
||||
expect(escapeForElasticsearchQuery('foo-bar')).toBe('foo\\-bar');
|
||||
expect(escapeForElasticsearchQuery('foo=bar')).toBe('foo\\=bar');
|
||||
expect(escapeForElasticsearchQuery('foo&&bar')).toBe('foo\\&\\&bar');
|
||||
expect(escapeForElasticsearchQuery('foo||bar')).toBe('foo\\|\\|bar');
|
||||
expect(escapeForElasticsearchQuery('foo>bar')).toBe('foo\\>bar');
|
||||
expect(escapeForElasticsearchQuery('foo<bar')).toBe('foo\\<bar');
|
||||
expect(escapeForElasticsearchQuery('foo!bar')).toBe('foo\\!bar');
|
||||
expect(escapeForElasticsearchQuery('foo(bar')).toBe('foo\\(bar');
|
||||
expect(escapeForElasticsearchQuery('foo)bar')).toBe('foo\\)bar');
|
||||
expect(escapeForElasticsearchQuery('foo{bar')).toBe('foo\\{bar');
|
||||
expect(escapeForElasticsearchQuery('foo[bar')).toBe('foo\\[bar');
|
||||
expect(escapeForElasticsearchQuery('foo]bar')).toBe('foo\\]bar');
|
||||
expect(escapeForElasticsearchQuery('foo^bar')).toBe('foo\\^bar');
|
||||
expect(escapeForElasticsearchQuery('foo"bar')).toBe('foo\\"bar');
|
||||
expect(escapeForElasticsearchQuery('foo~bar')).toBe('foo\\~bar');
|
||||
expect(escapeForElasticsearchQuery('foo*bar')).toBe('foo\\*bar');
|
||||
expect(escapeForElasticsearchQuery('foo?bar')).toBe('foo\\?bar');
|
||||
expect(escapeForElasticsearchQuery('foo:bar')).toBe('foo\\:bar');
|
||||
expect(escapeForElasticsearchQuery('foo\\bar')).toBe('foo\\\\bar');
|
||||
expect(escapeForElasticsearchQuery('foo/bar')).toBe('foo\\/bar');
|
||||
});
|
||||
});
|
||||
});
|
Loading…
Reference in a new issue