[ML] Removing full lodash library imports (#74742)

* [ML] Removing full lodash library imports

* more has

* fixing missing filter

* removing _

* removing unused file

* removing first use

* removing comment
This commit is contained in:
James Gowdy 2020-08-12 09:24:43 +01:00 committed by GitHub
parent 506e9537bf
commit a81059b14b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 365 additions and 371 deletions

View file

@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import isEmpty from 'lodash/isEmpty';
import isEqual from 'lodash/isEqual';
import each from 'lodash/each';
import pick from 'lodash/pick';
import semver from 'semver'; import semver from 'semver';
import moment, { Duration } from 'moment'; import moment, { Duration } from 'moment';
// @ts-ignore // @ts-ignore
@ -307,7 +311,7 @@ export function getSafeAggregationName(fieldName: string, index: number): string
export function uniqWithIsEqual<T extends any[]>(arr: T): T { export function uniqWithIsEqual<T extends any[]>(arr: T): T {
return arr.reduce((dedupedArray, value) => { return arr.reduce((dedupedArray, value) => {
if (dedupedArray.filter((compareValue: any) => _.isEqual(compareValue, value)).length === 0) { if (dedupedArray.filter((compareValue: any) => isEqual(compareValue, value)).length === 0) {
dedupedArray.push(value); dedupedArray.push(value);
} }
return dedupedArray; return dedupedArray;
@ -328,7 +332,7 @@ export function basicJobValidation(
if (job) { if (job) {
// Job details // Job details
if (_.isEmpty(job.job_id)) { if (isEmpty(job.job_id)) {
messages.push({ id: 'job_id_empty' }); messages.push({ id: 'job_id_empty' });
valid = false; valid = false;
} else if (isJobIdValid(job.job_id) === false) { } else if (isJobIdValid(job.job_id) === false) {
@ -350,7 +354,7 @@ export function basicJobValidation(
// Analysis Configuration // Analysis Configuration
if (job.analysis_config.categorization_filters) { if (job.analysis_config.categorization_filters) {
let v = true; let v = true;
_.each(job.analysis_config.categorization_filters, (d) => { each(job.analysis_config.categorization_filters, (d) => {
try { try {
new RegExp(d); new RegExp(d);
} catch (e) { } catch (e) {
@ -382,8 +386,8 @@ export function basicJobValidation(
valid = false; valid = false;
} else { } else {
let v = true; let v = true;
_.each(job.analysis_config.detectors, (d) => { each(job.analysis_config.detectors, (d) => {
if (_.isEmpty(d.function)) { if (isEmpty(d.function)) {
v = false; v = false;
} }
}); });
@ -400,7 +404,7 @@ export function basicJobValidation(
// create an array of objects with a subset of the attributes // create an array of objects with a subset of the attributes
// where we want to make sure they are not the same across detectors // where we want to make sure they are not the same across detectors
const compareSubSet = job.analysis_config.detectors.map((d) => const compareSubSet = job.analysis_config.detectors.map((d) =>
_.pick(d, [ pick(d, [
'function', 'function',
'field_name', 'field_name',
'by_field_name', 'by_field_name',

View file

@ -9,7 +9,9 @@
* This version supports both fetching the annotations by itself (used in the jobs list) and * This version supports both fetching the annotations by itself (used in the jobs list) and
* getting the annotations via props (used in Anomaly Explorer and Single Series Viewer). * getting the annotations via props (used in Anomaly Explorer and Single Series Viewer).
*/ */
import _ from 'lodash';
import uniq from 'lodash/uniq';
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import rison from 'rison-node'; import rison from 'rison-node';
import React, { Component, Fragment } from 'react'; import React, { Component, Fragment } from 'react';
@ -255,18 +257,18 @@ export class AnnotationsTable extends Component {
// if the annotation is at the series level // if the annotation is at the series level
// then pass the partitioning field(s) and detector index to the Single Metric Viewer // then pass the partitioning field(s) and detector index to the Single Metric Viewer
if (_.has(annotation, 'detector_index')) { if (annotation.detector_index !== undefined) {
mlTimeSeriesExplorer.detectorIndex = annotation.detector_index; mlTimeSeriesExplorer.detectorIndex = annotation.detector_index;
} }
if (_.has(annotation, 'partition_field_value')) { if (annotation.partition_field_value !== undefined) {
entityCondition[annotation.partition_field_name] = annotation.partition_field_value; entityCondition[annotation.partition_field_name] = annotation.partition_field_value;
} }
if (_.has(annotation, 'over_field_value')) { if (annotation.over_field_value !== undefined) {
entityCondition[annotation.over_field_name] = annotation.over_field_value; entityCondition[annotation.over_field_name] = annotation.over_field_value;
} }
if (_.has(annotation, 'by_field_value')) { if (annotation.by_field_value !== undefined) {
// Note that analyses with by and over fields, will have a top-level by_field_name, // Note that analyses with by and over fields, will have a top-level by_field_name,
// but the by_field_value(s) will be in the nested causes array. // but the by_field_value(s) will be in the nested causes array.
entityCondition[annotation.by_field_name] = annotation.by_field_value; entityCondition[annotation.by_field_name] = annotation.by_field_value;
@ -421,7 +423,7 @@ export class AnnotationsTable extends Component {
}, },
]; ];
const jobIds = _.uniq(annotations.map((a) => a.job_id)); const jobIds = uniq(annotations.map((a) => a.job_id));
if (jobIds.length > 1) { if (jobIds.length > 1) {
columns.unshift({ columns.unshift({
field: 'job_id', field: 'job_id',

View file

@ -9,7 +9,7 @@
*/ */
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import _ from 'lodash'; import get from 'lodash/get';
import React, { Component } from 'react'; import React, { Component } from 'react';
@ -70,7 +70,7 @@ class AnomaliesTable extends Component {
} else { } else {
const examples = const examples =
item.entityName === 'mlcategory' item.entityName === 'mlcategory'
? _.get(this.props.tableData, ['examplesByJobId', item.jobId, item.entityValue]) ? get(this.props.tableData, ['examplesByJobId', item.jobId, item.entityValue])
: undefined; : undefined;
let definition = undefined; let definition = undefined;

View file

@ -7,7 +7,7 @@
import { EuiButtonIcon, EuiLink, EuiScreenReaderOnly } from '@elastic/eui'; import { EuiButtonIcon, EuiLink, EuiScreenReaderOnly } from '@elastic/eui';
import React from 'react'; import React from 'react';
import _ from 'lodash'; import get from 'lodash/get';
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react'; import { FormattedMessage } from '@kbn/i18n/react';
@ -251,7 +251,7 @@ export function getColumns(
sortable: false, sortable: false,
truncateText: true, truncateText: true,
render: (item) => { render: (item) => {
const examples = _.get(examplesByJobId, [item.jobId, item.entityValue], []); const examples = get(examplesByJobId, [item.jobId, item.entityValue], []);
return ( return (
<EuiLink <EuiLink
className="mlAnomalyCategoryExamples__link" className="mlAnomalyCategoryExamples__link"

View file

@ -11,7 +11,8 @@
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import React, { Component, Fragment } from 'react'; import React, { Component, Fragment } from 'react';
import _ from 'lodash'; import get from 'lodash/get';
import pick from 'lodash/pick';
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react'; import { FormattedMessage } from '@kbn/i18n/react';
@ -63,16 +64,12 @@ function getDetailsItems(anomaly, examples, filter) {
} }
} else { } else {
causes = sourceCauses.map((cause) => { causes = sourceCauses.map((cause) => {
const simplified = _.pick(cause, 'typical', 'actual', 'probability'); const simplified = pick(cause, 'typical', 'actual', 'probability');
// Get the 'entity field name/value' to display in the cause - // Get the 'entity field name/value' to display in the cause -
// For by and over, use by_field_name/value (over_field_name/value are in the top level fields) // For by and over, use by_field_name/value (over_field_name/value are in the top level fields)
// For just an 'over' field - the over_field_name/value appear in both top level and cause. // For just an 'over' field - the over_field_name/value appear in both top level and cause.
simplified.entityName = _.has(cause, 'by_field_name') simplified.entityName = cause.by_field_name ? cause.by_field_name : cause.over_field_name;
? cause.by_field_name simplified.entityValue = cause.by_field_value ? cause.by_field_value : cause.over_field_value;
: cause.over_field_name;
simplified.entityValue = _.has(cause, 'by_field_value')
? cause.by_field_value
: cause.over_field_value;
return simplified; return simplified;
}); });
} }
@ -471,7 +468,7 @@ export class AnomalyDetails extends Component {
renderDetails() { renderDetails() {
const detailItems = getDetailsItems(this.props.anomaly, this.props.examples, this.props.filter); const detailItems = getDetailsItems(this.props.anomaly, this.props.examples, this.props.filter);
const isInterimResult = _.get(this.props.anomaly, 'source.is_interim', false); const isInterimResult = get(this.props.anomaly, 'source.is_interim', false);
return ( return (
<React.Fragment> <React.Fragment>
<EuiText size="xs"> <EuiText size="xs">

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import each from 'lodash/each';
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import React, { Component } from 'react'; import React, { Component } from 'react';
@ -148,7 +148,7 @@ export class InfluencersCell extends Component {
const influencers = []; const influencers = [];
recordInfluencers.forEach((influencer) => { recordInfluencers.forEach((influencer) => {
_.each(influencer, (influencerFieldValue, influencerFieldName) => { each(influencer, (influencerFieldValue, influencerFieldName) => {
influencers.push({ influencers.push({
influencerFieldName, influencerFieldName,
influencerFieldValue, influencerFieldValue,

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import moment from 'moment'; import moment from 'moment';
import rison from 'rison-node'; import rison from 'rison-node';
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
@ -58,7 +58,7 @@ class LinksMenuUI extends Component {
// If url_value contains $earliest$ and $latest$ tokens, add in times to the source record. // If url_value contains $earliest$ and $latest$ tokens, add in times to the source record.
// Create a copy of the record as we are adding properties into it. // Create a copy of the record as we are adding properties into it.
const record = _.cloneDeep(anomaly.source); const record = cloneDeep(anomaly.source);
const timestamp = record.timestamp; const timestamp = record.timestamp;
const configuredUrlValue = customUrl.url_value; const configuredUrlValue = customUrl.url_value;
const timeRangeInterval = parseInterval(customUrl.time_range); const timeRangeInterval = parseInterval(customUrl.time_range);
@ -99,7 +99,7 @@ class LinksMenuUI extends Component {
if ( if (
(configuredUrlValue.includes('$mlcategoryterms$') || (configuredUrlValue.includes('$mlcategoryterms$') ||
configuredUrlValue.includes('$mlcategoryregex$')) && configuredUrlValue.includes('$mlcategoryregex$')) &&
_.has(record, 'mlcategory') record.mlcategory !== undefined
) { ) {
const jobId = record.job_id; const jobId = record.job_id;
@ -156,15 +156,15 @@ class LinksMenuUI extends Component {
// Extract the by, over and partition fields for the record. // Extract the by, over and partition fields for the record.
const entityCondition = {}; const entityCondition = {};
if (_.has(record, 'partition_field_value')) { if (record.partition_field_value !== undefined) {
entityCondition[record.partition_field_name] = record.partition_field_value; entityCondition[record.partition_field_name] = record.partition_field_value;
} }
if (_.has(record, 'over_field_value')) { if (record.over_field_value !== undefined) {
entityCondition[record.over_field_name] = record.over_field_value; entityCondition[record.over_field_name] = record.over_field_value;
} }
if (_.has(record, 'by_field_value')) { if (record.by_field_value !== undefined) {
// Note that analyses with by and over fields, will have a top-level by_field_name, // Note that analyses with by and over fields, will have a top-level by_field_name,
// but the by_field_value(s) will be in the nested causes array. // but the by_field_value(s) will be in the nested causes array.
// TODO - drilldown from cause in expanded row only? // TODO - drilldown from cause in expanded row only?

View file

@ -9,8 +9,6 @@
* the raw data in the Explorer dashboard. * the raw data in the Explorer dashboard.
*/ */
import _ from 'lodash';
import { parseInterval } from '../../../../common/util/parse_interval'; import { parseInterval } from '../../../../common/util/parse_interval';
import { getEntityFieldList } from '../../../../common/util/anomaly_utils'; import { getEntityFieldList } from '../../../../common/util/anomaly_utils';
import { buildConfigFromDetector } from '../../util/chart_config_builder'; import { buildConfigFromDetector } from '../../util/chart_config_builder';
@ -30,7 +28,7 @@ export function buildConfig(record) {
config.detectorLabel = record.function; config.detectorLabel = record.function;
if ( if (
_.has(mlJobService.detectorsByJob, record.job_id) && mlJobService.detectorsByJob[record.job_id] !== undefined &&
detectorIndex < mlJobService.detectorsByJob[record.job_id].length detectorIndex < mlJobService.detectorsByJob[record.job_id].length
) { ) {
config.detectorLabel = config.detectorLabel =

View file

@ -11,8 +11,8 @@
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import React from 'react'; import React from 'react';
import { i18n } from '@kbn/i18n';
import _ from 'lodash';
import d3 from 'd3'; import d3 from 'd3';
import $ from 'jquery'; import $ from 'jquery';
import moment from 'moment'; import moment from 'moment';
@ -33,8 +33,6 @@ import { mlFieldFormatService } from '../../services/field_format_service';
import { CHART_TYPE } from '../explorer_constants'; import { CHART_TYPE } from '../explorer_constants';
import { i18n } from '@kbn/i18n';
const CONTENT_WRAPPER_HEIGHT = 215; const CONTENT_WRAPPER_HEIGHT = 215;
// If a rare/event-distribution chart has a cardinality of 10 or less, // If a rare/event-distribution chart has a cardinality of 10 or less,
@ -403,7 +401,7 @@ export class ExplorerChartDistribution extends React.Component {
.attr('cy', (d) => lineChartYScale(d[CHART_Y_ATTRIBUTE])) .attr('cy', (d) => lineChartYScale(d[CHART_Y_ATTRIBUTE]))
.attr('class', (d) => { .attr('class', (d) => {
let markerClass = 'metric-value'; let markerClass = 'metric-value';
if (_.has(d, 'anomalyScore') && Number(d.anomalyScore) >= severity) { if (d.anomalyScore !== undefined && Number(d.anomalyScore) >= severity) {
markerClass += ' anomaly-marker '; markerClass += ' anomaly-marker ';
markerClass += getSeverityWithLow(d.anomalyScore).id; markerClass += getSeverityWithLow(d.anomalyScore).id;
} }
@ -444,7 +442,7 @@ export class ExplorerChartDistribution extends React.Component {
const tooltipData = [{ label: formattedDate }]; const tooltipData = [{ label: formattedDate }];
const seriesKey = config.detectorLabel; const seriesKey = config.detectorLabel;
if (_.has(marker, 'entity')) { if (marker.entity !== undefined) {
tooltipData.push({ tooltipData.push({
label: i18n.translate('xpack.ml.explorer.distributionChart.entityLabel', { label: i18n.translate('xpack.ml.explorer.distributionChart.entityLabel', {
defaultMessage: 'entity', defaultMessage: 'entity',
@ -457,7 +455,7 @@ export class ExplorerChartDistribution extends React.Component {
}); });
} }
if (_.has(marker, 'anomalyScore')) { if (marker.anomalyScore !== undefined) {
const score = parseInt(marker.anomalyScore); const score = parseInt(marker.anomalyScore);
const displayScore = score > 0 ? score : '< 1'; const displayScore = score > 0 ? score : '< 1';
tooltipData.push({ tooltipData.push({
@ -494,7 +492,7 @@ export class ExplorerChartDistribution extends React.Component {
valueAccessor: 'typical', valueAccessor: 'typical',
}); });
} }
if (typeof marker.byFieldName !== 'undefined' && _.has(marker, 'numberOfCauses')) { if (typeof marker.byFieldName !== 'undefined' && marker.numberOfCauses !== undefined) {
tooltipData.push({ tooltipData.push({
label: i18n.translate( label: i18n.translate(
'xpack.ml.explorer.distributionChart.unusualByFieldValuesLabel', 'xpack.ml.explorer.distributionChart.unusualByFieldValuesLabel',
@ -532,7 +530,7 @@ export class ExplorerChartDistribution extends React.Component {
}); });
} }
if (_.has(marker, 'scheduledEvents')) { if (marker.scheduledEvents !== undefined) {
marker.scheduledEvents.forEach((scheduledEvent, i) => { marker.scheduledEvents.forEach((scheduledEvent, i) => {
tooltipData.push({ tooltipData.push({
label: i18n.translate( label: i18n.translate(

View file

@ -12,10 +12,10 @@
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import React from 'react'; import React from 'react';
import _ from 'lodash';
import d3 from 'd3'; import d3 from 'd3';
import $ from 'jquery'; import $ from 'jquery';
import moment from 'moment'; import moment from 'moment';
import { i18n } from '@kbn/i18n';
import { formatHumanReadableDateTime } from '../../util/date_utils'; import { formatHumanReadableDateTime } from '../../util/date_utils';
import { formatValue } from '../../formatters/format_value'; import { formatValue } from '../../formatters/format_value';
@ -40,8 +40,6 @@ import { getTimeBucketsFromCache } from '../../util/time_buckets';
import { mlEscape } from '../../util/string_utils'; import { mlEscape } from '../../util/string_utils';
import { mlFieldFormatService } from '../../services/field_format_service'; import { mlFieldFormatService } from '../../services/field_format_service';
import { i18n } from '@kbn/i18n';
const CONTENT_WRAPPER_HEIGHT = 215; const CONTENT_WRAPPER_HEIGHT = 215;
const CONTENT_WRAPPER_CLASS = 'ml-explorer-chart-content-wrapper'; const CONTENT_WRAPPER_CLASS = 'ml-explorer-chart-content-wrapper';
@ -307,7 +305,7 @@ export class ExplorerChartSingleMetric extends React.Component {
.on('mouseout', () => tooltipService.hide()); .on('mouseout', () => tooltipService.hide());
const isAnomalyVisible = (d) => const isAnomalyVisible = (d) =>
_.has(d, 'anomalyScore') && Number(d.anomalyScore) >= severity; d.anomalyScore !== undefined && Number(d.anomalyScore) >= severity;
// Update all dots to new positions. // Update all dots to new positions.
dots dots
@ -380,7 +378,7 @@ export class ExplorerChartSingleMetric extends React.Component {
const tooltipData = [{ label: formattedDate }]; const tooltipData = [{ label: formattedDate }];
const seriesKey = config.detectorLabel; const seriesKey = config.detectorLabel;
if (_.has(marker, 'anomalyScore')) { if (marker.anomalyScore !== undefined) {
const score = parseInt(marker.anomalyScore); const score = parseInt(marker.anomalyScore);
const displayScore = score > 0 ? score : '< 1'; const displayScore = score > 0 ? score : '< 1';
tooltipData.push({ tooltipData.push({
@ -411,7 +409,7 @@ export class ExplorerChartSingleMetric extends React.Component {
// Show actual/typical when available except for rare detectors. // Show actual/typical when available except for rare detectors.
// Rare detectors always have 1 as actual and the probability as typical. // Rare detectors always have 1 as actual and the probability as typical.
// Exposing those values in the tooltip with actual/typical labels might irritate users. // Exposing those values in the tooltip with actual/typical labels might irritate users.
if (_.has(marker, 'actual') && config.functionDescription !== 'rare') { if (marker.actual !== undefined && config.functionDescription !== 'rare') {
// Display the record actual in preference to the chart value, which may be // Display the record actual in preference to the chart value, which may be
// different depending on the aggregation interval of the chart. // different depending on the aggregation interval of the chart.
tooltipData.push({ tooltipData.push({
@ -445,7 +443,7 @@ export class ExplorerChartSingleMetric extends React.Component {
}, },
valueAccessor: 'value', valueAccessor: 'value',
}); });
if (_.has(marker, 'byFieldName') && _.has(marker, 'numberOfCauses')) { if (marker.byFieldName !== undefined && marker.numberOfCauses !== undefined) {
tooltipData.push({ tooltipData.push({
label: i18n.translate( label: i18n.translate(
'xpack.ml.explorer.distributionChart.unusualByFieldValuesLabel', 'xpack.ml.explorer.distributionChart.unusualByFieldValuesLabel',
@ -483,7 +481,7 @@ export class ExplorerChartSingleMetric extends React.Component {
}); });
} }
if (_.has(marker, 'scheduledEvents')) { if (marker.scheduledEvents !== undefined) {
tooltipData.push({ tooltipData.push({
label: i18n.translate('xpack.ml.explorer.singleMetricChart.scheduledEventsLabel', { label: i18n.translate('xpack.ml.explorer.singleMetricChart.scheduledEventsLabel', {
defaultMessage: 'Scheduled events', defaultMessage: 'Scheduled events',

View file

@ -11,7 +11,12 @@
* and manages the layout of the charts in the containing div. * and manages the layout of the charts in the containing div.
*/ */
import _ from 'lodash'; import get from 'lodash/get';
import each from 'lodash/each';
import find from 'lodash/find';
import sortBy from 'lodash/sortBy';
import map from 'lodash/map';
import reduce from 'lodash/reduce';
import { buildConfig } from './explorer_chart_config_builder'; import { buildConfig } from './explorer_chart_config_builder';
import { chartLimits, getChartType } from '../../util/chart_utils'; import { chartLimits, getChartType } from '../../util/chart_utils';
@ -113,7 +118,7 @@ export const anomalyDataChange = function (
// If source data can be plotted, use that, otherwise model plot will be available. // If source data can be plotted, use that, otherwise model plot will be available.
const useSourceData = isSourceDataChartableForDetector(job, detectorIndex); const useSourceData = isSourceDataChartableForDetector(job, detectorIndex);
if (useSourceData === true) { if (useSourceData === true) {
const datafeedQuery = _.get(config, 'datafeedConfig.query', null); const datafeedQuery = get(config, 'datafeedConfig.query', null);
return mlResultsService return mlResultsService
.getMetricData( .getMetricData(
config.datafeedConfig.indices, config.datafeedConfig.indices,
@ -131,8 +136,8 @@ export const anomalyDataChange = function (
// Extract the partition, by, over fields on which to filter. // Extract the partition, by, over fields on which to filter.
const criteriaFields = []; const criteriaFields = [];
const detector = job.analysis_config.detectors[detectorIndex]; const detector = job.analysis_config.detectors[detectorIndex];
if (_.has(detector, 'partition_field_name')) { if (detector.partition_field_name !== undefined) {
const partitionEntity = _.find(entityFields, { const partitionEntity = find(entityFields, {
fieldName: detector.partition_field_name, fieldName: detector.partition_field_name,
}); });
if (partitionEntity !== undefined) { if (partitionEntity !== undefined) {
@ -143,8 +148,8 @@ export const anomalyDataChange = function (
} }
} }
if (_.has(detector, 'over_field_name')) { if (detector.over_field_name !== undefined) {
const overEntity = _.find(entityFields, { fieldName: detector.over_field_name }); const overEntity = find(entityFields, { fieldName: detector.over_field_name });
if (overEntity !== undefined) { if (overEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'over_field_name', fieldValue: overEntity.fieldName }, { fieldName: 'over_field_name', fieldValue: overEntity.fieldName },
@ -153,8 +158,8 @@ export const anomalyDataChange = function (
} }
} }
if (_.has(detector, 'by_field_name')) { if (detector.by_field_name !== undefined) {
const byEntity = _.find(entityFields, { fieldName: detector.by_field_name }); const byEntity = find(entityFields, { fieldName: detector.by_field_name });
if (byEntity !== undefined) { if (byEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'by_field_name', fieldValue: byEntity.fieldName }, { fieldName: 'by_field_name', fieldValue: byEntity.fieldName },
@ -236,7 +241,7 @@ export const anomalyDataChange = function (
filterField = config.entityFields.find((f) => f.fieldType === 'partition'); filterField = config.entityFields.find((f) => f.fieldType === 'partition');
} }
const datafeedQuery = _.get(config, 'datafeedConfig.query', null); const datafeedQuery = get(config, 'datafeedConfig.query', null);
return mlResultsService.getEventDistributionData( return mlResultsService.getEventDistributionData(
config.datafeedConfig.indices, config.datafeedConfig.indices,
splitField, splitField,
@ -285,7 +290,7 @@ export const anomalyDataChange = function (
if (eventDistribution.length > 0 && records.length > 0) { if (eventDistribution.length > 0 && records.length > 0) {
const filterField = records[0].by_field_value || records[0].over_field_value; const filterField = records[0].by_field_value || records[0].over_field_value;
chartData = eventDistribution.filter((d) => d.entity !== filterField); chartData = eventDistribution.filter((d) => d.entity !== filterField);
_.map(metricData, (value, time) => { map(metricData, (value, time) => {
// The filtering for rare/event_distribution charts needs to be handled // The filtering for rare/event_distribution charts needs to be handled
// differently because of how the source data is structured. // differently because of how the source data is structured.
// For rare chart values we are only interested whether a value is either `0` or not, // For rare chart values we are only interested whether a value is either `0` or not,
@ -304,7 +309,7 @@ export const anomalyDataChange = function (
} }
}); });
} else { } else {
chartData = _.map(metricData, (value, time) => ({ chartData = map(metricData, (value, time) => ({
date: +time, date: +time,
value: value, value: value,
})); }));
@ -314,7 +319,7 @@ export const anomalyDataChange = function (
// Iterate through the anomaly records, adding anomalyScore properties // Iterate through the anomaly records, adding anomalyScore properties
// to the chartData entries for anomalous buckets. // to the chartData entries for anomalous buckets.
const chartDataForPointSearch = getChartDataForPointSearch(chartData, records[0], chartType); const chartDataForPointSearch = getChartDataForPointSearch(chartData, records[0], chartType);
_.each(records, (record) => { each(records, (record) => {
// Look for a chart point with the same time as the record. // Look for a chart point with the same time as the record.
// If none found, insert a point for anomalies due to a gap in the data. // If none found, insert a point for anomalies due to a gap in the data.
const recordTime = record[ML_TIME_FIELD_NAME]; const recordTime = record[ML_TIME_FIELD_NAME];
@ -330,13 +335,13 @@ export const anomalyDataChange = function (
chartPoint.actual = record.actual; chartPoint.actual = record.actual;
chartPoint.typical = record.typical; chartPoint.typical = record.typical;
} else { } else {
const causes = _.get(record, 'causes', []); const causes = get(record, 'causes', []);
if (causes.length > 0) { if (causes.length > 0) {
chartPoint.byFieldName = record.by_field_name; chartPoint.byFieldName = record.by_field_name;
chartPoint.numberOfCauses = causes.length; chartPoint.numberOfCauses = causes.length;
if (causes.length === 1) { if (causes.length === 1) {
// If only a single cause, copy actual and typical values to the top level. // If only a single cause, copy actual and typical values to the top level.
const cause = _.first(record.causes); const cause = record.causes[0];
chartPoint.actual = cause.actual; chartPoint.actual = cause.actual;
chartPoint.typical = cause.typical; chartPoint.typical = cause.typical;
} }
@ -351,7 +356,7 @@ export const anomalyDataChange = function (
// Add a scheduledEvents property to any points in the chart data set // Add a scheduledEvents property to any points in the chart data set
// which correspond to times of scheduled events for the job. // which correspond to times of scheduled events for the job.
if (scheduledEvents !== undefined) { if (scheduledEvents !== undefined) {
_.each(scheduledEvents, (events, time) => { each(scheduledEvents, (events, time) => {
const chartPoint = findChartPointForTime(chartDataForPointSearch, Number(time)); const chartPoint = findChartPointForTime(chartDataForPointSearch, Number(time));
if (chartPoint !== undefined) { if (chartPoint !== undefined) {
// Note if the scheduled event coincides with an absence of the underlying metric data, // Note if the scheduled event coincides with an absence of the underlying metric data,
@ -385,10 +390,10 @@ export const anomalyDataChange = function (
.then((response) => { .then((response) => {
// calculate an overall min/max for all series // calculate an overall min/max for all series
const processedData = response.map(processChartData); const processedData = response.map(processChartData);
const allDataPoints = _.reduce( const allDataPoints = reduce(
processedData, processedData,
(datapoints, series) => { (datapoints, series) => {
_.each(series, (d) => datapoints.push(d)); each(series, (d) => datapoints.push(d));
return datapoints; return datapoints;
}, },
[] []
@ -420,7 +425,7 @@ function processRecordsForDisplay(anomalyRecords) {
// Aggregate by job, detector, and analysis fields (partition, by, over). // Aggregate by job, detector, and analysis fields (partition, by, over).
const aggregatedData = {}; const aggregatedData = {};
_.each(anomalyRecords, (record) => { each(anomalyRecords, (record) => {
// Check if we can plot a chart for this record, depending on whether the source data // Check if we can plot a chart for this record, depending on whether the source data
// is chartable, and if model plot is enabled for the job. // is chartable, and if model plot is enabled for the job.
const job = mlJobService.getJob(record.job_id); const job = mlJobService.getJob(record.job_id);
@ -524,20 +529,20 @@ function processRecordsForDisplay(anomalyRecords) {
let recordsForSeries = []; let recordsForSeries = [];
// Convert to an array of the records with the highest record_score per unique series. // Convert to an array of the records with the highest record_score per unique series.
_.each(aggregatedData, (detectorsForJob) => { each(aggregatedData, (detectorsForJob) => {
_.each(detectorsForJob, (groupsForDetector) => { each(detectorsForJob, (groupsForDetector) => {
if (groupsForDetector.maxScoreRecord !== undefined) { if (groupsForDetector.maxScoreRecord !== undefined) {
// Detector with no partition / by field. // Detector with no partition / by field.
recordsForSeries.push(groupsForDetector.maxScoreRecord); recordsForSeries.push(groupsForDetector.maxScoreRecord);
} else { } else {
_.each(groupsForDetector, (valuesForGroup) => { each(groupsForDetector, (valuesForGroup) => {
_.each(valuesForGroup, (dataForGroupValue) => { each(valuesForGroup, (dataForGroupValue) => {
if (dataForGroupValue.maxScoreRecord !== undefined) { if (dataForGroupValue.maxScoreRecord !== undefined) {
recordsForSeries.push(dataForGroupValue.maxScoreRecord); recordsForSeries.push(dataForGroupValue.maxScoreRecord);
} else { } else {
// Second level of aggregation for partition and by/over. // Second level of aggregation for partition and by/over.
_.each(dataForGroupValue, (splitsForGroup) => { each(dataForGroupValue, (splitsForGroup) => {
_.each(splitsForGroup, (dataForSplitValue) => { each(splitsForGroup, (dataForSplitValue) => {
recordsForSeries.push(dataForSplitValue.maxScoreRecord); recordsForSeries.push(dataForSplitValue.maxScoreRecord);
}); });
}); });
@ -547,7 +552,7 @@ function processRecordsForDisplay(anomalyRecords) {
} }
}); });
}); });
recordsForSeries = _.sortBy(recordsForSeries, 'record_score').reverse(); recordsForSeries = sortBy(recordsForSeries, 'record_score').reverse();
return recordsForSeries; return recordsForSeries;
} }
@ -564,7 +569,7 @@ function calculateChartRange(
// Calculate the time range for the charts. // Calculate the time range for the charts.
// Fit in as many points in the available container width plotted at the job bucket span. // Fit in as many points in the available container width plotted at the job bucket span.
const midpointMs = Math.ceil((earliestMs + latestMs) / 2); const midpointMs = Math.ceil((earliestMs + latestMs) / 2);
const maxBucketSpanMs = Math.max.apply(null, _.map(seriesConfigs, 'bucketSpanSeconds')) * 1000; const maxBucketSpanMs = Math.max.apply(null, map(seriesConfigs, 'bucketSpanSeconds')) * 1000;
const pointsToPlotFullSelection = Math.ceil((latestMs - earliestMs) / maxBucketSpanMs); const pointsToPlotFullSelection = Math.ceil((latestMs - earliestMs) / maxBucketSpanMs);
@ -588,7 +593,7 @@ function calculateChartRange(
let minMs = recordsToPlot[0][timeFieldName]; let minMs = recordsToPlot[0][timeFieldName];
let maxMs = recordsToPlot[0][timeFieldName]; let maxMs = recordsToPlot[0][timeFieldName];
_.each(recordsToPlot, (record) => { each(recordsToPlot, (record) => {
const diffMs = maxMs - minMs; const diffMs = maxMs - minMs;
if (diffMs < maxTimeSpan) { if (diffMs < maxTimeSpan) {
const recordTime = record[timeFieldName]; const recordTime = record[timeFieldName];

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import mockAnomalyChartRecords from './__mocks__/mock_anomaly_chart_records.json'; import mockAnomalyChartRecords from './__mocks__/mock_anomaly_chart_records.json';
import mockDetectorsByJob from './__mocks__/mock_detectors_by_job.json'; import mockDetectorsByJob from './__mocks__/mock_detectors_by_job.json';
@ -24,10 +24,10 @@ import mockSeriesPromisesResponse from './__mocks__/mock_series_promises_respons
// suitable responses from the mocked services. The mocked services check against the // suitable responses from the mocked services. The mocked services check against the
// provided alternative values and return specific modified mock responses for the test case. // provided alternative values and return specific modified mock responses for the test case.
const mockJobConfigClone = _.cloneDeep(mockJobConfig); const mockJobConfigClone = cloneDeep(mockJobConfig);
// adjust mock data to tests against null/0 values // adjust mock data to tests against null/0 values
const mockMetricClone = _.cloneDeep(mockSeriesPromisesResponse[0][0]); const mockMetricClone = cloneDeep(mockSeriesPromisesResponse[0][0]);
mockMetricClone.results['1486712700000'] = null; mockMetricClone.results['1486712700000'] = null;
mockMetricClone.results['1486713600000'] = 0; mockMetricClone.results['1486713600000'] = 0;
@ -127,7 +127,7 @@ describe('explorerChartsContainerService', () => {
}); });
test('filtering should skip values of null', (done) => { test('filtering should skip values of null', (done) => {
const mockAnomalyChartRecordsClone = _.cloneDeep(mockAnomalyChartRecords).map((d) => { const mockAnomalyChartRecordsClone = cloneDeep(mockAnomalyChartRecords).map((d) => {
d.job_id = 'mock-job-id-distribution'; d.job_id = 'mock-job-id-distribution';
return d; return d;
}); });
@ -151,7 +151,7 @@ describe('explorerChartsContainerService', () => {
}); });
test('field value with trailing dot should not throw an error', (done) => { test('field value with trailing dot should not throw an error', (done) => {
const mockAnomalyChartRecordsClone = _.cloneDeep(mockAnomalyChartRecords); const mockAnomalyChartRecordsClone = cloneDeep(mockAnomalyChartRecords);
mockAnomalyChartRecordsClone[1].partition_field_value = 'AAL.'; mockAnomalyChartRecordsClone[1].partition_field_value = 'AAL.';
expect(() => { expect(() => {

View file

@ -10,7 +10,9 @@
import React from 'react'; import React from 'react';
import './_explorer.scss'; import './_explorer.scss';
import _, { isEqual } from 'lodash'; import isEqual from 'lodash/isEqual';
import uniq from 'lodash/uniq';
import get from 'lodash/get';
import d3 from 'd3'; import d3 from 'd3';
import moment from 'moment'; import moment from 'moment';
import DragSelect from 'dragselect'; import DragSelect from 'dragselect';
@ -176,9 +178,9 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
} }
); );
selectedData.laneLabels = _.uniq(selectedData.laneLabels); selectedData.laneLabels = uniq(selectedData.laneLabels);
selectedData.times = _.uniq(selectedData.times); selectedData.times = uniq(selectedData.times);
if (_.isEqual(selectedData, previousSelectedData) === false) { if (isEqual(selectedData, previousSelectedData) === false) {
// If no cells containing anomalies have been selected, // If no cells containing anomalies have been selected,
// immediately clear the selection, otherwise trigger // immediately clear the selection, otherwise trigger
// a reload with the updated selected cells. // a reload with the updated selected cells.
@ -246,7 +248,7 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
selectedTimes: d3.extent(times), selectedTimes: d3.extent(times),
}; };
if (_.isEqual(oldSelection, newSelection)) { if (isEqual(oldSelection, newSelection)) {
triggerNewSelection = false; triggerNewSelection = false;
} }
@ -277,8 +279,8 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
// Check for selection and reselect the corresponding swimlane cell // Check for selection and reselect the corresponding swimlane cell
// if the time range and lane label are still in view. // if the time range and lane label are still in view.
const selectionState = selection; const selectionState = selection;
const selectedType = _.get(selectionState, 'type', undefined); const selectedType = get(selectionState, 'type', undefined);
const selectionViewByFieldName = _.get(selectionState, 'viewByFieldName', ''); const selectionViewByFieldName = get(selectionState, 'viewByFieldName', '');
// If a selection was done in the other swimlane, add the "masked" classes // If a selection was done in the other swimlane, add the "masked" classes
// to de-emphasize the swimlane cells. // to de-emphasize the swimlane cells.
@ -288,8 +290,8 @@ export class ExplorerSwimlane extends React.Component<ExplorerSwimlaneProps> {
} }
const cellsToSelect: Node[] = []; const cellsToSelect: Node[] = [];
const selectedLanes = _.get(selectionState, 'lanes', []); const selectedLanes = get(selectionState, 'lanes', []);
const selectedTimes = _.get(selectionState, 'times', []); const selectedTimes = get(selectionState, 'times', []);
const selectedTimeExtent = d3.extent(selectedTimes); const selectedTimeExtent = d3.extent(selectedTimes);
if ( if (

View file

@ -6,7 +6,9 @@
// Service for carrying out requests to run ML forecasts and to obtain // Service for carrying out requests to run ML forecasts and to obtain
// data on forecasts that have been performed. // data on forecasts that have been performed.
import _ from 'lodash'; import get from 'lodash/get';
import find from 'lodash/find';
import each from 'lodash/each';
import { map } from 'rxjs/operators'; import { map } from 'rxjs/operators';
import { ml } from './ml_api_service'; import { ml } from './ml_api_service';
@ -129,8 +131,8 @@ function getForecastDateRange(job, forecastId) {
}, },
}) })
.then((resp) => { .then((resp) => {
obj.earliest = _.get(resp, 'aggregations.earliest.value', null); obj.earliest = get(resp, 'aggregations.earliest.value', null);
obj.latest = _.get(resp, 'aggregations.latest.value', null); obj.latest = get(resp, 'aggregations.latest.value', null);
if (obj.earliest === null || obj.latest === null) { if (obj.earliest === null || obj.latest === null) {
reject(resp); reject(resp);
} else { } else {
@ -157,8 +159,8 @@ function getForecastData(
// Extract the partition, by, over fields on which to filter. // Extract the partition, by, over fields on which to filter.
const criteriaFields = []; const criteriaFields = [];
const detector = job.analysis_config.detectors[detectorIndex]; const detector = job.analysis_config.detectors[detectorIndex];
if (_.has(detector, 'partition_field_name')) { if (detector.partition_field_name !== undefined) {
const partitionEntity = _.find(entityFields, { fieldName: detector.partition_field_name }); const partitionEntity = find(entityFields, { fieldName: detector.partition_field_name });
if (partitionEntity !== undefined) { if (partitionEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'partition_field_name', fieldValue: partitionEntity.fieldName }, { fieldName: 'partition_field_name', fieldValue: partitionEntity.fieldName },
@ -167,8 +169,8 @@ function getForecastData(
} }
} }
if (_.has(detector, 'over_field_name')) { if (detector.over_field_name !== undefined) {
const overEntity = _.find(entityFields, { fieldName: detector.over_field_name }); const overEntity = find(entityFields, { fieldName: detector.over_field_name });
if (overEntity !== undefined) { if (overEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'over_field_name', fieldValue: overEntity.fieldName }, { fieldName: 'over_field_name', fieldValue: overEntity.fieldName },
@ -177,8 +179,8 @@ function getForecastData(
} }
} }
if (_.has(detector, 'by_field_name')) { if (detector.by_field_name !== undefined) {
const byEntity = _.find(entityFields, { fieldName: detector.by_field_name }); const byEntity = find(entityFields, { fieldName: detector.by_field_name });
if (byEntity !== undefined) { if (byEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'by_field_name', fieldValue: byEntity.fieldName }, { fieldName: 'by_field_name', fieldValue: byEntity.fieldName },
@ -222,7 +224,7 @@ function getForecastData(
]; ];
// Add in term queries for each of the specified criteria. // Add in term queries for each of the specified criteria.
_.each(criteriaFields, (criteria) => { each(criteriaFields, (criteria) => {
filterCriteria.push({ filterCriteria.push({
term: { term: {
[criteria.fieldName]: criteria.fieldValue, [criteria.fieldName]: criteria.fieldValue,
@ -281,13 +283,13 @@ function getForecastData(
}) })
.pipe( .pipe(
map((resp) => { map((resp) => {
const aggregationsByTime = _.get(resp, ['aggregations', 'times', 'buckets'], []); const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
_.each(aggregationsByTime, (dataForTime) => { each(aggregationsByTime, (dataForTime) => {
const time = dataForTime.key; const time = dataForTime.key;
obj.results[time] = { obj.results[time] = {
prediction: _.get(dataForTime, ['prediction', 'value']), prediction: get(dataForTime, ['prediction', 'value']),
forecastUpper: _.get(dataForTime, ['forecastUpper', 'value']), forecastUpper: get(dataForTime, ['forecastUpper', 'value']),
forecastLower: _.get(dataForTime, ['forecastLower', 'value']), forecastLower: get(dataForTime, ['forecastLower', 'value']),
}; };
}); });
@ -355,7 +357,7 @@ function getForecastRequestStats(job, forecastId) {
}) })
.then((resp) => { .then((resp) => {
if (resp.hits.total !== 0) { if (resp.hits.total !== 0) {
obj.stats = _.first(resp.hits.hits)._source; obj.stats = resp.hits.hits[0]._source;
} }
resolve(obj); resolve(obj);
}) })

View file

@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import each from 'lodash/each';
import find from 'lodash/find';
import get from 'lodash/get';
import isNumber from 'lodash/isNumber';
import moment from 'moment'; import moment from 'moment';
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
@ -135,10 +139,10 @@ class JobService {
const jobStats = statsResp.jobs[j]; const jobStats = statsResp.jobs[j];
if (job.job_id === jobStats.job_id) { if (job.job_id === jobStats.job_id) {
job.state = jobStats.state; job.state = jobStats.state;
job.data_counts = _.cloneDeep(jobStats.data_counts); job.data_counts = cloneDeep(jobStats.data_counts);
job.model_size_stats = _.cloneDeep(jobStats.model_size_stats); job.model_size_stats = cloneDeep(jobStats.model_size_stats);
if (jobStats.node) { if (jobStats.node) {
job.node = _.cloneDeep(jobStats.node); job.node = cloneDeep(jobStats.node);
} }
if (jobStats.open_time) { if (jobStats.open_time) {
job.open_time = jobStats.open_time; job.open_time = jobStats.open_time;
@ -212,10 +216,10 @@ class JobService {
newJob.state = statsJob.state; newJob.state = statsJob.state;
newJob.data_counts = {}; newJob.data_counts = {};
newJob.model_size_stats = {}; newJob.model_size_stats = {};
newJob.data_counts = _.cloneDeep(statsJob.data_counts); newJob.data_counts = cloneDeep(statsJob.data_counts);
newJob.model_size_stats = _.cloneDeep(statsJob.model_size_stats); newJob.model_size_stats = cloneDeep(statsJob.model_size_stats);
if (newJob.node) { if (newJob.node) {
newJob.node = _.cloneDeep(statsJob.node); newJob.node = cloneDeep(statsJob.node);
} }
if (statsJob.open_time) { if (statsJob.open_time) {
@ -352,7 +356,7 @@ class JobService {
// create a deep copy of a job object // create a deep copy of a job object
// also remove items from the job which are set by the server and not needed // also remove items from the job which are set by the server and not needed
// in the future this formatting could be optional // in the future this formatting could be optional
const tempJob = _.cloneDeep(job); const tempJob = cloneDeep(job);
// remove all of the items which should not be copied // remove all of the items which should not be copied
// such as counts, state and times // such as counts, state and times
@ -375,7 +379,7 @@ class JobService {
delete tempJob.analysis_config.use_per_partition_normalization; delete tempJob.analysis_config.use_per_partition_normalization;
_.each(tempJob.analysis_config.detectors, (d) => { each(tempJob.analysis_config.detectors, (d) => {
delete d.detector_index; delete d.detector_index;
}); });
@ -469,7 +473,7 @@ class JobService {
// find a job based on the id // find a job based on the id
getJob(jobId) { getJob(jobId) {
const job = _.find(jobs, (j) => { const job = find(jobs, (j) => {
return j.job_id === jobId; return j.job_id === jobId;
}); });
@ -550,7 +554,7 @@ class JobService {
// get fields from detectors // get fields from detectors
if (job.analysis_config.detectors) { if (job.analysis_config.detectors) {
_.each(job.analysis_config.detectors, (dtr) => { each(job.analysis_config.detectors, (dtr) => {
if (dtr.by_field_name) { if (dtr.by_field_name) {
fields[dtr.by_field_name] = {}; fields[dtr.by_field_name] = {};
} }
@ -568,7 +572,7 @@ class JobService {
// get fields from influencers // get fields from influencers
if (job.analysis_config.influencers) { if (job.analysis_config.influencers) {
_.each(job.analysis_config.influencers, (inf) => { each(job.analysis_config.influencers, (inf) => {
fields[inf] = {}; fields[inf] = {};
}); });
} }
@ -659,7 +663,7 @@ class JobService {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
// if the end timestamp is a number, add one ms to it to make it // if the end timestamp is a number, add one ms to it to make it
// inclusive of the end of the data // inclusive of the end of the data
if (_.isNumber(end)) { if (isNumber(end)) {
end++; end++;
} }
@ -780,7 +784,7 @@ class JobService {
}); });
} }
}); });
_.each(tempGroups, (js, id) => { each(tempGroups, (js, id) => {
groups.push({ id, jobs: js }); groups.push({ id, jobs: js });
}); });
return groups; return groups;
@ -837,9 +841,9 @@ function processBasicJobInfo(localJobService, jobsList) {
const customUrlsByJob = {}; const customUrlsByJob = {};
// use cloned copy of jobs list so not to alter the original // use cloned copy of jobs list so not to alter the original
const jobsListCopy = _.cloneDeep(jobsList); const jobsListCopy = cloneDeep(jobsList);
_.each(jobsListCopy, (jobObj) => { each(jobsListCopy, (jobObj) => {
const analysisConfig = jobObj.analysis_config; const analysisConfig = jobObj.analysis_config;
const bucketSpan = parseInterval(analysisConfig.bucket_span); const bucketSpan = parseInterval(analysisConfig.bucket_span);
@ -848,20 +852,20 @@ function processBasicJobInfo(localJobService, jobsList) {
bucketSpanSeconds: bucketSpan.asSeconds(), bucketSpanSeconds: bucketSpan.asSeconds(),
}; };
if (_.has(jobObj, 'description') && /^\s*$/.test(jobObj.description) === false) { if (jobObj.description !== undefined && /^\s*$/.test(jobObj.description) === false) {
job.description = jobObj.description; job.description = jobObj.description;
} else { } else {
// Just use the id as the description. // Just use the id as the description.
job.description = jobObj.job_id; job.description = jobObj.job_id;
} }
job.detectors = _.get(analysisConfig, 'detectors', []); job.detectors = get(analysisConfig, 'detectors', []);
detectorsByJob[job.id] = job.detectors; detectorsByJob[job.id] = job.detectors;
if (_.has(jobObj, 'custom_settings.custom_urls')) { if (jobObj.custom_settings !== undefined && jobObj.custom_settings.custom_urls !== undefined) {
job.customUrls = []; job.customUrls = [];
_.each(jobObj.custom_settings.custom_urls, (url) => { each(jobObj.custom_settings.custom_urls, (url) => {
if (_.has(url, 'url_name') && _.has(url, 'url_value') && isWebUrl(url.url_value)) { if (url.url_name !== undefined && url.url_value !== undefined && isWebUrl(url.url_value)) {
// Only make web URLs (i.e. http or https) available in dashboard drilldowns. // Only make web URLs (i.e. http or https) available in dashboard drilldowns.
job.customUrls.push(url); job.customUrls.push(url);
} }
@ -897,7 +901,7 @@ function createJobStats(jobsList, jobStats) {
const mlNodes = {}; const mlNodes = {};
let failedJobs = 0; let failedJobs = 0;
_.each(jobsList, (job) => { each(jobsList, (job) => {
if (job.state === 'opened') { if (job.state === 'opened') {
jobStats.open.value++; jobStats.open.value++;
} else if (job.state === 'closed') { } else if (job.state === 'closed') {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import each from 'lodash/each';
import { ml } from './ml_api_service'; import { ml } from './ml_api_service';
@ -16,8 +16,8 @@ export function getFieldTypeFromMapping(index, fieldName) {
ml.getFieldCaps({ index, fields: [fieldName] }) ml.getFieldCaps({ index, fields: [fieldName] })
.then((resp) => { .then((resp) => {
let fieldType = ''; let fieldType = '';
_.each(resp.fields, (field) => { each(resp.fields, (field) => {
_.each(field, (type) => { each(field, (type) => {
if (fieldType === '') { if (fieldType === '') {
fieldType = type.type; fieldType = type.type;
} }

View file

@ -13,7 +13,8 @@
// Returned response contains a results property containing the requested aggregation. // Returned response contains a results property containing the requested aggregation.
import { Observable } from 'rxjs'; import { Observable } from 'rxjs';
import { map } from 'rxjs/operators'; import { map } from 'rxjs/operators';
import _ from 'lodash'; import each from 'lodash/each';
import get from 'lodash/get';
import { Dictionary } from '../../../../common/types/common'; import { Dictionary } from '../../../../common/types/common';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
import { JobId } from '../../../../common/types/anomaly_detection_jobs'; import { JobId } from '../../../../common/types/anomaly_detection_jobs';
@ -237,7 +238,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
]; ];
// Add in term queries for each of the specified criteria. // Add in term queries for each of the specified criteria.
_.each(criteriaFields, (criteria) => { each(criteriaFields, (criteria) => {
mustCriteria.push({ mustCriteria.push({
term: { term: {
[criteria.fieldName]: criteria.fieldValue, [criteria.fieldName]: criteria.fieldValue,
@ -316,12 +317,12 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
}) })
.pipe( .pipe(
map((resp) => { map((resp) => {
const aggregationsByTime = _.get(resp, ['aggregations', 'times', 'buckets'], []); const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
_.each(aggregationsByTime, (dataForTime: any) => { each(aggregationsByTime, (dataForTime: any) => {
const time = dataForTime.key; const time = dataForTime.key;
const modelUpper: number | undefined = _.get(dataForTime, ['modelUpper', 'value']); const modelUpper: number | undefined = get(dataForTime, ['modelUpper', 'value']);
const modelLower: number | undefined = _.get(dataForTime, ['modelLower', 'value']); const modelLower: number | undefined = get(dataForTime, ['modelLower', 'value']);
const actual = _.get(dataForTime, ['actual', 'value']); const actual = get(dataForTime, ['actual', 'value']);
obj.results[time] = { obj.results[time] = {
actual, actual,
@ -375,7 +376,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -391,7 +392,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
} }
// Add in term queries for each of the specified criteria. // Add in term queries for each of the specified criteria.
_.each(criteriaFields, (criteria) => { each(criteriaFields, (criteria) => {
boolCriteria.push({ boolCriteria.push({
term: { term: {
[criteria.fieldName]: criteria.fieldValue, [criteria.fieldName]: criteria.fieldValue,
@ -428,7 +429,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
.pipe( .pipe(
map((resp) => { map((resp) => {
if (resp.hits.total !== 0) { if (resp.hits.total !== 0) {
_.each(resp.hits.hits, (hit: any) => { each(resp.hits.hits, (hit: any) => {
obj.records.push(hit._source); obj.records.push(hit._source);
}); });
} }
@ -473,7 +474,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
jobIdFilterStr += `${i > 0 ? ' OR ' : ''}job_id:${jobId}`; jobIdFilterStr += `${i > 0 ? ' OR ' : ''}job_id:${jobId}`;
}); });
boolCriteria.push({ boolCriteria.push({
@ -536,15 +537,15 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
}) })
.pipe( .pipe(
map((resp) => { map((resp) => {
const dataByJobId = _.get(resp, ['aggregations', 'jobs', 'buckets'], []); const dataByJobId = get(resp, ['aggregations', 'jobs', 'buckets'], []);
_.each(dataByJobId, (dataForJob: any) => { each(dataByJobId, (dataForJob: any) => {
const jobId: string = dataForJob.key; const jobId: string = dataForJob.key;
const resultsForTime: Record<string, any> = {}; const resultsForTime: Record<string, any> = {};
const dataByTime = _.get(dataForJob, ['times', 'buckets'], []); const dataByTime = get(dataForJob, ['times', 'buckets'], []);
_.each(dataByTime, (dataForTime: any) => { each(dataByTime, (dataForTime: any) => {
const time: string = dataForTime.key; const time: string = dataForTime.key;
const events: object[] = _.get(dataForTime, ['events', 'buckets']); const events: any[] = get(dataForTime, ['events', 'buckets']);
resultsForTime[time] = _.map(events, 'key'); resultsForTime[time] = events.map((e) => e.key);
}); });
obj.events[jobId] = resultsForTime; obj.events[jobId] = resultsForTime;
}); });

View file

@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import each from 'lodash/each';
import get from 'lodash/get';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
import { escapeForElasticsearchQuery } from '../../util/string_utils'; import { escapeForElasticsearchQuery } from '../../util/string_utils';
@ -50,7 +51,7 @@ export function resultsServiceProvider(mlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -131,18 +132,18 @@ export function resultsServiceProvider(mlApiServices) {
}, },
}) })
.then((resp) => { .then((resp) => {
const dataByJobId = _.get(resp, ['aggregations', 'jobId', 'buckets'], []); const dataByJobId = get(resp, ['aggregations', 'jobId', 'buckets'], []);
_.each(dataByJobId, (dataForJob) => { each(dataByJobId, (dataForJob) => {
const jobId = dataForJob.key; const jobId = dataForJob.key;
const resultsForTime = {}; const resultsForTime = {};
const dataByTime = _.get(dataForJob, ['byTime', 'buckets'], []); const dataByTime = get(dataForJob, ['byTime', 'buckets'], []);
_.each(dataByTime, (dataForTime) => { each(dataByTime, (dataForTime) => {
const value = _.get(dataForTime, ['anomalyScore', 'value']); const value = get(dataForTime, ['anomalyScore', 'value']);
if (value !== undefined) { if (value !== undefined) {
const time = dataForTime.key; const time = dataForTime.key;
resultsForTime[time] = _.get(dataForTime, ['anomalyScore', 'value']); resultsForTime[time] = get(dataForTime, ['anomalyScore', 'value']);
} }
}); });
obj.results[jobId] = resultsForTime; obj.results[jobId] = resultsForTime;
@ -198,7 +199,7 @@ export function resultsServiceProvider(mlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -305,17 +306,17 @@ export function resultsServiceProvider(mlApiServices) {
}, },
}) })
.then((resp) => { .then((resp) => {
const fieldNameBuckets = _.get( const fieldNameBuckets = get(
resp, resp,
['aggregations', 'influencerFieldNames', 'buckets'], ['aggregations', 'influencerFieldNames', 'buckets'],
[] []
); );
_.each(fieldNameBuckets, (nameBucket) => { each(fieldNameBuckets, (nameBucket) => {
const fieldName = nameBucket.key; const fieldName = nameBucket.key;
const fieldValues = []; const fieldValues = [];
const fieldValueBuckets = _.get(nameBucket, ['influencerFieldValues', 'buckets'], []); const fieldValueBuckets = get(nameBucket, ['influencerFieldValues', 'buckets'], []);
_.each(fieldValueBuckets, (valueBucket) => { each(fieldValueBuckets, (valueBucket) => {
const fieldValueResult = { const fieldValueResult = {
influencerFieldValue: valueBucket.key, influencerFieldValue: valueBucket.key,
maxAnomalyScore: valueBucket.maxAnomalyScore.value, maxAnomalyScore: valueBucket.maxAnomalyScore.value,
@ -360,7 +361,7 @@ export function resultsServiceProvider(mlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -424,8 +425,8 @@ export function resultsServiceProvider(mlApiServices) {
}, },
}) })
.then((resp) => { .then((resp) => {
const buckets = _.get(resp, ['aggregations', 'influencerFieldValues', 'buckets'], []); const buckets = get(resp, ['aggregations', 'influencerFieldValues', 'buckets'], []);
_.each(buckets, (bucket) => { each(buckets, (bucket) => {
const result = { const result = {
influencerFieldValue: bucket.key, influencerFieldValue: bucket.key,
maxAnomalyScore: bucket.maxAnomalyScore.value, maxAnomalyScore: bucket.maxAnomalyScore.value,
@ -458,9 +459,9 @@ export function resultsServiceProvider(mlApiServices) {
end: latestMs, end: latestMs,
}) })
.then((resp) => { .then((resp) => {
const dataByTime = _.get(resp, ['overall_buckets'], []); const dataByTime = get(resp, ['overall_buckets'], []);
_.each(dataByTime, (dataForTime) => { each(dataByTime, (dataForTime) => {
const value = _.get(dataForTime, ['overall_score']); const value = get(dataForTime, ['overall_score']);
if (value !== undefined) { if (value !== undefined) {
obj.results[dataForTime.timestamp] = value; obj.results[dataForTime.timestamp] = value;
} }
@ -517,7 +518,7 @@ export function resultsServiceProvider(mlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -537,7 +538,7 @@ export function resultsServiceProvider(mlApiServices) {
if (influencerFieldValues && influencerFieldValues.length > 0) { if (influencerFieldValues && influencerFieldValues.length > 0) {
let influencerFilterStr = ''; let influencerFilterStr = '';
_.each(influencerFieldValues, (value, i) => { each(influencerFieldValues, (value, i) => {
if (i > 0) { if (i > 0) {
influencerFilterStr += ' OR '; influencerFilterStr += ' OR ';
} }
@ -625,17 +626,17 @@ export function resultsServiceProvider(mlApiServices) {
}, },
}) })
.then((resp) => { .then((resp) => {
const fieldValueBuckets = _.get( const fieldValueBuckets = get(
resp, resp,
['aggregations', 'influencerFieldValues', 'buckets'], ['aggregations', 'influencerFieldValues', 'buckets'],
[] []
); );
_.each(fieldValueBuckets, (valueBucket) => { each(fieldValueBuckets, (valueBucket) => {
const fieldValue = valueBucket.key; const fieldValue = valueBucket.key;
const fieldValues = {}; const fieldValues = {};
const timeBuckets = _.get(valueBucket, ['byTime', 'buckets'], []); const timeBuckets = get(valueBucket, ['byTime', 'buckets'], []);
_.each(timeBuckets, (timeBucket) => { each(timeBuckets, (timeBucket) => {
const time = timeBucket.key; const time = timeBucket.key;
const score = timeBucket.maxAnomalyScore.value; const score = timeBucket.maxAnomalyScore.value;
fieldValues[time] = score; fieldValues[time] = score;
@ -701,7 +702,7 @@ export function resultsServiceProvider(mlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -744,7 +745,7 @@ export function resultsServiceProvider(mlApiServices) {
}) })
.then((resp) => { .then((resp) => {
if (resp.hits.total !== 0) { if (resp.hits.total !== 0) {
_.each(resp.hits.hits, (hit) => { each(resp.hits.hits, (hit) => {
obj.records.push(hit._source); obj.records.push(hit._source);
}); });
} }
@ -797,7 +798,7 @@ export function resultsServiceProvider(mlApiServices) {
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i) => { each(jobIds, (jobId, i) => {
if (i > 0) { if (i > 0) {
jobIdFilterStr += ' OR '; jobIdFilterStr += ' OR ';
} }
@ -875,7 +876,7 @@ export function resultsServiceProvider(mlApiServices) {
}) })
.then((resp) => { .then((resp) => {
if (resp.hits.total !== 0) { if (resp.hits.total !== 0) {
_.each(resp.hits.hits, (hit) => { each(resp.hits.hits, (hit) => {
obj.records.push(hit._source); obj.records.push(hit._source);
}); });
} }
@ -1000,7 +1001,7 @@ export function resultsServiceProvider(mlApiServices) {
}) })
.then((resp) => { .then((resp) => {
if (resp.hits.total !== 0) { if (resp.hits.total !== 0) {
_.each(resp.hits.hits, (hit) => { each(resp.hits.hits, (hit) => {
obj.records.push(hit._source); obj.records.push(hit._source);
}); });
} }
@ -1079,8 +1080,8 @@ export function resultsServiceProvider(mlApiServices) {
}, },
}) })
.then((resp) => { .then((resp) => {
const dataByTimeBucket = _.get(resp, ['aggregations', 'eventRate', 'buckets'], []); const dataByTimeBucket = get(resp, ['aggregations', 'eventRate', 'buckets'], []);
_.each(dataByTimeBucket, (dataForTime) => { each(dataByTimeBucket, (dataForTime) => {
const time = dataForTime.key; const time = dataForTime.key;
obj.results[time] = dataForTime.doc_count; obj.results[time] = dataForTime.doc_count;
}); });
@ -1227,18 +1228,18 @@ export function resultsServiceProvider(mlApiServices) {
// Because of the sampling, results of metricFunctions which use sum or count // Because of the sampling, results of metricFunctions which use sum or count
// can be significantly skewed. Taking into account totalHits we calculate a // can be significantly skewed. Taking into account totalHits we calculate a
// a factor to normalize results for these metricFunctions. // a factor to normalize results for these metricFunctions.
const totalHits = _.get(resp, ['hits', 'total'], 0); const totalHits = get(resp, ['hits', 'total'], 0);
const successfulShards = _.get(resp, ['_shards', 'successful'], 0); const successfulShards = get(resp, ['_shards', 'successful'], 0);
let normalizeFactor = 1; let normalizeFactor = 1;
if (totalHits > successfulShards * SAMPLER_TOP_TERMS_SHARD_SIZE) { if (totalHits > successfulShards * SAMPLER_TOP_TERMS_SHARD_SIZE) {
normalizeFactor = totalHits / (successfulShards * SAMPLER_TOP_TERMS_SHARD_SIZE); normalizeFactor = totalHits / (successfulShards * SAMPLER_TOP_TERMS_SHARD_SIZE);
} }
const dataByTime = _.get(resp, ['aggregations', 'sample', 'byTime', 'buckets'], []); const dataByTime = get(resp, ['aggregations', 'sample', 'byTime', 'buckets'], []);
const data = dataByTime.reduce((d, dataForTime) => { const data = dataByTime.reduce((d, dataForTime) => {
const date = +dataForTime.key; const date = +dataForTime.key;
const entities = _.get(dataForTime, ['entities', 'buckets'], []); const entities = get(dataForTime, ['entities', 'buckets'], []);
entities.forEach((entity) => { entities.forEach((entity) => {
let value = metricFunction === 'count' ? entity.doc_count : entity.metric.value; let value = metricFunction === 'count' ? entity.doc_count : entity.metric.value;
@ -1291,7 +1292,7 @@ export function resultsServiceProvider(mlApiServices) {
{ term: { job_id: jobId } }, { term: { job_id: jobId } },
]; ];
_.each(criteriaFields, (criteria) => { each(criteriaFields, (criteria) => {
mustCriteria.push({ mustCriteria.push({
term: { term: {
[criteria.fieldName]: criteria.fieldValue, [criteria.fieldName]: criteria.fieldValue,
@ -1339,11 +1340,11 @@ export function resultsServiceProvider(mlApiServices) {
}, },
}) })
.then((resp) => { .then((resp) => {
const aggregationsByTime = _.get(resp, ['aggregations', 'times', 'buckets'], []); const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
_.each(aggregationsByTime, (dataForTime) => { each(aggregationsByTime, (dataForTime) => {
const time = dataForTime.key; const time = dataForTime.key;
obj.results[time] = { obj.results[time] = {
score: _.get(dataForTime, ['recordScore', 'value']), score: get(dataForTime, ['recordScore', 'value']),
}; };
}); });

View file

@ -9,7 +9,7 @@
*/ */
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import _ from 'lodash'; import get from 'lodash/get';
import React, { Component } from 'react'; import React, { Component } from 'react';
@ -250,8 +250,8 @@ export class ForecastingModalUI extends Component {
.getForecastRequestStats(this.props.job, forecastId) .getForecastRequestStats(this.props.job, forecastId)
.then((resp) => { .then((resp) => {
// Get the progress (stats value is between 0 and 1). // Get the progress (stats value is between 0 and 1).
const progress = _.get(resp, ['stats', 'forecast_progress'], previousProgress); const progress = get(resp, ['stats', 'forecast_progress'], previousProgress);
const status = _.get(resp, ['stats', 'forecast_status']); const status = get(resp, ['stats', 'forecast_status']);
// The requests for forecast stats can get routed to different shards, // The requests for forecast stats can get routed to different shards,
// and if these operate at different speeds there is a chance that a // and if these operate at different speeds there is a chance that a
@ -263,7 +263,7 @@ export class ForecastingModalUI extends Component {
} }
// Display any messages returned in the request stats. // Display any messages returned in the request stats.
let messages = _.get(resp, ['stats', 'forecast_messages'], []); let messages = get(resp, ['stats', 'forecast_messages'], []);
messages = messages.map((message) => ({ message, status: MESSAGE_LEVEL.WARNING })); messages = messages.map((message) => ({ message, status: MESSAGE_LEVEL.WARNING }));
this.setState({ messages }); this.setState({ messages });

View file

@ -12,9 +12,13 @@
import PropTypes from 'prop-types'; import PropTypes from 'prop-types';
import React, { Component } from 'react'; import React, { Component } from 'react';
import useObservable from 'react-use/lib/useObservable'; import useObservable from 'react-use/lib/useObservable';
import _ from 'lodash'; import isEqual from 'lodash/isEqual';
import reduce from 'lodash/reduce';
import each from 'lodash/each';
import get from 'lodash/get';
import d3 from 'd3'; import d3 from 'd3';
import moment from 'moment'; import moment from 'moment';
import { i18n } from '@kbn/i18n';
import { import {
getSeverityWithLow, getSeverityWithLow,
@ -49,8 +53,6 @@ import {
unhighlightFocusChartAnnotation, unhighlightFocusChartAnnotation,
} from './timeseries_chart_annotations'; } from './timeseries_chart_annotations';
import { i18n } from '@kbn/i18n';
const focusZoomPanelHeight = 25; const focusZoomPanelHeight = 25;
const focusChartHeight = 310; const focusChartHeight = 310;
const focusHeight = focusZoomPanelHeight + focusChartHeight; const focusHeight = focusZoomPanelHeight + focusChartHeight;
@ -399,7 +401,7 @@ class TimeseriesChartIntl extends Component {
if (zoomFrom) { if (zoomFrom) {
focusLoadFrom = zoomFrom.getTime(); focusLoadFrom = zoomFrom.getTime();
} else { } else {
focusLoadFrom = _.reduce( focusLoadFrom = reduce(
combinedData, combinedData,
(memo, point) => Math.min(memo, point.date.getTime()), (memo, point) => Math.min(memo, point.date.getTime()),
new Date(2099, 12, 31).getTime() new Date(2099, 12, 31).getTime()
@ -410,11 +412,7 @@ class TimeseriesChartIntl extends Component {
if (zoomTo) { if (zoomTo) {
focusLoadTo = zoomTo.getTime(); focusLoadTo = zoomTo.getTime();
} else { } else {
focusLoadTo = _.reduce( focusLoadTo = reduce(combinedData, (memo, point) => Math.max(memo, point.date.getTime()), 0);
combinedData,
(memo, point) => Math.max(memo, point.date.getTime()),
0
);
} }
focusLoadTo = Math.min(focusLoadTo, contextXMax); focusLoadTo = Math.min(focusLoadTo, contextXMax);
@ -431,7 +429,7 @@ class TimeseriesChartIntl extends Component {
min: moment(new Date(contextXScaleDomain[0])), min: moment(new Date(contextXScaleDomain[0])),
max: moment(contextXScaleDomain[1]), max: moment(contextXScaleDomain[1]),
}; };
if (!_.isEqual(newSelectedBounds, this.selectedBounds)) { if (!isEqual(newSelectedBounds, this.selectedBounds)) {
this.selectedBounds = newSelectedBounds; this.selectedBounds = newSelectedBounds;
this.setContextBrushExtent( this.setContextBrushExtent(
new Date(contextXScaleDomain[0]), new Date(contextXScaleDomain[0]),
@ -764,7 +762,7 @@ class TimeseriesChartIntl extends Component {
}) })
.attr('class', (d) => { .attr('class', (d) => {
let markerClass = 'metric-value'; let markerClass = 'metric-value';
if (_.has(d, 'anomalyScore')) { if (d.anomalyScore !== undefined) {
markerClass += ` anomaly-marker ${getSeverityWithLow(d.anomalyScore).id}`; markerClass += ` anomaly-marker ${getSeverityWithLow(d.anomalyScore).id}`;
} }
return markerClass; return markerClass;
@ -887,14 +885,14 @@ class TimeseriesChartIntl extends Component {
); );
const zoomOptions = [{ durationMs: autoZoomDuration, label: 'auto' }]; const zoomOptions = [{ durationMs: autoZoomDuration, label: 'auto' }];
_.each(ZOOM_INTERVAL_OPTIONS, (option) => { each(ZOOM_INTERVAL_OPTIONS, (option) => {
if (option.duration.asSeconds() > minSecs && option.duration.asSeconds() < boundsSecs) { if (option.duration.asSeconds() > minSecs && option.duration.asSeconds() < boundsSecs) {
zoomOptions.push({ durationMs: option.duration.asMilliseconds(), label: option.label }); zoomOptions.push({ durationMs: option.duration.asMilliseconds(), label: option.label });
} }
}); });
xPos += zoomLabel.node().getBBox().width + 4; xPos += zoomLabel.node().getBBox().width + 4;
_.each(zoomOptions, (option) => { each(zoomOptions, (option) => {
const text = zoomGroup const text = zoomGroup
.append('a') .append('a')
.attr('data-ms', option.durationMs) .attr('data-ms', option.durationMs)
@ -960,7 +958,7 @@ class TimeseriesChartIntl extends Component {
const combinedData = const combinedData =
contextForecastData === undefined ? data : data.concat(contextForecastData); contextForecastData === undefined ? data : data.concat(contextForecastData);
const valuesRange = { min: Number.MAX_VALUE, max: Number.MIN_VALUE }; const valuesRange = { min: Number.MAX_VALUE, max: Number.MIN_VALUE };
_.each(combinedData, (item) => { each(combinedData, (item) => {
valuesRange.min = Math.min(item.value, valuesRange.min); valuesRange.min = Math.min(item.value, valuesRange.min);
valuesRange.max = Math.max(item.value, valuesRange.max); valuesRange.max = Math.max(item.value, valuesRange.max);
}); });
@ -973,7 +971,7 @@ class TimeseriesChartIntl extends Component {
(contextForecastData !== undefined && contextForecastData.length > 0) (contextForecastData !== undefined && contextForecastData.length > 0)
) { ) {
const boundsRange = { min: Number.MAX_VALUE, max: Number.MIN_VALUE }; const boundsRange = { min: Number.MAX_VALUE, max: Number.MIN_VALUE };
_.each(combinedData, (item) => { each(combinedData, (item) => {
boundsRange.min = Math.min(item.lower, boundsRange.min); boundsRange.min = Math.min(item.lower, boundsRange.min);
boundsRange.max = Math.max(item.upper, boundsRange.max); boundsRange.max = Math.max(item.upper, boundsRange.max);
}); });
@ -1294,7 +1292,7 @@ class TimeseriesChartIntl extends Component {
if (swimlaneData !== undefined && swimlaneData.length > 0) { if (swimlaneData !== undefined && swimlaneData.length > 0) {
// Adjust the earliest back to the time of the first swimlane point // Adjust the earliest back to the time of the first swimlane point
// if this is before the time filter minimum. // if this is before the time filter minimum.
earliest = Math.min(_.first(swimlaneData).date.getTime(), bounds.min.valueOf()); earliest = Math.min(swimlaneData[0].date.getTime(), bounds.min.valueOf());
} }
const contextAggMs = contextAggregationInterval.asMilliseconds(); const contextAggMs = contextAggregationInterval.asMilliseconds();
@ -1352,7 +1350,7 @@ class TimeseriesChartIntl extends Component {
const formattedDate = formatHumanReadableDateTimeSeconds(marker.date); const formattedDate = formatHumanReadableDateTimeSeconds(marker.date);
const tooltipData = [{ label: formattedDate }]; const tooltipData = [{ label: formattedDate }];
if (_.has(marker, 'anomalyScore')) { if (marker.anomalyScore !== undefined) {
const score = parseInt(marker.anomalyScore); const score = parseInt(marker.anomalyScore);
const displayScore = score > 0 ? score : '< 1'; const displayScore = score > 0 ? score : '< 1';
tooltipData.push({ tooltipData.push({
@ -1387,7 +1385,7 @@ class TimeseriesChartIntl extends Component {
// Show actual/typical when available except for rare detectors. // Show actual/typical when available except for rare detectors.
// Rare detectors always have 1 as actual and the probability as typical. // Rare detectors always have 1 as actual and the probability as typical.
// Exposing those values in the tooltip with actual/typical labels might irritate users. // Exposing those values in the tooltip with actual/typical labels might irritate users.
if (_.has(marker, 'actual') && marker.function !== 'rare') { if (marker.actual !== undefined && marker.function !== 'rare') {
// Display the record actual in preference to the chart value, which may be // Display the record actual in preference to the chart value, which may be
// different depending on the aggregation interval of the chart. // different depending on the aggregation interval of the chart.
tooltipData.push({ tooltipData.push({
@ -1421,7 +1419,7 @@ class TimeseriesChartIntl extends Component {
}, },
valueAccessor: 'value', valueAccessor: 'value',
}); });
if (_.has(marker, 'byFieldName') && _.has(marker, 'numberOfCauses')) { if (marker.byFieldName !== undefined && marker.numberOfCauses !== undefined) {
const numberOfCauses = marker.numberOfCauses; const numberOfCauses = marker.numberOfCauses;
// If numberOfCauses === 1, won't go into this block as actual/typical copied to top level fields. // If numberOfCauses === 1, won't go into this block as actual/typical copied to top level fields.
const byFieldName = mlEscape(marker.byFieldName); const byFieldName = mlEscape(marker.byFieldName);
@ -1488,7 +1486,7 @@ class TimeseriesChartIntl extends Component {
} }
} else { } else {
// TODO - need better formatting for small decimals. // TODO - need better formatting for small decimals.
if (_.get(marker, 'isForecast', false) === true) { if (get(marker, 'isForecast', false) === true) {
tooltipData.push({ tooltipData.push({
label: i18n.translate( label: i18n.translate(
'xpack.ml.timeSeriesExplorer.timeSeriesChart.withoutAnomalyScore.predictionLabel', 'xpack.ml.timeSeriesExplorer.timeSeriesChart.withoutAnomalyScore.predictionLabel',
@ -1548,7 +1546,7 @@ class TimeseriesChartIntl extends Component {
} }
} }
if (_.has(marker, 'scheduledEvents')) { if (marker.scheduledEvents !== undefined) {
marker.scheduledEvents.forEach((scheduledEvent, i) => { marker.scheduledEvents.forEach((scheduledEvent, i) => {
tooltipData.push({ tooltipData.push({
label: i18n.translate( label: i18n.translate(
@ -1569,7 +1567,7 @@ class TimeseriesChartIntl extends Component {
}); });
} }
if (_.has(marker, 'annotation')) { if (marker.annotation !== undefined) {
tooltipData.length = 0; tooltipData.length = 0;
// header // header
tooltipData.push({ tooltipData.push({

View file

@ -4,7 +4,10 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import each from 'lodash/each';
import find from 'lodash/find';
import get from 'lodash/get';
import filter from 'lodash/filter';
import { Observable } from 'rxjs'; import { Observable } from 'rxjs';
import { map } from 'rxjs/operators'; import { map } from 'rxjs/operators';
@ -35,8 +38,8 @@ function getMetricData(
// Extract the partition, by, over fields on which to filter. // Extract the partition, by, over fields on which to filter.
const criteriaFields = []; const criteriaFields = [];
const detector = job.analysis_config.detectors[detectorIndex]; const detector = job.analysis_config.detectors[detectorIndex];
if (_.has(detector, 'partition_field_name')) { if (detector.partition_field_name !== undefined) {
const partitionEntity: any = _.find(entityFields, { const partitionEntity: any = find(entityFields, {
fieldName: detector.partition_field_name, fieldName: detector.partition_field_name,
}); });
if (partitionEntity !== undefined) { if (partitionEntity !== undefined) {
@ -47,8 +50,8 @@ function getMetricData(
} }
} }
if (_.has(detector, 'over_field_name')) { if (detector.over_field_name !== undefined) {
const overEntity: any = _.find(entityFields, { fieldName: detector.over_field_name }); const overEntity: any = find(entityFields, { fieldName: detector.over_field_name });
if (overEntity !== undefined) { if (overEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'over_field_name', fieldValue: overEntity.fieldName }, { fieldName: 'over_field_name', fieldValue: overEntity.fieldName },
@ -57,8 +60,8 @@ function getMetricData(
} }
} }
if (_.has(detector, 'by_field_name')) { if (detector.by_field_name !== undefined) {
const byEntity: any = _.find(entityFields, { fieldName: detector.by_field_name }); const byEntity: any = find(entityFields, { fieldName: detector.by_field_name });
if (byEntity !== undefined) { if (byEntity !== undefined) {
criteriaFields.push( criteriaFields.push(
{ fieldName: 'by_field_name', fieldValue: byEntity.fieldName }, { fieldName: 'by_field_name', fieldValue: byEntity.fieldName },
@ -97,7 +100,7 @@ function getMetricData(
) )
.pipe( .pipe(
map((resp) => { map((resp) => {
_.each(resp.results, (value, time) => { each(resp.results, (value, time) => {
// @ts-ignore // @ts-ignore
obj.results[time] = { obj.results[time] = {
actual: value, actual: value,
@ -134,7 +137,7 @@ function getChartDetails(
} }
obj.results.functionLabel = functionLabel; obj.results.functionLabel = functionLabel;
const blankEntityFields = _.filter(entityFields, (entity) => { const blankEntityFields = filter(entityFields, (entity) => {
return entity.fieldValue === null; return entity.fieldValue === null;
}); });
@ -145,7 +148,7 @@ function getChartDetails(
obj.results.entityData.entities = entityFields; obj.results.entityData.entities = entityFields;
resolve(obj); resolve(obj);
} else { } else {
const entityFieldNames: string[] = _.map(blankEntityFields, 'fieldName'); const entityFieldNames: string[] = blankEntityFields.map((f) => f.fieldName);
ml.getCardinalityOfFields({ ml.getCardinalityOfFields({
index: chartConfig.datafeedConfig.indices, index: chartConfig.datafeedConfig.indices,
fieldNames: entityFieldNames, fieldNames: entityFieldNames,
@ -155,12 +158,12 @@ function getChartDetails(
latestMs, latestMs,
}) })
.then((results: any) => { .then((results: any) => {
_.each(blankEntityFields, (field) => { each(blankEntityFields, (field) => {
// results will not contain keys for non-aggregatable fields, // results will not contain keys for non-aggregatable fields,
// so store as 0 to indicate over all field values. // so store as 0 to indicate over all field values.
obj.results.entityData.entities.push({ obj.results.entityData.entities.push({
fieldName: field.fieldName, fieldName: field.fieldName,
cardinality: _.get(results, field.fieldName, 0), cardinality: get(results, field.fieldName, 0),
}); });
}); });

View file

@ -10,7 +10,9 @@
* Viewer dashboard. * Viewer dashboard.
*/ */
import _ from 'lodash'; import each from 'lodash/each';
import get from 'lodash/get';
import find from 'lodash/find';
import moment from 'moment-timezone'; import moment from 'moment-timezone';
import { isTimeSeriesViewJob } from '../../../../common/util/job_utils'; import { isTimeSeriesViewJob } from '../../../../common/util/job_utils';
@ -41,7 +43,7 @@ export function createTimeSeriesJobData(jobs) {
export function processMetricPlotResults(metricPlotData, modelPlotEnabled) { export function processMetricPlotResults(metricPlotData, modelPlotEnabled) {
const metricPlotChartData = []; const metricPlotChartData = [];
if (modelPlotEnabled === true) { if (modelPlotEnabled === true) {
_.each(metricPlotData, (dataForTime, time) => { each(metricPlotData, (dataForTime, time) => {
metricPlotChartData.push({ metricPlotChartData.push({
date: new Date(+time), date: new Date(+time),
lower: dataForTime.modelLower, lower: dataForTime.modelLower,
@ -50,7 +52,7 @@ export function processMetricPlotResults(metricPlotData, modelPlotEnabled) {
}); });
}); });
} else { } else {
_.each(metricPlotData, (dataForTime, time) => { each(metricPlotData, (dataForTime, time) => {
metricPlotChartData.push({ metricPlotChartData.push({
date: new Date(+time), date: new Date(+time),
value: dataForTime.actual, value: dataForTime.actual,
@ -66,7 +68,7 @@ export function processMetricPlotResults(metricPlotData, modelPlotEnabled) {
// value, lower and upper keys. // value, lower and upper keys.
export function processForecastResults(forecastData) { export function processForecastResults(forecastData) {
const forecastPlotChartData = []; const forecastPlotChartData = [];
_.each(forecastData, (dataForTime, time) => { each(forecastData, (dataForTime, time) => {
forecastPlotChartData.push({ forecastPlotChartData.push({
date: new Date(+time), date: new Date(+time),
isForecast: true, isForecast: true,
@ -83,7 +85,7 @@ export function processForecastResults(forecastData) {
// i.e. array of Objects with keys date (JavaScript date) and score. // i.e. array of Objects with keys date (JavaScript date) and score.
export function processRecordScoreResults(scoreData) { export function processRecordScoreResults(scoreData) {
const bucketScoreData = []; const bucketScoreData = [];
_.each(scoreData, (dataForTime, time) => { each(scoreData, (dataForTime, time) => {
bucketScoreData.push({ bucketScoreData.push({
date: new Date(+time), date: new Date(+time),
score: dataForTime.score, score: dataForTime.score,
@ -153,7 +155,7 @@ export function processDataForFocusAnomalies(
chartPoint.anomalyScore = recordScore; chartPoint.anomalyScore = recordScore;
chartPoint.function = record.function; chartPoint.function = record.function;
if (_.has(record, 'actual')) { if (record.actual !== undefined) {
// If cannot match chart point for anomaly time // If cannot match chart point for anomaly time
// substitute the value with the record's actual so it won't plot as null/0 // substitute the value with the record's actual so it won't plot as null/0
if (chartPoint.value === null) { if (chartPoint.value === null) {
@ -163,13 +165,13 @@ export function processDataForFocusAnomalies(
chartPoint.actual = record.actual; chartPoint.actual = record.actual;
chartPoint.typical = record.typical; chartPoint.typical = record.typical;
} else { } else {
const causes = _.get(record, 'causes', []); const causes = get(record, 'causes', []);
if (causes.length > 0) { if (causes.length > 0) {
chartPoint.byFieldName = record.by_field_name; chartPoint.byFieldName = record.by_field_name;
chartPoint.numberOfCauses = causes.length; chartPoint.numberOfCauses = causes.length;
if (causes.length === 1) { if (causes.length === 1) {
// If only a single cause, copy actual and typical values to the top level. // If only a single cause, copy actual and typical values to the top level.
const cause = _.first(record.causes); const cause = record.causes[0];
chartPoint.actual = cause.actual; chartPoint.actual = cause.actual;
chartPoint.typical = cause.typical; chartPoint.typical = cause.typical;
// substitute the value with the record's actual so it won't plot as null/0 // substitute the value with the record's actual so it won't plot as null/0
@ -180,7 +182,7 @@ export function processDataForFocusAnomalies(
} }
} }
if (_.has(record, 'multi_bucket_impact')) { if (record.multi_bucket_impact !== undefined) {
chartPoint.multiBucketImpact = record.multi_bucket_impact; chartPoint.multiBucketImpact = record.multi_bucket_impact;
} }
} }
@ -194,7 +196,7 @@ export function processDataForFocusAnomalies(
// which correspond to times of scheduled events for the job. // which correspond to times of scheduled events for the job.
export function processScheduledEventsForChart(chartData, scheduledEvents) { export function processScheduledEventsForChart(chartData, scheduledEvents) {
if (scheduledEvents !== undefined) { if (scheduledEvents !== undefined) {
_.each(scheduledEvents, (events, time) => { each(scheduledEvents, (events, time) => {
const chartPoint = findNearestChartPointToTime(chartData, time); const chartPoint = findNearestChartPointToTime(chartData, time);
if (chartPoint !== undefined) { if (chartPoint !== undefined) {
// Note if the scheduled event coincides with an absence of the underlying metric data, // Note if the scheduled event coincides with an absence of the underlying metric data,
@ -301,7 +303,7 @@ export function calculateAggregationInterval(bounds, bucketsTarget, jobs, select
// Ensure the aggregation interval is always a multiple of the bucket span to avoid strange // Ensure the aggregation interval is always a multiple of the bucket span to avoid strange
// behaviour such as adjacent chart buckets holding different numbers of job results. // behaviour such as adjacent chart buckets holding different numbers of job results.
const bucketSpanSeconds = _.find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds; const bucketSpanSeconds = find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds;
let aggInterval = buckets.getIntervalToNearestMultiple(bucketSpanSeconds); let aggInterval = buckets.getIntervalToNearestMultiple(bucketSpanSeconds);
// Set the interval back to the job bucket span if the auto interval is smaller. // Set the interval back to the job bucket span if the auto interval is smaller.
@ -324,8 +326,8 @@ export function calculateDefaultFocusRange(
const combinedData = const combinedData =
isForecastData === false ? contextChartData : contextChartData.concat(contextForecastData); isForecastData === false ? contextChartData : contextChartData.concat(contextForecastData);
const earliestDataDate = _.first(combinedData).date; const earliestDataDate = combinedData[0].date;
const latestDataDate = _.last(combinedData).date; const latestDataDate = combinedData[combinedData.length - 1].date;
let rangeEarliestMs; let rangeEarliestMs;
let rangeLatestMs; let rangeLatestMs;
@ -333,8 +335,8 @@ export function calculateDefaultFocusRange(
if (isForecastData === true) { if (isForecastData === true) {
// Return a range centred on the start of the forecast range, depending // Return a range centred on the start of the forecast range, depending
// on the time range of the forecast and data. // on the time range of the forecast and data.
const earliestForecastDataDate = _.first(contextForecastData).date; const earliestForecastDataDate = contextForecastData[0].date;
const latestForecastDataDate = _.last(contextForecastData).date; const latestForecastDataDate = contextForecastData[contextForecastData.length - 1].date;
rangeLatestMs = Math.min( rangeLatestMs = Math.min(
earliestForecastDataDate.getTime() + autoZoomDuration / 2, earliestForecastDataDate.getTime() + autoZoomDuration / 2,
@ -379,7 +381,7 @@ export function getAutoZoomDuration(jobs, selectedJob) {
// Calculate the 'auto' zoom duration which shows data at bucket span granularity. // Calculate the 'auto' zoom duration which shows data at bucket span granularity.
// Get the minimum bucket span of selected jobs. // Get the minimum bucket span of selected jobs.
// TODO - only look at jobs for which data has been returned? // TODO - only look at jobs for which data has been returned?
const bucketSpanSeconds = _.find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds; const bucketSpanSeconds = find(jobs, { id: selectedJob.job_id }).bucketSpanSeconds;
// In most cases the duration can be obtained by simply multiplying the points target // In most cases the duration can be obtained by simply multiplying the points target
// Check that this duration returns the bucket span when run back through the // Check that this duration returns the bucket span when run back through the

View file

@ -9,7 +9,7 @@
* in the source metric data. * in the source metric data.
*/ */
import _ from 'lodash'; import get from 'lodash/get';
import { mlFunctionToESAggregation } from '../../../common/util/job_utils'; import { mlFunctionToESAggregation } from '../../../common/util/job_utils';
@ -44,15 +44,16 @@ export function buildConfigFromDetector(job, detectorIndex) {
// aggregations/<agg_name>/aggregations/<summaryCountFieldName>/cardinality/field // aggregations/<agg_name>/aggregations/<summaryCountFieldName>/cardinality/field
// or aggs/<agg_name>/aggs/<summaryCountFieldName>/cardinality/field // or aggs/<agg_name>/aggs/<summaryCountFieldName>/cardinality/field
let cardinalityField = undefined; let cardinalityField = undefined;
const topAgg = _.get(job.datafeed_config, 'aggregations') || _.get(job.datafeed_config, 'aggs'); const topAgg = get(job.datafeed_config, 'aggregations') || get(job.datafeed_config, 'aggs');
if (topAgg !== undefined && _.values(topAgg).length > 0) { if (topAgg !== undefined && Object.values(topAgg).length > 0) {
cardinalityField = cardinalityField =
_.get(_.values(topAgg)[0], [ get(Object.values(topAgg)[0], [
'aggregations', 'aggregations',
summaryCountFieldName, summaryCountFieldName,
'cardinality', 'cardinality',
'field', 'field',
]) || _.get(_.values(topAgg)[0], ['aggs', summaryCountFieldName, 'cardinality', 'field']); ]) ||
get(Object.values(topAgg)[0], ['aggs', summaryCountFieldName, 'cardinality', 'field']);
} }
if (detector.function === 'non_zero_count' && cardinalityField !== undefined) { if (detector.function === 'non_zero_count' && cardinalityField !== undefined) {

View file

@ -1,36 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// create a property descriptor for properties
// that won't change
function describeConst(val) {
return {
writable: false,
enumerable: false,
configurable: false,
value: val,
};
}
/**
* Apply inheritance in the legacy `_.class(SubClass).inherits(SuperClass)`
* @param {Function} SubClass class that should inherit SuperClass
* @param {Function} SuperClass
* @return {Function}
*/
export function inherits(SubClass, SuperClass) {
const prototype = Object.create(SuperClass.prototype, {
constructor: describeConst(SubClass),
superConstructor: describeConst(SuperClass),
});
Object.defineProperties(SubClass, {
prototype: describeConst(prototype),
Super: describeConst(SuperClass),
});
return SubClass;
}

View file

@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import isPlainObject from 'lodash/isPlainObject';
import isString from 'lodash/isString';
import ary from 'lodash/ary';
import sortBy from 'lodash/sortBy';
import assign from 'lodash/assign';
import moment from 'moment'; import moment from 'moment';
import dateMath from '@elastic/datemath'; import dateMath from '@elastic/datemath';
@ -80,16 +84,16 @@ TimeBuckets.prototype.setBounds = function (input) {
if (!input) return this.clearBounds(); if (!input) return this.clearBounds();
let bounds; let bounds;
if (_.isPlainObject(input)) { if (isPlainObject(input)) {
// accept the response from timefilter.getActiveBounds() // accept the response from timefilter.getActiveBounds()
bounds = [input.min, input.max]; bounds = [input.min, input.max];
} else { } else {
bounds = Array.isArray(input) ? input : []; bounds = Array.isArray(input) ? input : [];
} }
const moments = _(bounds).map(_.ary(moment, 1)).sortBy(Number); const moments = sortBy(bounds.map(ary(moment, 1)), Number);
const valid = moments.size() === 2 && moments.every(isValidMoment); const valid = moments.length === 2 && moments.every(isValidMoment);
if (!valid) { if (!valid) {
this.clearBounds(); this.clearBounds();
throw new Error('invalid bounds set: ' + input); throw new Error('invalid bounds set: ' + input);
@ -175,7 +179,7 @@ TimeBuckets.prototype.setInterval = function (input) {
return; return;
} }
if (_.isString(interval)) { if (isString(interval)) {
input = interval; input = interval;
interval = parseInterval(interval); interval = parseInterval(interval);
if (+interval === 0) { if (+interval === 0) {
@ -256,7 +260,7 @@ TimeBuckets.prototype.getInterval = function () {
if (+scaled === +interval) return interval; if (+scaled === +interval) return interval;
decorateInterval(interval, duration); decorateInterval(interval, duration);
return _.assign(scaled, { return assign(scaled, {
preScaled: interval, preScaled: interval,
scale: interval / scaled, scale: interval / scaled,
scaled: true, scaled: true,
@ -287,7 +291,7 @@ TimeBuckets.prototype.getIntervalToNearestMultiple = function (divisorSecs) {
decorateInterval(nearestMultipleInt, this.getDuration()); decorateInterval(nearestMultipleInt, this.getDuration());
// Check to see if the new interval is scaled compared to the original. // Check to see if the new interval is scaled compared to the original.
const preScaled = _.get(interval, 'preScaled'); const preScaled = interval.preScaled;
if (preScaled !== undefined && preScaled < nearestMultipleInt) { if (preScaled !== undefined && preScaled < nearestMultipleInt) {
nearestMultipleInt.preScaled = preScaled; nearestMultipleInt.preScaled = preScaled;
nearestMultipleInt.scale = preScaled / nearestMultipleInt; nearestMultipleInt.scale = preScaled / nearestMultipleInt;

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import isEmpty from 'lodash/isEmpty';
import { ISavedObjectsRepository } from 'kibana/server'; import { ISavedObjectsRepository } from 'kibana/server';
import { getInternalRepository } from './internal_repository'; import { getInternalRepository } from './internal_repository';
@ -58,7 +58,7 @@ export async function updateTelemetry(internalRepo?: ISavedObjectsRepository) {
let telemetry = await getTelemetry(internalRepository); let telemetry = await getTelemetry(internalRepository);
// Create if doesn't exist // Create if doesn't exist
if (telemetry === null || _.isEmpty(telemetry)) { if (telemetry === null || isEmpty(telemetry)) {
const newTelemetrySavedObject = await internalRepository.create( const newTelemetrySavedObject = await internalRepository.create(
TELEMETRY_DOC_ID, TELEMETRY_DOC_ID,
initTelemetry(), initTelemetry(),

View file

@ -5,7 +5,8 @@
*/ */
import Boom from 'boom'; import Boom from 'boom';
import _ from 'lodash'; import each from 'lodash/each';
import get from 'lodash/get';
import { ILegacyScopedClusterClient } from 'kibana/server'; import { ILegacyScopedClusterClient } from 'kibana/server';
import { ANNOTATION_EVENT_USER, ANNOTATION_TYPE } from '../../../common/constants/annotations'; import { ANNOTATION_EVENT_USER, ANNOTATION_TYPE } from '../../../common/constants/annotations';
@ -190,7 +191,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) { if (jobIds && jobIds.length > 0 && !(jobIds.length === 1 && jobIds[0] === '*')) {
let jobIdFilterStr = ''; let jobIdFilterStr = '';
_.each(jobIds, (jobId, i: number) => { each(jobIds, (jobId, i: number) => {
jobIdFilterStr += `${i! > 0 ? ' OR ' : ''}job_id:${jobId}`; jobIdFilterStr += `${i! > 0 ? ' OR ' : ''}job_id:${jobId}`;
}); });
boolCriteria.push({ boolCriteria.push({
@ -293,7 +294,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
throw new Error(`Annotations couldn't be retrieved from Elasticsearch.`); throw new Error(`Annotations couldn't be retrieved from Elasticsearch.`);
} }
const docs: Annotations = _.get(resp, ['hits', 'hits'], []).map((d: EsResult) => { const docs: Annotations = get(resp, ['hits', 'hits'], []).map((d: EsResult) => {
// get the original source document and the document id, we need it // get the original source document and the document id, we need it
// to identify the annotation when editing/deleting it. // to identify the annotation when editing/deleting it.
// if original `event` is undefined then substitute with 'user` by default // if original `event` is undefined then substitute with 'user` by default
@ -305,7 +306,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
} as Annotation; } as Annotation;
}); });
const aggregations = _.get(resp, ['aggregations'], {}) as EsAggregationResult; const aggregations = get(resp, ['aggregations'], {}) as EsAggregationResult;
if (fields) { if (fields) {
obj.aggregations = aggregations; obj.aggregations = aggregations;
} }

View file

@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import each from 'lodash/each';
import remove from 'lodash/remove';
import sortBy from 'lodash/sortBy';
import get from 'lodash/get';
import { mlLog } from '../../client/log'; import { mlLog } from '../../client/log';
@ -91,7 +95,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
} else { } else {
// loop over partition values // loop over partition values
for (let j = 0; j < this.splitFieldValues.length; j++) { for (let j = 0; j < this.splitFieldValues.length; j++) {
const queryCopy = _.cloneDeep(this.query); const queryCopy = cloneDeep(this.query);
// add a term to the query to filter on the partition value // add a term to the query to filter on the partition value
queryCopy.bool.must.push({ queryCopy.bool.must.push({
term: { term: {
@ -151,7 +155,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
} }
}; };
_.each(this.checkers, (check) => { each(this.checkers, (check) => {
check.check check.check
.run() .run()
.then((interval) => { .then((interval) => {
@ -174,7 +178,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
} }
processResults() { processResults() {
const allResults = _.map(this.checkers, 'result'); const allResults = this.checkers.map((c) => c.result);
let reducedResults = []; let reducedResults = [];
const numberOfSplitFields = this.splitFieldValues.length || 1; const numberOfSplitFields = this.splitFieldValues.length || 1;
@ -185,8 +189,8 @@ export function estimateBucketSpanFactory(mlClusterClient) {
const pos = i * numberOfSplitFields; const pos = i * numberOfSplitFields;
let resultsSubset = allResults.slice(pos, pos + numberOfSplitFields); let resultsSubset = allResults.slice(pos, pos + numberOfSplitFields);
// remove results of tests which have failed // remove results of tests which have failed
resultsSubset = _.remove(resultsSubset, (res) => res !== null); resultsSubset = remove(resultsSubset, (res) => res !== null);
resultsSubset = _.sortBy(resultsSubset, (r) => r.ms); resultsSubset = sortBy(resultsSubset, (r) => r.ms);
const tempMedian = this.findMedian(resultsSubset); const tempMedian = this.findMedian(resultsSubset);
if (tempMedian !== null) { if (tempMedian !== null) {
@ -194,7 +198,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
} }
} }
reducedResults = _.sortBy(reducedResults, (r) => r.ms); reducedResults = sortBy(reducedResults, (r) => r.ms);
return this.findMedian(reducedResults); return this.findMedian(reducedResults);
} }
@ -256,7 +260,7 @@ export function estimateBucketSpanFactory(mlClusterClient) {
}, },
}) })
.then((resp) => { .then((resp) => {
const value = _.get(resp, ['aggregations', 'field_count', 'value'], 0); const value = get(resp, ['aggregations', 'field_count', 'value'], 0);
resolve(value); resolve(value);
}) })
.catch((resp) => { .catch((resp) => {
@ -293,9 +297,10 @@ export function estimateBucketSpanFactory(mlClusterClient) {
}, },
}) })
.then((partitionResp) => { .then((partitionResp) => {
if (_.has(partitionResp, 'aggregations.fields_bucket_counts.buckets')) { // eslint-disable-next-line camelcase
if (partitionResp.aggregations?.fields_bucket_counts?.buckets !== undefined) {
const buckets = partitionResp.aggregations.fields_bucket_counts.buckets; const buckets = partitionResp.aggregations.fields_bucket_counts.buckets;
fieldValues = _.map(buckets, (b) => b.key); fieldValues = buckets.map((b) => b.key);
} }
resolve(fieldValues); resolve(fieldValues);
}) })

View file

@ -10,7 +10,7 @@
* And a minimum bucket span * And a minimum bucket span
*/ */
import _ from 'lodash'; import get from 'lodash/get';
export function polledDataCheckerFactory({ callAsCurrentUser }) { export function polledDataCheckerFactory({ callAsCurrentUser }) {
class PolledDataChecker { class PolledDataChecker {
@ -29,7 +29,7 @@ export function polledDataCheckerFactory({ callAsCurrentUser }) {
const interval = { name: '1m', ms: 60000 }; const interval = { name: '1m', ms: 60000 };
this.performSearch(interval.ms) this.performSearch(interval.ms)
.then((resp) => { .then((resp) => {
const fullBuckets = _.get(resp, 'aggregations.non_empty_buckets.buckets', []); const fullBuckets = get(resp, 'aggregations.non_empty_buckets.buckets', []);
const result = this.isPolledData(fullBuckets, interval); const result = this.isPolledData(fullBuckets, interval);
if (result.pass) { if (result.pass) {
// data is polled, return a flag and the minimumBucketSpan which should be // data is polled, return a flag and the minimumBucketSpan which should be

View file

@ -5,7 +5,10 @@
*/ */
import { ILegacyScopedClusterClient } from 'kibana/server'; import { ILegacyScopedClusterClient } from 'kibana/server';
import _ from 'lodash'; import get from 'lodash/get';
import each from 'lodash/each';
import last from 'lodash/last';
import find from 'lodash/find';
import { KBN_FIELD_TYPES } from '../../../../../../src/plugins/data/server'; import { KBN_FIELD_TYPES } from '../../../../../../src/plugins/data/server';
import { ML_JOB_FIELD_TYPES } from '../../../common/constants/field_types'; import { ML_JOB_FIELD_TYPES } from '../../../common/constants/field_types';
import { getSafeAggregationName } from '../../../common/util/job_utils'; import { getSafeAggregationName } from '../../../common/util/job_utils';
@ -216,7 +219,7 @@ const getAggIntervals = async (
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const aggregations = const aggregations =
aggsPath.length > 0 ? _.get(respStats.aggregations, aggsPath) : respStats.aggregations; aggsPath.length > 0 ? get(respStats.aggregations, aggsPath) : respStats.aggregations;
return Object.keys(aggregations).reduce((p, aggName) => { return Object.keys(aggregations).reduce((p, aggName) => {
const stats = [aggregations[aggName].min, aggregations[aggName].max]; const stats = [aggregations[aggName].min, aggregations[aggName].max];
@ -300,9 +303,7 @@ export const getHistogramsForFields = async (
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const aggregations = const aggregations =
aggsPath.length > 0 aggsPath.length > 0 ? get(respChartsData.aggregations, aggsPath) : respChartsData.aggregations;
? _.get(respChartsData.aggregations, aggsPath)
: respChartsData.aggregations;
const chartsData: ChartData[] = fields.map( const chartsData: ChartData[] = fields.map(
(field): ChartData => { (field): ChartData => {
@ -382,8 +383,8 @@ export class DataVisualizer {
// To avoid checking for the existence of too many aggregatable fields in one request, // To avoid checking for the existence of too many aggregatable fields in one request,
// split the check into multiple batches (max 200 fields per request). // split the check into multiple batches (max 200 fields per request).
const batches: string[][] = [[]]; const batches: string[][] = [[]];
_.each(aggregatableFields, (field) => { each(aggregatableFields, (field) => {
let lastArray: string[] = _.last(batches) as string[]; let lastArray: string[] = last(batches) as string[];
if (lastArray.length === AGGREGATABLE_EXISTS_REQUEST_BATCH_SIZE) { if (lastArray.length === AGGREGATABLE_EXISTS_REQUEST_BATCH_SIZE) {
lastArray = []; lastArray = [];
batches.push(lastArray); batches.push(lastArray);
@ -475,7 +476,7 @@ export class DataVisualizer {
// Batch up fields by type, getting stats for multiple fields at a time. // Batch up fields by type, getting stats for multiple fields at a time.
const batches: Field[][] = []; const batches: Field[][] = [];
const batchedFields: { [key: string]: Field[][] } = {}; const batchedFields: { [key: string]: Field[][] } = {};
_.each(fields, (field) => { each(fields, (field) => {
if (field.fieldName === undefined) { if (field.fieldName === undefined) {
// undefined fieldName is used for a document count request. // undefined fieldName is used for a document count request.
// getDocumentCountStats requires timeField - don't add to batched requests if not defined // getDocumentCountStats requires timeField - don't add to batched requests if not defined
@ -487,7 +488,7 @@ export class DataVisualizer {
if (batchedFields[fieldType] === undefined) { if (batchedFields[fieldType] === undefined) {
batchedFields[fieldType] = [[]]; batchedFields[fieldType] = [[]];
} }
let lastArray: Field[] = _.last(batchedFields[fieldType]) as Field[]; let lastArray: Field[] = last(batchedFields[fieldType]) as Field[];
if (lastArray.length === FIELDS_REQUEST_BATCH_SIZE) { if (lastArray.length === FIELDS_REQUEST_BATCH_SIZE) {
lastArray = []; lastArray = [];
batchedFields[fieldType].push(lastArray); batchedFields[fieldType].push(lastArray);
@ -496,7 +497,7 @@ export class DataVisualizer {
} }
}); });
_.each(batchedFields, (lists) => { each(batchedFields, (lists) => {
batches.push(...lists); batches.push(...lists);
}); });
@ -636,7 +637,7 @@ export class DataVisualizer {
body, body,
}); });
const aggregations = resp.aggregations; const aggregations = resp.aggregations;
const totalCount = _.get(resp, ['hits', 'total'], 0); const totalCount = get(resp, ['hits', 'total'], 0);
const stats = { const stats = {
totalCount, totalCount,
aggregatableExistsFields: [] as FieldData[], aggregatableExistsFields: [] as FieldData[],
@ -645,12 +646,12 @@ export class DataVisualizer {
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const sampleCount = const sampleCount =
samplerShardSize > 0 ? _.get(aggregations, ['sample', 'doc_count'], 0) : totalCount; samplerShardSize > 0 ? get(aggregations, ['sample', 'doc_count'], 0) : totalCount;
aggregatableFields.forEach((field, i) => { aggregatableFields.forEach((field, i) => {
const safeFieldName = getSafeAggregationName(field, i); const safeFieldName = getSafeAggregationName(field, i);
const count = _.get(aggregations, [...aggsPath, `${safeFieldName}_count`, 'doc_count'], 0); const count = get(aggregations, [...aggsPath, `${safeFieldName}_count`, 'doc_count'], 0);
if (count > 0) { if (count > 0) {
const cardinality = _.get( const cardinality = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_cardinality`, 'value'], [...aggsPath, `${safeFieldName}_cardinality`, 'value'],
0 0
@ -745,12 +746,12 @@ export class DataVisualizer {
}); });
const buckets: { [key: string]: number } = {}; const buckets: { [key: string]: number } = {};
const dataByTimeBucket: Array<{ key: string; doc_count: number }> = _.get( const dataByTimeBucket: Array<{ key: string; doc_count: number }> = get(
resp, resp,
['aggregations', 'eventRate', 'buckets'], ['aggregations', 'eventRate', 'buckets'],
[] []
); );
_.each(dataByTimeBucket, (dataForTime) => { each(dataByTimeBucket, (dataForTime) => {
const time = dataForTime.key; const time = dataForTime.key;
buckets[time] = dataForTime.doc_count; buckets[time] = dataForTime.doc_count;
}); });
@ -851,12 +852,12 @@ export class DataVisualizer {
const batchStats: NumericFieldStats[] = []; const batchStats: NumericFieldStats[] = [];
fields.forEach((field, i) => { fields.forEach((field, i) => {
const safeFieldName = getSafeAggregationName(field.fieldName, i); const safeFieldName = getSafeAggregationName(field.fieldName, i);
const docCount = _.get( const docCount = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'doc_count'], [...aggsPath, `${safeFieldName}_field_stats`, 'doc_count'],
0 0
); );
const fieldStatsResp = _.get( const fieldStatsResp = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'actual_stats'], [...aggsPath, `${safeFieldName}_field_stats`, 'actual_stats'],
{} {}
@ -867,20 +868,20 @@ export class DataVisualizer {
topAggsPath.push('top'); topAggsPath.push('top');
} }
const topValues: Bucket[] = _.get(aggregations, [...topAggsPath, 'buckets'], []); const topValues: Bucket[] = get(aggregations, [...topAggsPath, 'buckets'], []);
const stats: NumericFieldStats = { const stats: NumericFieldStats = {
fieldName: field.fieldName, fieldName: field.fieldName,
count: docCount, count: docCount,
min: _.get(fieldStatsResp, 'min', 0), min: get(fieldStatsResp, 'min', 0),
max: _.get(fieldStatsResp, 'max', 0), max: get(fieldStatsResp, 'max', 0),
avg: _.get(fieldStatsResp, 'avg', 0), avg: get(fieldStatsResp, 'avg', 0),
isTopValuesSampled: isTopValuesSampled:
field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD || samplerShardSize > 0, field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD || samplerShardSize > 0,
topValues, topValues,
topValuesSampleSize: topValues.reduce( topValuesSampleSize: topValues.reduce(
(acc, curr) => acc + curr.doc_count, (acc, curr) => acc + curr.doc_count,
_.get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0) get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0)
), ),
topValuesSamplerShardSize: topValuesSamplerShardSize:
field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD
@ -889,12 +890,12 @@ export class DataVisualizer {
}; };
if (stats.count > 0) { if (stats.count > 0) {
const percentiles = _.get( const percentiles = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_percentiles`, 'values'], [...aggsPath, `${safeFieldName}_percentiles`, 'values'],
[] []
); );
const medianPercentile: { value: number; key: number } | undefined = _.find(percentiles, { const medianPercentile: { value: number; key: number } | undefined = find(percentiles, {
key: 50, key: 50,
}); });
stats.median = medianPercentile !== undefined ? medianPercentile!.value : 0; stats.median = medianPercentile !== undefined ? medianPercentile!.value : 0;
@ -978,7 +979,7 @@ export class DataVisualizer {
topAggsPath.push('top'); topAggsPath.push('top');
} }
const topValues: Bucket[] = _.get(aggregations, [...topAggsPath, 'buckets'], []); const topValues: Bucket[] = get(aggregations, [...topAggsPath, 'buckets'], []);
const stats = { const stats = {
fieldName: field.fieldName, fieldName: field.fieldName,
@ -987,7 +988,7 @@ export class DataVisualizer {
topValues, topValues,
topValuesSampleSize: topValues.reduce( topValuesSampleSize: topValues.reduce(
(acc, curr) => acc + curr.doc_count, (acc, curr) => acc + curr.doc_count,
_.get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0) get(aggregations, [...topAggsPath, 'sum_other_doc_count'], 0)
), ),
topValuesSamplerShardSize: topValuesSamplerShardSize:
field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD field.cardinality >= SAMPLER_TOP_TERMS_THRESHOLD
@ -1046,12 +1047,12 @@ export class DataVisualizer {
const batchStats: DateFieldStats[] = []; const batchStats: DateFieldStats[] = [];
fields.forEach((field, i) => { fields.forEach((field, i) => {
const safeFieldName = getSafeAggregationName(field.fieldName, i); const safeFieldName = getSafeAggregationName(field.fieldName, i);
const docCount = _.get( const docCount = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'doc_count'], [...aggsPath, `${safeFieldName}_field_stats`, 'doc_count'],
0 0
); );
const fieldStatsResp = _.get( const fieldStatsResp = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_field_stats`, 'actual_stats'], [...aggsPath, `${safeFieldName}_field_stats`, 'actual_stats'],
{} {}
@ -1059,8 +1060,8 @@ export class DataVisualizer {
batchStats.push({ batchStats.push({
fieldName: field.fieldName, fieldName: field.fieldName,
count: docCount, count: docCount,
earliest: _.get(fieldStatsResp, 'min', 0), earliest: get(fieldStatsResp, 'min', 0),
latest: _.get(fieldStatsResp, 'max', 0), latest: get(fieldStatsResp, 'max', 0),
}); });
}); });
@ -1115,17 +1116,17 @@ export class DataVisualizer {
const safeFieldName = getSafeAggregationName(field.fieldName, i); const safeFieldName = getSafeAggregationName(field.fieldName, i);
const stats: BooleanFieldStats = { const stats: BooleanFieldStats = {
fieldName: field.fieldName, fieldName: field.fieldName,
count: _.get(aggregations, [...aggsPath, `${safeFieldName}_value_count`, 'doc_count'], 0), count: get(aggregations, [...aggsPath, `${safeFieldName}_value_count`, 'doc_count'], 0),
trueCount: 0, trueCount: 0,
falseCount: 0, falseCount: 0,
}; };
const valueBuckets: Array<{ [key: string]: number }> = _.get( const valueBuckets: Array<{ [key: string]: number }> = get(
aggregations, aggregations,
[...aggsPath, `${safeFieldName}_values`, 'buckets'], [...aggsPath, `${safeFieldName}_values`, 'buckets'],
[] []
); );
_.forEach(valueBuckets, (bucket) => { valueBuckets.forEach((bucket) => {
stats[`${bucket.key_as_string}Count`] = bucket.doc_count; stats[`${bucket.key_as_string}Count`] = bucket.doc_count;
}); });
@ -1182,8 +1183,8 @@ export class DataVisualizer {
// If the field is not in the _source (as will happen if the // If the field is not in the _source (as will happen if the
// field is populated using copy_to in the index mapping), // field is populated using copy_to in the index mapping),
// there will be no example to add. // there will be no example to add.
// Use lodash _.get() to support field names containing dots. // Use lodash get() to support field names containing dots.
const example: any = _.get(hits[i]._source, field); const example: any = get(hits[i]._source, field);
if (example !== undefined && stats.examples.indexOf(example) === -1) { if (example !== undefined && stats.examples.indexOf(example) === -1) {
stats.examples.push(example); stats.examples.push(example);
if (stats.examples.length === maxExamples) { if (stats.examples.length === maxExamples) {
@ -1216,7 +1217,7 @@ export class DataVisualizer {
// Look ahead to the last percentiles and process these too if // Look ahead to the last percentiles and process these too if
// they don't add more than 50% to the value range. // they don't add more than 50% to the value range.
const lastValue = (_.last(percentileBuckets) as any).value; const lastValue = (last(percentileBuckets) as any).value;
const upperBound = lowerBound + 1.5 * (lastValue - lowerBound); const upperBound = lowerBound + 1.5 * (lastValue - lowerBound);
const filteredLength = percentileBuckets.length; const filteredLength = percentileBuckets.length;
for (let i = filteredLength; i < percentiles.length; i++) { for (let i = filteredLength; i < percentiles.length; i++) {
@ -1237,7 +1238,7 @@ export class DataVisualizer {
// Add in 0-5 and 95-100% if they don't add more // Add in 0-5 and 95-100% if they don't add more
// than 25% to the value range at either end. // than 25% to the value range at either end.
const lastValue: number = (_.last(percentileBuckets) as any).value; const lastValue: number = (last(percentileBuckets) as any).value;
const maxDiff = 0.25 * (lastValue - lowerBound); const maxDiff = 0.25 * (lastValue - lowerBound);
if (lowerBound - dataMin < maxDiff) { if (lowerBound - dataMin < maxDiff) {
percentileBuckets.splice(0, 0, percentiles[0]); percentileBuckets.splice(0, 0, percentiles[0]);

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import { ILegacyScopedClusterClient } from 'kibana/server'; import { ILegacyScopedClusterClient } from 'kibana/server';
@ -145,7 +145,7 @@ describe('ML - validateCardinality', () => {
test: (ids: string[]) => void test: (ids: string[]) => void
) => { ) => {
const job = getJobConfig(fieldName); const job = getJobConfig(fieldName);
const mockCardinality = _.cloneDeep(mockResponses); const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality; mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality( return validateCardinality(
mlClusterClientFactory(mockCardinality), mlClusterClientFactory(mockCardinality),
@ -250,7 +250,7 @@ describe('ML - validateCardinality', () => {
it(`disabled model_plot, over field cardinality of ${cardinality} doesn't trigger a warning`, () => { it(`disabled model_plot, over field cardinality of ${cardinality} doesn't trigger a warning`, () => {
const job = (getJobConfig('over_field_name') as unknown) as CombinedJob; const job = (getJobConfig('over_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: false }; job.model_plot_config = { enabled: false };
const mockCardinality = _.cloneDeep(mockResponses); const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality; mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id); const ids = messages.map((m) => m.id);
@ -261,7 +261,7 @@ describe('ML - validateCardinality', () => {
it(`enabled model_plot, over field cardinality of ${cardinality} triggers a model plot warning`, () => { it(`enabled model_plot, over field cardinality of ${cardinality} triggers a model plot warning`, () => {
const job = (getJobConfig('over_field_name') as unknown) as CombinedJob; const job = (getJobConfig('over_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: true }; job.model_plot_config = { enabled: true };
const mockCardinality = _.cloneDeep(mockResponses); const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality; mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id); const ids = messages.map((m) => m.id);
@ -272,7 +272,7 @@ describe('ML - validateCardinality', () => {
it(`disabled model_plot, by field cardinality of ${cardinality} triggers a field cardinality warning`, () => { it(`disabled model_plot, by field cardinality of ${cardinality} triggers a field cardinality warning`, () => {
const job = (getJobConfig('by_field_name') as unknown) as CombinedJob; const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: false }; job.model_plot_config = { enabled: false };
const mockCardinality = _.cloneDeep(mockResponses); const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality; mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id); const ids = messages.map((m) => m.id);
@ -283,7 +283,7 @@ describe('ML - validateCardinality', () => {
it(`enabled model_plot, by field cardinality of ${cardinality} triggers a model plot warning and field cardinality warning`, () => { it(`enabled model_plot, by field cardinality of ${cardinality} triggers a model plot warning and field cardinality warning`, () => {
const job = (getJobConfig('by_field_name') as unknown) as CombinedJob; const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: true }; job.model_plot_config = { enabled: true };
const mockCardinality = _.cloneDeep(mockResponses); const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality; mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id); const ids = messages.map((m) => m.id);
@ -294,7 +294,7 @@ describe('ML - validateCardinality', () => {
it(`enabled model_plot with terms, by field cardinality of ${cardinality} triggers just field cardinality warning`, () => { it(`enabled model_plot with terms, by field cardinality of ${cardinality} triggers just field cardinality warning`, () => {
const job = (getJobConfig('by_field_name') as unknown) as CombinedJob; const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
job.model_plot_config = { enabled: true, terms: 'AAL,AAB' }; job.model_plot_config = { enabled: true, terms: 'AAL,AAB' };
const mockCardinality = _.cloneDeep(mockResponses); const mockCardinality = cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = cardinality; mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => {
const ids = messages.map((m) => m.id); const ids = messages.map((m) => m.id);

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import { ILegacyScopedClusterClient } from 'kibana/server'; import { ILegacyScopedClusterClient } from 'kibana/server';
@ -144,7 +144,7 @@ describe('ML - validateTimeRange', () => {
}); });
it('invalid time field', () => { it('invalid time field', () => {
const mockSearchResponseInvalid = _.cloneDeep(mockSearchResponse); const mockSearchResponseInvalid = cloneDeep(mockSearchResponse);
mockSearchResponseInvalid.fieldCaps = undefined; mockSearchResponseInvalid.fieldCaps = undefined;
const duration = { start: 0, end: 1 }; const duration = { start: 0, end: 1 };
return validateTimeRange( return validateTimeRange(

View file

@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import sortBy from 'lodash/sortBy';
import each from 'lodash/each';
import moment from 'moment-timezone'; import moment from 'moment-timezone';
import { import {
@ -55,7 +56,7 @@ export function buildAnomalyTableItems(anomalyRecords, aggregationInterval, date
if (source.influencers !== undefined) { if (source.influencers !== undefined) {
const influencers = []; const influencers = [];
const sourceInfluencers = _.sortBy(source.influencers, 'influencer_field_name'); const sourceInfluencers = sortBy(source.influencers, 'influencer_field_name');
sourceInfluencers.forEach((influencer) => { sourceInfluencers.forEach((influencer) => {
const influencerFieldName = influencer.influencer_field_name; const influencerFieldName = influencer.influencer_field_name;
influencer.influencer_field_values.forEach((influencerFieldValue) => { influencer.influencer_field_values.forEach((influencerFieldValue) => {
@ -172,10 +173,10 @@ function aggregateAnomalies(anomalyRecords, interval, dateFormatTz) {
// Flatten the aggregatedData to give a list of records with // Flatten the aggregatedData to give a list of records with
// the highest score per bucketed time / jobId / detectorIndex. // the highest score per bucketed time / jobId / detectorIndex.
const summaryRecords = []; const summaryRecords = [];
_.each(aggregatedData, (times, roundedTime) => { each(aggregatedData, (times, roundedTime) => {
_.each(times, (jobIds) => { each(times, (jobIds) => {
_.each(jobIds, (entityDetectors) => { each(jobIds, (entityDetectors) => {
_.each(entityDetectors, (record) => { each(entityDetectors, (record) => {
summaryRecords.push({ summaryRecords.push({
time: +roundedTime, time: +roundedTime,
source: record, source: record,

View file

@ -4,7 +4,9 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import _ from 'lodash'; import sortBy from 'lodash/sortBy';
import slice from 'lodash/slice';
import get from 'lodash/get';
import moment from 'moment'; import moment from 'moment';
import { SearchResponse } from 'elasticsearch'; import { SearchResponse } from 'elasticsearch';
import { ILegacyScopedClusterClient } from 'kibana/server'; import { ILegacyScopedClusterClient } from 'kibana/server';
@ -175,7 +177,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
}); });
// Sort anomalies in ascending time order. // Sort anomalies in ascending time order.
records = _.sortBy(records, 'timestamp'); records = sortBy(records, 'timestamp');
tableData.interval = aggregationInterval; tableData.interval = aggregationInterval;
if (aggregationInterval === 'auto') { if (aggregationInterval === 'auto') {
// Determine the actual interval to use if aggregating. // Determine the actual interval to use if aggregating.
@ -197,7 +199,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
const categoryIdsByJobId: { [key: string]: any } = {}; const categoryIdsByJobId: { [key: string]: any } = {};
categoryAnomalies.forEach((anomaly) => { categoryAnomalies.forEach((anomaly) => {
if (!_.has(categoryIdsByJobId, anomaly.jobId)) { if (categoryIdsByJobId[anomaly.jobId] === undefined) {
categoryIdsByJobId[anomaly.jobId] = []; categoryIdsByJobId[anomaly.jobId] = [];
} }
if (categoryIdsByJobId[anomaly.jobId].indexOf(anomaly.entityValue) === -1) { if (categoryIdsByJobId[anomaly.jobId].indexOf(anomaly.entityValue) === -1) {
@ -289,7 +291,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
}; };
const resp = await callAsInternalUser('search', query); const resp = await callAsInternalUser('search', query);
const maxScore = _.get(resp, ['aggregations', 'max_score', 'value'], null); const maxScore = get(resp, ['aggregations', 'max_score', 'value'], null);
return { maxScore }; return { maxScore };
} }
@ -353,7 +355,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
}, },
}); });
const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = _.get( const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = get(
resp, resp,
['aggregations', 'byJobId', 'buckets'], ['aggregations', 'byJobId', 'buckets'],
[] []
@ -387,7 +389,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
if (resp.hits.total !== 0) { if (resp.hits.total !== 0) {
resp.hits.hits.forEach((hit: any) => { resp.hits.hits.forEach((hit: any) => {
if (maxExamples) { if (maxExamples) {
examplesByCategoryId[hit._source.category_id] = _.slice( examplesByCategoryId[hit._source.category_id] = slice(
hit._source.examples, hit._source.examples,
0, 0,
Math.min(hit._source.examples.length, maxExamples) Math.min(hit._source.examples.length, maxExamples)