[TSVB] Fixing memory leak (#64918)

Maja Grubic 2020-05-04 12:12:40 +01:00 committed by GitHub
parent 007b16793d
commit ccede29e60
22 changed files with 128 additions and 86 deletions
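
Every file below applies the same pattern: writes into the mutable request `doc` that previously went through lodash `_.set`/`set` now go through the new `overwrite` helper. A minimal before/after sketch of that pattern (the path and variables are illustrative, not copied from any one processor):

import { overwrite } from '../../helpers';

// Before: the value was written straight into doc with lodash set.
// _.set(doc, `aggs.${series.id}.terms.field`, series.terms_field);

// After: the same write goes through the shared overwrite helper,
// which clears the path before assigning the new value.
overwrite(doc, `aggs.${series.id}.terms.field`, series.terms_field);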

View file

@@ -29,6 +29,8 @@ import { getTimerange } from './get_timerange';
import { mapBucket } from './map_bucket';
import { parseSettings } from './parse_settings';
export { overwrite } from './overwrite';
export const helpers = {
bucketTransform,
getAggValue,

View file

@@ -0,0 +1,31 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import set from 'set-value';
/**
* Set path in obj. Behaves like lodash `set`
* @param obj The object to mutate
* @param path The path of the sub-property to set
* @param val The value to set the sub-property to
*/
export function overwrite(obj, path, val) {
set(obj, path, undefined);
set(obj, path, val);
}
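
A short usage sketch for the helper above (the object and path are hypothetical, not part of the commit). Writing `undefined` before the real value guarantees the final state at the path is exactly `val`; presumably this full replacement, rather than any merge of the old and new objects, is what the leak fix relies on:

// Hypothetical example of overwrite in action.
const doc = { aggs: { mySeries: { filter: { match_all: {} } } } };

overwrite(doc, 'aggs.mySeries.filter', {
  query_string: { query: 'status:ok', analyze_wildcard: true },
});

// doc.aggs.mySeries.filter now contains only the query_string clause;
// the old match_all object is no longer referenced.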

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { getTimerange } from '../../helpers/get_timerange';
import { search } from '../../../../../../../plugins/data/server';
@@ -37,7 +37,7 @@ export function dateHistogram(
const { from, to } = getTimerange(req);
const timezone = capabilities.searchTimezone;
_.set(doc, `aggs.${annotation.id}.date_histogram`, {
overwrite(doc, `aggs.${annotation.id}.date_histogram`, {
field: timeField,
min_doc_count: 0,
time_zone: timezone,

View file

@@ -17,13 +17,13 @@
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
export function topHits(req, panel, annotation) {
return next => doc => {
const fields = (annotation.fields && annotation.fields.split(/[,\s]+/)) || [];
const timeField = annotation.time_field;
_.set(doc, `aggs.${annotation.id}.aggs.hits.top_hits`, {
overwrite(doc, `aggs.${annotation.id}.aggs.hits.top_hits`, {
sort: [
{
[timeField]: { order: 'desc' },

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { offsetTime } from '../../offset_time';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
@@ -34,7 +34,7 @@ export function dateHistogram(req, panel, series, esQueryConfig, indexPatternObj
const { from, to } = offsetTime(req, series.offset_time);
const timezone = capabilities.searchTimezone;
set(doc, `aggs.${series.id}.aggs.timeseries.date_histogram`, {
overwrite(doc, `aggs.${series.id}.aggs.timeseries.date_histogram`, {
field: timeField,
min_doc_count: 0,
time_zone: timezone,
@@ -47,7 +47,7 @@ export function dateHistogram(req, panel, series, esQueryConfig, indexPatternObj
};
const getDateHistogramForEntireTimerangeMode = () =>
set(doc, `aggs.${series.id}.aggs.timeseries.auto_date_histogram`, {
overwrite(doc, `aggs.${series.id}.aggs.timeseries.auto_date_histogram`, {
field: timeField,
buckets: 1,
});
@@ -58,7 +58,7 @@ export function dateHistogram(req, panel, series, esQueryConfig, indexPatternObj
// master
set(doc, `aggs.${series.id}.meta`, {
overwrite(doc, `aggs.${series.id}.meta`, {
timeField,
intervalString,
bucketSize,

View file

@@ -19,16 +19,16 @@
const filter = metric => metric.type === 'filter_ratio';
import { bucketTransform } from '../../helpers/bucket_transform';
import _ from 'lodash';
import { overwrite } from '../../helpers';
export function ratios(req, panel, series) {
return next => doc => {
if (series.metrics.some(filter)) {
series.metrics.filter(filter).forEach(metric => {
_.set(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-numerator.filter`, {
overwrite(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-numerator.filter`, {
query_string: { query: metric.numerator || '*', analyze_wildcard: true },
});
_.set(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-denominator.filter`, {
overwrite(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-denominator.filter`, {
query_string: { query: metric.denominator || '*', analyze_wildcard: true },
});
@@ -46,8 +46,12 @@ export function ratios(req, panel, series) {
metricAgg = {};
}
const aggBody = { metric: metricAgg };
_.set(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-numerator.aggs`, aggBody);
_.set(
overwrite(
doc,
`aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-numerator.aggs`,
aggBody
);
overwrite(
doc,
`aggs.${series.id}.aggs.timeseries.aggs.${metric.id}-denominator.aggs`,
aggBody
@@ -56,7 +60,7 @@ export function ratios(req, panel, series) {
denominatorPath = `${metric.id}-denominator>metric`;
}
_.set(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}`, {
overwrite(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}`, {
bucket_script: {
buckets_path: {
numerator: numeratorPath,

View file

@@ -16,8 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { bucketTransform } from '../../helpers/bucket_transform';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
@@ -33,7 +32,7 @@ export function metricBuckets(req, panel, series, esQueryConfig, indexPatternObj
if (fn) {
try {
const bucket = fn(metric, series.metrics, intervalString);
_.set(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}`, bucket);
overwrite(doc, `aggs.${series.id}.aggs.timeseries.aggs.${metric.id}`, bucket);
} catch (e) {
// meh
}

View file

@@ -16,9 +16,10 @@
* specific language governing permissions and limitations
* under the License.
*/
const { set, get, isEmpty } = require('lodash');
import { overwrite } from '../../helpers';
import _ from 'lodash';
const isEmptyFilter = (filter = {}) => Boolean(filter.match_all) && isEmpty(filter.match_all);
const isEmptyFilter = (filter = {}) => Boolean(filter.match_all) && _.isEmpty(filter.match_all);
const hasSiblingPipelineAggregation = (aggs = {}) => Object.keys(aggs).length > 1;
/* For grouping by the 'Everything', the splitByEverything request processor
@@ -30,12 +31,12 @@ const hasSiblingPipelineAggregation = (aggs = {}) => Object.keys(aggs).length >
*
*/
function removeEmptyTopLevelAggregation(doc, series) {
const filter = get(doc, `aggs.${series.id}.filter`);
const filter = _.get(doc, `aggs.${series.id}.filter`);
if (isEmptyFilter(filter) && !hasSiblingPipelineAggregation(doc.aggs[series.id].aggs)) {
const meta = get(doc, `aggs.${series.id}.meta`);
set(doc, `aggs`, doc.aggs[series.id].aggs);
set(doc, `aggs.timeseries.meta`, meta);
const meta = _.get(doc, `aggs.${series.id}.meta`);
overwrite(doc, `aggs`, doc.aggs[series.id].aggs);
overwrite(doc, `aggs.timeseries.meta`, meta);
}
return doc;

View file

@@ -20,7 +20,7 @@
import { getBucketSize } from '../../helpers/get_bucket_size';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
import { bucketTransform } from '../../helpers/bucket_transform';
import { set } from 'lodash';
import { overwrite } from '../../helpers';
export const filter = metric => metric.type === 'positive_rate';
@@ -48,9 +48,13 @@ export const createPositiveRate = (doc, intervalString, aggRoot) => metric => {
const derivativeBucket = derivativeFn(derivativeMetric, fakeSeriesMetrics, intervalString);
const positiveOnlyBucket = positiveOnlyFn(positiveOnlyMetric, fakeSeriesMetrics, intervalString);
set(doc, `${aggRoot}.timeseries.aggs.${metric.id}-positive-rate-max`, maxBucket);
set(doc, `${aggRoot}.timeseries.aggs.${metric.id}-positive-rate-derivative`, derivativeBucket);
set(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, positiveOnlyBucket);
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}-positive-rate-max`, maxBucket);
overwrite(
doc,
`${aggRoot}.timeseries.aggs.${metric.id}-positive-rate-derivative`,
derivativeBucket
);
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, positiveOnlyBucket);
};
export function positiveRate(req, panel, series, esQueryConfig, indexPatternObject, capabilities) {

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { bucketTransform } from '../../helpers/bucket_transform';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
@@ -40,7 +40,7 @@ export function siblingBuckets(
if (fn) {
try {
const bucket = fn(metric, series.metrics, bucketSize);
_.set(doc, `aggs.${series.id}.aggs.${metric.id}`, bucket);
overwrite(doc, `aggs.${series.id}.aggs.${metric.id}`, bucket);
} catch (e) {
// meh
}

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
export function splitByEverything(req, panel, series) {
return next => doc => {
@@ -25,7 +25,7 @@ export function splitByEverything(req, panel, series) {
series.split_mode === 'everything' ||
(series.split_mode === 'terms' && !series.terms_field)
) {
_.set(doc, `aggs.${series.id}.filter.match_all`, {});
overwrite(doc, `aggs.${series.id}.filter.match_all`, {});
}
return next(doc);
};

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';
export function splitByFilter(req, panel, series, esQueryConfig, indexPattern) {
@@ -26,7 +26,7 @@ export function splitByFilter(req, panel, series, esQueryConfig, indexPattern) {
return next(doc);
}
set(
overwrite(
doc,
`aggs.${series.id}.filter`,
esQuery.buildEsQuery(indexPattern, [series.filter], [], esQueryConfig)

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';
export function splitByFilters(req, panel, series, esQueryConfig, indexPattern) {
@@ -26,7 +26,7 @@ export function splitByFilters(req, panel, series, esQueryConfig, indexPattern)
series.split_filters.forEach(filter => {
const builtEsQuery = esQuery.buildEsQuery(indexPattern, [filter.filter], [], esQueryConfig);
set(doc, `aggs.${series.id}.filters.filters.${filter.id}`, builtEsQuery);
overwrite(doc, `aggs.${series.id}.filters.filters.${filter.id}`, builtEsQuery);
});
}
return next(doc);

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { basicAggs } from '../../../../../common/basic_aggs';
import { getBucketsPath } from '../../helpers/get_buckets_path';
import { bucketTransform } from '../../helpers/bucket_transform';
@@ -27,13 +27,13 @@ export function splitByTerms(req, panel, series) {
if (series.split_mode === 'terms' && series.terms_field) {
const direction = series.terms_direction || 'desc';
const metric = series.metrics.find(item => item.id === series.terms_order_by);
set(doc, `aggs.${series.id}.terms.field`, series.terms_field);
set(doc, `aggs.${series.id}.terms.size`, series.terms_size);
overwrite(doc, `aggs.${series.id}.terms.field`, series.terms_field);
overwrite(doc, `aggs.${series.id}.terms.size`, series.terms_size);
if (series.terms_include) {
set(doc, `aggs.${series.id}.terms.include`, series.terms_include);
overwrite(doc, `aggs.${series.id}.terms.include`, series.terms_include);
}
if (series.terms_exclude) {
set(doc, `aggs.${series.id}.terms.exclude`, series.terms_exclude);
overwrite(doc, `aggs.${series.id}.terms.exclude`, series.terms_exclude);
}
if (metric && metric.type !== 'count' && ~basicAggs.indexOf(metric.type)) {
const sortAggKey = `${series.terms_order_by}-SORT`;
@@ -42,12 +42,12 @@ export function splitByTerms(req, panel, series) {
series.terms_order_by,
sortAggKey
);
set(doc, `aggs.${series.id}.terms.order`, { [bucketPath]: direction });
set(doc, `aggs.${series.id}.aggs`, { [sortAggKey]: fn(metric) });
overwrite(doc, `aggs.${series.id}.terms.order`, { [bucketPath]: direction });
overwrite(doc, `aggs.${series.id}.aggs`, { [sortAggKey]: fn(metric) });
} else if (['_key', '_count'].includes(series.terms_order_by)) {
set(doc, `aggs.${series.id}.terms.order`, { [series.terms_order_by]: direction });
overwrite(doc, `aggs.${series.id}.terms.order`, { [series.terms_order_by]: direction });
} else {
set(doc, `aggs.${series.id}.terms.order`, { _count: direction });
overwrite(doc, `aggs.${series.id}.terms.order`, { _count: direction });
}
}
return next(doc);

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { isLastValueTimerangeMode } from '../../helpers/get_timerange_mode';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
@@ -41,7 +41,7 @@ export function dateHistogram(req, panel, esQueryConfig, indexPatternObject, cap
panel.series.forEach(column => {
const aggRoot = calculateAggRoot(doc, column);
set(doc, `${aggRoot}.timeseries.date_histogram`, {
overwrite(doc, `${aggRoot}.timeseries.date_histogram`, {
field: timeField,
min_doc_count: 0,
time_zone: timezone,
@@ -52,7 +52,7 @@ export function dateHistogram(req, panel, esQueryConfig, indexPatternObject, cap
...dateHistogramInterval(intervalString),
});
set(doc, aggRoot.replace(/\.aggs$/, '.meta'), {
overwrite(doc, aggRoot.replace(/\.aggs$/, '.meta'), {
timeField,
intervalString,
bucketSize,
@@ -64,12 +64,12 @@ export function dateHistogram(req, panel, esQueryConfig, indexPatternObject, cap
panel.series.forEach(column => {
const aggRoot = calculateAggRoot(doc, column);
set(doc, `${aggRoot}.timeseries.auto_date_histogram`, {
overwrite(doc, `${aggRoot}.timeseries.auto_date_histogram`, {
field: timeField,
buckets: 1,
});
set(doc, aggRoot.replace(/\.aggs$/, '.meta'), meta);
overwrite(doc, aggRoot.replace(/\.aggs$/, '.meta'), meta);
});
};

View file

@@ -19,7 +19,7 @@
const filter = metric => metric.type === 'filter_ratio';
import { bucketTransform } from '../../helpers/bucket_transform';
import _ from 'lodash';
import { overwrite } from '../../helpers';
import { calculateAggRoot } from './calculate_agg_root';
export function ratios(req, panel) {
@@ -28,10 +28,10 @@ export function ratios(req, panel) {
const aggRoot = calculateAggRoot(doc, column);
if (column.metrics.some(filter)) {
column.metrics.filter(filter).forEach(metric => {
_.set(doc, `${aggRoot}.timeseries.aggs.${metric.id}-numerator.filter`, {
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}-numerator.filter`, {
query_string: { query: metric.numerator || '*', analyze_wildcard: true },
});
_.set(doc, `${aggRoot}.timeseries.aggs.${metric.id}-denominator.filter`, {
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}-denominator.filter`, {
query_string: { query: metric.denominator || '*', analyze_wildcard: true },
});
@@ -45,13 +45,13 @@ export function ratios(req, panel) {
field: metric.field,
}),
};
_.set(doc, `${aggRoot}.timeseries.aggs.${metric.id}-numerator.aggs`, aggBody);
_.set(doc, `${aggBody}.timeseries.aggs.${metric.id}-denominator.aggs`, aggBody);
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}-numerator.aggs`, aggBody);
overwrite(doc, `${aggBody}.timeseries.aggs.${metric.id}-denominator.aggs`, aggBody);
numeratorPath = `${metric.id}-numerator>metric`;
denominatorPath = `${metric.id}-denominator>metric`;
}
_.set(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, {
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, {
bucket_script: {
buckets_path: {
numerator: numeratorPath,

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { bucketTransform } from '../../helpers/bucket_transform';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
@@ -36,7 +36,7 @@ export function metricBuckets(req, panel, esQueryConfig, indexPatternObject) {
if (fn) {
try {
const bucket = fn(metric, column.metrics, intervalString);
_.set(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, bucket);
overwrite(doc, `${aggRoot}.timeseries.aggs.${metric.id}`, bucket);
} catch (e) {
// meh
}

View file

@@ -16,9 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
const { set, get, isEmpty, forEach } = require('lodash');
const isEmptyFilter = (filter = {}) => Boolean(filter.match_all) && isEmpty(filter.match_all);
import _ from 'lodash';
import { overwrite } from '../../helpers';
const isEmptyFilter = (filter = {}) => Boolean(filter.match_all) && _.isEmpty(filter.match_all);
const hasSiblingPipelineAggregation = (aggs = {}) => Object.keys(aggs).length > 1;
/* Last query handler in the chain. You can use this handler
@@ -29,26 +29,26 @@ const hasSiblingPipelineAggregation = (aggs = {}) => Object.keys(aggs).length >
*/
export function normalizeQuery() {
return () => doc => {
const series = get(doc, 'aggs.pivot.aggs');
const series = _.get(doc, 'aggs.pivot.aggs');
const normalizedSeries = {};
forEach(series, (value, seriesId) => {
const filter = get(value, `filter`);
_.forEach(series, (value, seriesId) => {
const filter = _.get(value, `filter`);
if (isEmptyFilter(filter) && !hasSiblingPipelineAggregation(value.aggs)) {
const agg = get(value, 'aggs.timeseries');
const agg = _.get(value, 'aggs.timeseries');
const meta = {
...get(value, 'meta'),
..._.get(value, 'meta'),
seriesId,
};
set(normalizedSeries, `${seriesId}`, agg);
set(normalizedSeries, `${seriesId}.meta`, meta);
overwrite(normalizedSeries, `${seriesId}`, agg);
overwrite(normalizedSeries, `${seriesId}.meta`, meta);
} else {
set(normalizedSeries, `${seriesId}`, value);
overwrite(normalizedSeries, `${seriesId}`, value);
}
});
set(doc, 'aggs.pivot.aggs', normalizedSeries);
overwrite(doc, 'aggs.pivot.aggs', normalizedSeries);
return doc;
};

View file

@@ -17,7 +17,8 @@
* under the License.
*/
import { get, set, last } from 'lodash';
import { get, last } from 'lodash';
import { overwrite } from '../../helpers';
import { basicAggs } from '../../../../../common/basic_aggs';
import { getBucketsPath } from '../../helpers/get_buckets_path';
@@ -27,13 +28,13 @@ export function pivot(req, panel) {
return next => doc => {
const { sort } = req.payload.state;
if (panel.pivot_id) {
set(doc, 'aggs.pivot.terms.field', panel.pivot_id);
set(doc, 'aggs.pivot.terms.size', panel.pivot_rows);
overwrite(doc, 'aggs.pivot.terms.field', panel.pivot_id);
overwrite(doc, 'aggs.pivot.terms.size', panel.pivot_rows);
if (sort) {
const series = panel.series.find(item => item.id === sort.column);
const metric = series && last(series.metrics);
if (metric && metric.type === 'count') {
set(doc, 'aggs.pivot.terms.order', { _count: sort.order });
overwrite(doc, 'aggs.pivot.terms.order', { _count: sort.order });
} else if (metric && basicAggs.includes(metric.type)) {
const sortAggKey = `${metric.id}-SORT`;
const fn = bucketTransform[metric.type];
@@ -41,16 +42,16 @@ export function pivot(req, panel) {
metric.id,
sortAggKey
);
set(doc, `aggs.pivot.terms.order`, { [bucketPath]: sort.order });
set(doc, `aggs.pivot.aggs`, { [sortAggKey]: fn(metric) });
overwrite(doc, `aggs.pivot.terms.order`, { [bucketPath]: sort.order });
overwrite(doc, `aggs.pivot.aggs`, { [sortAggKey]: fn(metric) });
} else {
set(doc, 'aggs.pivot.terms.order', {
overwrite(doc, 'aggs.pivot.terms.order', {
_key: get(sort, 'order', 'asc'),
});
}
}
} else {
set(doc, 'aggs.pivot.filter.match_all', {});
overwrite(doc, 'aggs.pivot.filter.match_all', {});
}
return next(doc);
};

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import _ from 'lodash';
import { overwrite } from '../../helpers';
import { getBucketSize } from '../../helpers/get_bucket_size';
import { bucketTransform } from '../../helpers/bucket_transform';
import { getIntervalAndTimefield } from '../../get_interval_and_timefield';
@@ -36,7 +36,7 @@ export function siblingBuckets(req, panel, esQueryConfig, indexPatternObject) {
if (fn) {
try {
const bucket = fn(metric, column.metrics, bucketSize);
_.set(doc, `${aggRoot}.${metric.id}`, bucket);
overwrite(doc, `${aggRoot}.${metric.id}`, bucket);
} catch (e) {
// meh
}

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';
export function splitByEverything(req, panel, esQueryConfig, indexPattern) {
@@ -26,13 +26,13 @@ export function splitByEverything(req, panel, esQueryConfig, indexPattern) {
.filter(c => !(c.aggregate_by && c.aggregate_function))
.forEach(column => {
if (column.filter) {
set(
overwrite(
doc,
`aggs.pivot.aggs.${column.id}.filter`,
esQuery.buildEsQuery(indexPattern, [column.filter], [], esQueryConfig)
);
} else {
set(doc, `aggs.pivot.aggs.${column.id}.filter.match_all`, {});
overwrite(doc, `aggs.pivot.aggs.${column.id}.filter.match_all`, {});
}
});
return next(doc);

View file

@@ -17,7 +17,7 @@
* under the License.
*/
import { set } from 'lodash';
import { overwrite } from '../../helpers';
import { esQuery } from '../../../../../../data/server';
export function splitByTerms(req, panel, esQueryConfig, indexPattern) {
@@ -25,11 +25,11 @@ export function splitByTerms(req, panel, esQueryConfig, indexPattern) {
panel.series
.filter(c => c.aggregate_by && c.aggregate_function)
.forEach(column => {
set(doc, `aggs.pivot.aggs.${column.id}.terms.field`, column.aggregate_by);
set(doc, `aggs.pivot.aggs.${column.id}.terms.size`, 100);
overwrite(doc, `aggs.pivot.aggs.${column.id}.terms.field`, column.aggregate_by);
overwrite(doc, `aggs.pivot.aggs.${column.id}.terms.size`, 100);
if (column.filter) {
set(
overwrite(
doc,
`aggs.pivot.aggs.${column.id}.column_filter.filter`,
esQuery.buildEsQuery(indexPattern, [column.filter], [], esQueryConfig)