Move vis_vega_type/data_model tests to jest (#55186)
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
parent
9567cca7d0
commit
22e7ae80dc
|
@ -1,317 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import expect from '@kbn/expect';
|
||||
import { VegaParser } from '../vega_parser';
|
||||
import { bypassExternalUrlCheck } from '../../vega_view/vega_base_view';
|
||||
|
||||
describe(`VegaParser._setDefaultValue`, () => {
|
||||
function test(spec, expected, ...params) {
|
||||
return () => {
|
||||
const vp = new VegaParser(spec);
|
||||
vp._setDefaultValue(...params);
|
||||
expect(vp.spec).to.eql(expected);
|
||||
expect(vp.warnings).to.have.length(0);
|
||||
};
|
||||
}
|
||||
|
||||
it(`empty`, test({}, { config: { test: 42 } }, 42, 'config', 'test'));
|
||||
it(`exists`, test({ config: { test: 42 } }, { config: { test: 42 } }, 1, 'config', 'test'));
|
||||
it(`exists non-obj`, test({ config: false }, { config: false }, 42, 'config', 'test'));
|
||||
});
|
||||
|
||||
describe(`VegaParser._setDefaultColors`, () => {
|
||||
function test(spec, isVegaLite, expected) {
|
||||
return () => {
|
||||
const vp = new VegaParser(spec);
|
||||
vp.isVegaLite = isVegaLite;
|
||||
vp._setDefaultColors();
|
||||
expect(vp.spec).to.eql(expected);
|
||||
expect(vp.warnings).to.have.length(0);
|
||||
};
|
||||
}
|
||||
|
||||
it(
|
||||
`vegalite`,
|
||||
test({}, true, {
|
||||
config: {
|
||||
range: { category: { scheme: 'elastic' } },
|
||||
mark: { color: '#54B399' },
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
it(
|
||||
`vega`,
|
||||
test({}, false, {
|
||||
config: {
|
||||
range: { category: { scheme: 'elastic' } },
|
||||
arc: { fill: '#54B399' },
|
||||
area: { fill: '#54B399' },
|
||||
line: { stroke: '#54B399' },
|
||||
path: { stroke: '#54B399' },
|
||||
rect: { fill: '#54B399' },
|
||||
rule: { stroke: '#54B399' },
|
||||
shape: { stroke: '#54B399' },
|
||||
symbol: { fill: '#54B399' },
|
||||
trail: { fill: '#54B399' },
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._resolveEsQueries', () => {
|
||||
function test(spec, expected, warnCount) {
|
||||
return async () => {
|
||||
const vp = new VegaParser(spec, { search: async () => [[42]] }, 0, 0, {
|
||||
getFileLayers: async () => [{ name: 'file1', url: 'url1' }],
|
||||
getUrlForRegionLayer: async layer => {
|
||||
return layer.url;
|
||||
},
|
||||
});
|
||||
await vp._resolveDataUrls();
|
||||
|
||||
expect(vp.spec).to.eql(expected);
|
||||
expect(vp.warnings).to.have.length(warnCount || 0);
|
||||
};
|
||||
}
|
||||
|
||||
it('no data', test({}, {}));
|
||||
it('no data2', test({ a: 1 }, { a: 1 }));
|
||||
it('non-es data', test({ data: { a: 10 } }, { data: { a: 10 } }));
|
||||
it('es', test({ data: { url: { index: 'a' }, x: 1 } }, { data: { values: [42], x: 1 } }));
|
||||
it(
|
||||
'es',
|
||||
test({ data: { url: { '%type%': 'elasticsearch', index: 'a' } } }, { data: { values: [42] } })
|
||||
);
|
||||
it(
|
||||
'es arr',
|
||||
test(
|
||||
{ arr: [{ data: { url: { index: 'a' }, x: 1 } }] },
|
||||
{ arr: [{ data: { values: [42], x: 1 } }] }
|
||||
)
|
||||
);
|
||||
it(
|
||||
'emsfile',
|
||||
test(
|
||||
{ data: { url: { '%type%': 'emsfile', name: 'file1' } } },
|
||||
{ data: { url: bypassExternalUrlCheck('url1') } }
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._parseSchema', () => {
|
||||
function test(schema, isVegaLite, warningCount) {
|
||||
return () => {
|
||||
const vp = new VegaParser({ $schema: schema });
|
||||
expect(vp._parseSchema()).to.be(isVegaLite);
|
||||
expect(vp.spec).to.eql({ $schema: schema });
|
||||
expect(vp.warnings).to.have.length(warningCount);
|
||||
};
|
||||
}
|
||||
|
||||
it('should warn on no vega version specified', () => {
|
||||
const vp = new VegaParser({});
|
||||
expect(vp._parseSchema()).to.be(false);
|
||||
expect(vp.spec).to.eql({ $schema: 'https://vega.github.io/schema/vega/v3.0.json' });
|
||||
expect(vp.warnings).to.have.length(1);
|
||||
});
|
||||
|
||||
it(
|
||||
'should not warn on current vega version',
|
||||
test('https://vega.github.io/schema/vega/v4.0.json', false, 0)
|
||||
);
|
||||
it(
|
||||
'should not warn on older vega version',
|
||||
test('https://vega.github.io/schema/vega/v3.0.json', false, 0)
|
||||
);
|
||||
it(
|
||||
'should warn on vega version too new to be supported',
|
||||
test('https://vega.github.io/schema/vega/v5.0.json', false, 1)
|
||||
);
|
||||
|
||||
it(
|
||||
'should not warn on current vega-lite version',
|
||||
test('https://vega.github.io/schema/vega-lite/v2.0.json', true, 0)
|
||||
);
|
||||
it(
|
||||
'should warn on vega-lite version too new to be supported',
|
||||
test('https://vega.github.io/schema/vega-lite/v3.0.json', true, 1)
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._parseTooltips', () => {
|
||||
function test(tooltips, position, padding, centerOnMark) {
|
||||
return () => {
|
||||
const vp = new VegaParser(tooltips !== undefined ? { config: { kibana: { tooltips } } } : {});
|
||||
vp._config = vp._parseConfig();
|
||||
if (position === undefined) {
|
||||
// error
|
||||
expect(() => vp._parseTooltips()).to.throwError();
|
||||
} else if (position === false) {
|
||||
expect(vp._parseTooltips()).to.eql(false);
|
||||
} else {
|
||||
expect(vp._parseTooltips()).to.eql({ position, padding, centerOnMark });
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
it('undefined', test(undefined, 'top', 16, 50));
|
||||
it('{}', test({}, 'top', 16, 50));
|
||||
it('left', test({ position: 'left' }, 'left', 16, 50));
|
||||
it('padding', test({ position: 'bottom', padding: 60 }, 'bottom', 60, 50));
|
||||
it('padding2', test({ padding: 70 }, 'top', 70, 50));
|
||||
it('centerOnMark', test({}, 'top', 16, 50));
|
||||
it('centerOnMark=10', test({ centerOnMark: 10 }, 'top', 16, 10));
|
||||
it('centerOnMark=true', test({ centerOnMark: true }, 'top', 16, Number.MAX_VALUE));
|
||||
it('centerOnMark=false', test({ centerOnMark: false }, 'top', 16, -1));
|
||||
|
||||
it('false', test(false, false));
|
||||
|
||||
it('err1', test(true, undefined));
|
||||
it('err2', test({ position: 'foo' }, undefined));
|
||||
it('err3', test({ padding: 'foo' }, undefined));
|
||||
it('err4', test({ centerOnMark: {} }, undefined));
|
||||
});
|
||||
|
||||
describe('VegaParser._parseMapConfig', () => {
|
||||
function test(config, expected, warnCount) {
|
||||
return () => {
|
||||
const vp = new VegaParser();
|
||||
vp._config = config;
|
||||
expect(vp._parseMapConfig()).to.eql(expected);
|
||||
expect(vp.warnings).to.have.length(warnCount);
|
||||
};
|
||||
}
|
||||
|
||||
it(
|
||||
'empty',
|
||||
test(
|
||||
{},
|
||||
{
|
||||
delayRepaint: true,
|
||||
latitude: 0,
|
||||
longitude: 0,
|
||||
mapStyle: 'default',
|
||||
zoomControl: true,
|
||||
scrollWheelZoom: false,
|
||||
},
|
||||
0
|
||||
)
|
||||
);
|
||||
|
||||
it(
|
||||
'filled',
|
||||
test(
|
||||
{
|
||||
delayRepaint: true,
|
||||
latitude: 0,
|
||||
longitude: 0,
|
||||
mapStyle: 'default',
|
||||
zoomControl: true,
|
||||
scrollWheelZoom: false,
|
||||
maxBounds: [1, 2, 3, 4],
|
||||
},
|
||||
{
|
||||
delayRepaint: true,
|
||||
latitude: 0,
|
||||
longitude: 0,
|
||||
mapStyle: 'default',
|
||||
zoomControl: true,
|
||||
scrollWheelZoom: false,
|
||||
maxBounds: [1, 2, 3, 4],
|
||||
},
|
||||
0
|
||||
)
|
||||
);
|
||||
|
||||
it(
|
||||
'warnings',
|
||||
test(
|
||||
{
|
||||
delayRepaint: true,
|
||||
latitude: 0,
|
||||
longitude: 0,
|
||||
zoom: 'abc', // ignored
|
||||
mapStyle: 'abc',
|
||||
zoomControl: 'abc',
|
||||
scrollWheelZoom: 'abc',
|
||||
maxBounds: [2, 3, 4],
|
||||
},
|
||||
{
|
||||
delayRepaint: true,
|
||||
latitude: 0,
|
||||
longitude: 0,
|
||||
mapStyle: 'default',
|
||||
zoomControl: true,
|
||||
scrollWheelZoom: false,
|
||||
},
|
||||
5
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._parseConfig', () => {
|
||||
function test(spec, expectedConfig, expectedSpec, warnCount) {
|
||||
return async () => {
|
||||
expectedSpec = expectedSpec || _.cloneDeep(spec);
|
||||
const vp = new VegaParser(spec);
|
||||
const config = await vp._parseConfig();
|
||||
expect(config).to.eql(expectedConfig);
|
||||
expect(vp.spec).to.eql(expectedSpec);
|
||||
expect(vp.warnings).to.have.length(warnCount || 0);
|
||||
};
|
||||
}
|
||||
|
||||
it('no config', test({}, {}, {}));
|
||||
it('simple config', test({ config: { a: 1 } }, {}));
|
||||
it('kibana config', test({ config: { kibana: { a: 1 } } }, { a: 1 }, { config: {} }));
|
||||
it('_hostConfig', test({ _hostConfig: { a: 1 } }, { a: 1 }, {}, 1));
|
||||
});
|
||||
|
||||
describe('VegaParser._calcSizing', () => {
|
||||
function test(spec, useResize, paddingWidth, paddingHeight, isVegaLite, expectedSpec, warnCount) {
|
||||
return async () => {
|
||||
expectedSpec = expectedSpec || _.cloneDeep(spec);
|
||||
const vp = new VegaParser(spec);
|
||||
vp.isVegaLite = !!isVegaLite;
|
||||
vp._calcSizing();
|
||||
expect(vp.useResize).to.eql(useResize);
|
||||
expect(vp.paddingWidth).to.eql(paddingWidth);
|
||||
expect(vp.paddingHeight).to.eql(paddingHeight);
|
||||
expect(vp.spec).to.eql(expectedSpec);
|
||||
expect(vp.warnings).to.have.length(warnCount || 0);
|
||||
};
|
||||
}
|
||||
|
||||
it('no size', test({ autosize: {} }, false, 0, 0));
|
||||
it('fit', test({ autosize: 'fit' }, true, 0, 0));
|
||||
it('fit obj', test({ autosize: { type: 'fit' } }, true, 0, 0));
|
||||
it('padding const', test({ autosize: 'fit', padding: 10 }, true, 20, 20));
|
||||
it(
|
||||
'padding obj',
|
||||
test({ autosize: 'fit', padding: { left: 5, bottom: 7, right: 6, top: 8 } }, true, 11, 15)
|
||||
);
|
||||
it('width height', test({ autosize: 'fit', width: 1, height: 2 }, true, 0, 0, false, false, 1));
|
||||
it(
|
||||
'VL width height',
|
||||
test({ autosize: 'fit', width: 1, height: 2 }, true, 0, 0, true, { autosize: 'fit' }, 0)
|
||||
);
|
||||
});
|
|
@ -17,11 +17,13 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import expect from '@kbn/expect';
|
||||
import sinon from 'sinon';
|
||||
import { cloneDeep } from 'lodash';
|
||||
import moment from 'moment';
|
||||
import { EsQueryParser } from '../es_query_parser';
|
||||
import { EsQueryParser } from './es_query_parser';
|
||||
|
||||
jest.mock('../helpers', () => ({
|
||||
getEsShardTimeout: jest.fn(() => '10000'),
|
||||
}));
|
||||
|
||||
const second = 1000;
|
||||
const minute = 60 * second;
|
||||
|
@ -39,41 +41,57 @@ function create(min, max, dashboardCtx) {
|
|||
getTimeBounds: () => ({ min, max }),
|
||||
},
|
||||
() => {},
|
||||
_.cloneDeep(dashboardCtx),
|
||||
cloneDeep(dashboardCtx),
|
||||
() => (inst.$$$warnCount = (inst.$$$warnCount || 0) + 1)
|
||||
);
|
||||
return inst;
|
||||
}
|
||||
|
||||
describe(`EsQueryParser time`, () => {
|
||||
it(`roundInterval(4s)`, () => expect(EsQueryParser._roundInterval(4 * second)).to.be(`1s`));
|
||||
it(`roundInterval(4hr)`, () => expect(EsQueryParser._roundInterval(4 * hour)).to.be(`3h`));
|
||||
it(`getTimeBound`, () => expect(create(1000, 2000)._getTimeBound({}, `min`)).to.be(1000));
|
||||
it(`getTimeBound(shift 2d)`, () =>
|
||||
expect(create(5, 2000)._getTimeBound({ shift: 2 }, `min`)).to.be(5 + 2 * day));
|
||||
it(`getTimeBound(shift -2hr)`, () =>
|
||||
expect(create(10 * day, 20 * day)._getTimeBound({ shift: -2, unit: `h` }, `min`)).to.be(
|
||||
10 * day - 2 * hour
|
||||
));
|
||||
it(`createRangeFilter({})`, () => {
|
||||
const obj = {};
|
||||
expect(create(1000, 2000)._createRangeFilter(obj))
|
||||
.to.eql({
|
||||
format: 'strict_date_optional_time',
|
||||
gte: moment(1000).toISOString(),
|
||||
lte: moment(2000).toISOString(),
|
||||
})
|
||||
.and.to.be(obj);
|
||||
test(`roundInterval(4s)`, () => {
|
||||
expect(EsQueryParser._roundInterval(4 * second)).toBe(`1s`);
|
||||
});
|
||||
it(`createRangeFilter(shift 1s)`, () => {
|
||||
|
||||
test(`roundInterval(4hr)`, () => {
|
||||
expect(EsQueryParser._roundInterval(4 * hour)).toBe(`3h`);
|
||||
});
|
||||
|
||||
test(`getTimeBound`, () => {
|
||||
expect(create(1000, 2000)._getTimeBound({}, `min`)).toBe(1000);
|
||||
});
|
||||
|
||||
test(`getTimeBound(shift 2d)`, () => {
|
||||
expect(create(5, 2000)._getTimeBound({ shift: 2 }, `min`)).toBe(5 + 2 * day);
|
||||
});
|
||||
|
||||
test(`getTimeBound(shift -2hr)`, () => {
|
||||
expect(create(10 * day, 20 * day)._getTimeBound({ shift: -2, unit: `h` }, `min`)).toBe(
|
||||
10 * day - 2 * hour
|
||||
);
|
||||
});
|
||||
|
||||
test(`createRangeFilter({})`, () => {
|
||||
const obj = {};
|
||||
const result = create(1000, 2000)._createRangeFilter(obj);
|
||||
|
||||
expect(result).toEqual({
|
||||
format: 'strict_date_optional_time',
|
||||
gte: moment(1000).toISOString(),
|
||||
lte: moment(2000).toISOString(),
|
||||
});
|
||||
expect(result).toBe(obj);
|
||||
});
|
||||
|
||||
test(`createRangeFilter(shift 1s)`, () => {
|
||||
const obj = { shift: 5, unit: 's' };
|
||||
expect(create(1000, 2000)._createRangeFilter(obj))
|
||||
.to.eql({
|
||||
format: 'strict_date_optional_time',
|
||||
gte: moment(6000).toISOString(),
|
||||
lte: moment(7000).toISOString(),
|
||||
})
|
||||
.and.to.be(obj);
|
||||
const result = create(1000, 2000)._createRangeFilter(obj);
|
||||
|
||||
expect(result).toEqual({
|
||||
format: 'strict_date_optional_time',
|
||||
gte: moment(6000).toISOString(),
|
||||
lte: moment(7000).toISOString(),
|
||||
});
|
||||
expect(result).toBe(obj);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -82,79 +100,78 @@ describe('EsQueryParser.populateData', () => {
|
|||
let parser;
|
||||
|
||||
beforeEach(() => {
|
||||
searchStub = sinon.stub();
|
||||
searchStub = jest.fn(() => Promise.resolve([{}, {}]));
|
||||
parser = new EsQueryParser({}, { search: searchStub }, undefined, undefined);
|
||||
|
||||
searchStub.returns(Promise.resolve([{}, {}]));
|
||||
});
|
||||
it('should set the timeout for each request', async () => {
|
||||
|
||||
test('should set the timeout for each request', async () => {
|
||||
await parser.populateData([
|
||||
{ url: { body: {} }, dataObject: {} },
|
||||
{ url: { body: {} }, dataObject: {} },
|
||||
]);
|
||||
expect(searchStub.firstCall.args[0][0].body.timeout).to.be.defined;
|
||||
expect(searchStub.mock.calls[0][0][0].body.timeout).toBe.defined;
|
||||
});
|
||||
|
||||
it('should remove possible timeout parameters on a request', async () => {
|
||||
test('should remove possible timeout parameters on a request', async () => {
|
||||
await parser.populateData([
|
||||
{ url: { timeout: '500h', body: { timeout: '500h' } }, dataObject: {} },
|
||||
]);
|
||||
expect(searchStub.firstCall.args[0][0].body.timeout).to.be.defined;
|
||||
expect(searchStub.firstCall.args[0][0].timeout).to.be(undefined);
|
||||
expect(searchStub.mock.calls[0][0][0].body.timeout).toBe.defined;
|
||||
expect(searchStub.mock.calls[0][0][0].timeout).toBe(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe(`EsQueryParser.injectQueryContextVars`, () => {
|
||||
function test(obj, expected, ctx) {
|
||||
function check(obj, expected, ctx) {
|
||||
return () => {
|
||||
create(rangeStart, rangeEnd, ctx)._injectContextVars(obj, true);
|
||||
expect(obj).to.eql(expected);
|
||||
expect(obj).toEqual(expected);
|
||||
};
|
||||
}
|
||||
|
||||
it(`empty`, test({}, {}));
|
||||
it(`simple`, () => {
|
||||
test(`empty`, check({}, {}));
|
||||
test(`simple`, () => {
|
||||
const obj = { a: { c: 10 }, b: [{ d: 2 }, 4, 5], c: [], d: {} };
|
||||
test(obj, _.cloneDeep(obj));
|
||||
check(obj, cloneDeep(obj));
|
||||
});
|
||||
it(`must clause empty`, test({ arr: ['%dashboard_context-must_clause%'] }, { arr: [] }, {}));
|
||||
it(
|
||||
test(`must clause empty`, check({ arr: ['%dashboard_context-must_clause%'] }, { arr: [] }, {}));
|
||||
test(
|
||||
`must clause arr`,
|
||||
test({ arr: ['%dashboard_context-must_clause%'] }, { arr: [...ctxArr.bool.must] }, ctxArr)
|
||||
check({ arr: ['%dashboard_context-must_clause%'] }, { arr: [...ctxArr.bool.must] }, ctxArr)
|
||||
);
|
||||
it(
|
||||
test(
|
||||
`must clause obj`,
|
||||
test({ arr: ['%dashboard_context-must_clause%'] }, { arr: [ctxObj.bool.must] }, ctxObj)
|
||||
check({ arr: ['%dashboard_context-must_clause%'] }, { arr: [ctxObj.bool.must] }, ctxObj)
|
||||
);
|
||||
it(
|
||||
test(
|
||||
`mixed clause arr`,
|
||||
test(
|
||||
check(
|
||||
{ arr: [1, '%dashboard_context-must_clause%', 2, '%dashboard_context-must_not_clause%'] },
|
||||
{ arr: [1, ...ctxArr.bool.must, 2, ...ctxArr.bool.must_not] },
|
||||
ctxArr
|
||||
)
|
||||
);
|
||||
it(
|
||||
test(
|
||||
`mixed clause obj`,
|
||||
test(
|
||||
check(
|
||||
{ arr: ['%dashboard_context-must_clause%', 1, '%dashboard_context-must_not_clause%', 2] },
|
||||
{ arr: [ctxObj.bool.must, 1, ctxObj.bool.must_not, 2] },
|
||||
ctxObj
|
||||
)
|
||||
);
|
||||
it(
|
||||
test(
|
||||
`%autointerval% = true`,
|
||||
test({ interval: { '%autointerval%': true } }, { interval: `1h` }, ctxObj)
|
||||
check({ interval: { '%autointerval%': true } }, { interval: `1h` }, ctxObj)
|
||||
);
|
||||
it(
|
||||
test(
|
||||
`%autointerval% = 10`,
|
||||
test({ interval: { '%autointerval%': 10 } }, { interval: `3h` }, ctxObj)
|
||||
check({ interval: { '%autointerval%': 10 } }, { interval: `3h` }, ctxObj)
|
||||
);
|
||||
it(`%timefilter% = min`, test({ a: { '%timefilter%': 'min' } }, { a: rangeStart }));
|
||||
it(`%timefilter% = max`, test({ a: { '%timefilter%': 'max' } }, { a: rangeEnd }));
|
||||
it(
|
||||
test(`%timefilter% = min`, check({ a: { '%timefilter%': 'min' } }, { a: rangeStart }));
|
||||
test(`%timefilter% = max`, check({ a: { '%timefilter%': 'max' } }, { a: rangeEnd }));
|
||||
test(
|
||||
`%timefilter% = true`,
|
||||
test(
|
||||
check(
|
||||
{ a: { '%timefilter%': true } },
|
||||
{
|
||||
a: {
|
||||
|
@ -168,24 +185,24 @@ describe(`EsQueryParser.injectQueryContextVars`, () => {
|
|||
});
|
||||
|
||||
describe(`EsQueryParser.parseEsRequest`, () => {
|
||||
function test(req, ctx, expected) {
|
||||
function check(req, ctx, expected) {
|
||||
return () => {
|
||||
create(rangeStart, rangeEnd, ctx).parseUrl({}, req);
|
||||
expect(req).to.eql(expected);
|
||||
expect(req).toEqual(expected);
|
||||
};
|
||||
}
|
||||
|
||||
it(
|
||||
test(
|
||||
`%context_query%=true`,
|
||||
test({ index: '_all', '%context_query%': true }, ctxArr, {
|
||||
check({ index: '_all', '%context_query%': true }, ctxArr, {
|
||||
index: '_all',
|
||||
body: { query: ctxArr },
|
||||
})
|
||||
);
|
||||
|
||||
it(
|
||||
test(
|
||||
`%context%=true`,
|
||||
test({ index: '_all', '%context%': true }, ctxArr, { index: '_all', body: { query: ctxArr } })
|
||||
check({ index: '_all', '%context%': true }, ctxArr, { index: '_all', body: { query: ctxArr } })
|
||||
);
|
||||
|
||||
const expectedForCtxAndTimefield = {
|
||||
|
@ -211,23 +228,23 @@ describe(`EsQueryParser.parseEsRequest`, () => {
|
|||
},
|
||||
};
|
||||
|
||||
it(
|
||||
test(
|
||||
`%context_query%='abc'`,
|
||||
test({ index: '_all', '%context_query%': 'abc' }, ctxArr, expectedForCtxAndTimefield)
|
||||
check({ index: '_all', '%context_query%': 'abc' }, ctxArr, expectedForCtxAndTimefield)
|
||||
);
|
||||
|
||||
it(
|
||||
test(
|
||||
`%context%=true, %timefield%='abc'`,
|
||||
test(
|
||||
check(
|
||||
{ index: '_all', '%context%': true, '%timefield%': 'abc' },
|
||||
ctxArr,
|
||||
expectedForCtxAndTimefield
|
||||
)
|
||||
);
|
||||
|
||||
it(
|
||||
test(
|
||||
`%timefield%='abc'`,
|
||||
test({ index: '_all', '%timefield%': 'abc' }, ctxArr, {
|
||||
check({ index: '_all', '%timefield%': 'abc' }, ctxArr, {
|
||||
index: '_all',
|
||||
body: {
|
||||
query: {
|
||||
|
@ -243,11 +260,11 @@ describe(`EsQueryParser.parseEsRequest`, () => {
|
|||
})
|
||||
);
|
||||
|
||||
it(`no esRequest`, test({ index: '_all' }, ctxArr, { index: '_all', body: {} }));
|
||||
test(`no esRequest`, check({ index: '_all' }, ctxArr, { index: '_all', body: {} }));
|
||||
|
||||
it(
|
||||
test(
|
||||
`esRequest`,
|
||||
test({ index: '_all', body: { query: 2 } }, ctxArr, {
|
||||
check({ index: '_all', body: { query: 2 } }, ctxArr, {
|
||||
index: '_all',
|
||||
body: { query: 2 },
|
||||
})
|
|
@ -17,8 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import { SearchCache } from '../search_cache';
|
||||
import { SearchCache } from './search_cache';
|
||||
|
||||
describe(`SearchCache`, () => {
|
||||
class FauxEs {
|
||||
|
@ -45,27 +44,27 @@ describe(`SearchCache`, () => {
|
|||
|
||||
// empty request
|
||||
let res = await sc.search([]);
|
||||
expect(res).to.eql([]);
|
||||
expect(sc._es.searches).to.eql([]);
|
||||
expect(res).toEqual([]);
|
||||
expect(sc._es.searches).toEqual([]);
|
||||
|
||||
// single request
|
||||
res = await sc.search([request1]);
|
||||
expect(res).to.eql([expected1]);
|
||||
expect(sc._es.searches).to.eql([request1]);
|
||||
expect(res).toEqual([expected1]);
|
||||
expect(sc._es.searches).toEqual([request1]);
|
||||
|
||||
// repeat the same search, use array notation
|
||||
res = await sc.search([request1]);
|
||||
expect(res).to.eql([expected1]);
|
||||
expect(sc._es.searches).to.eql([request1]); // no new entries
|
||||
expect(res).toEqual([expected1]);
|
||||
expect(sc._es.searches).toEqual([request1]); // no new entries
|
||||
|
||||
// new single search
|
||||
res = await sc.search([request2]);
|
||||
expect(res).to.eql([expected2]);
|
||||
expect(sc._es.searches).to.eql([request1, request2]);
|
||||
expect(res).toEqual([expected2]);
|
||||
expect(sc._es.searches).toEqual([request1, request2]);
|
||||
|
||||
// multiple search, some new, some old
|
||||
res = await sc.search([request1, request3, request2]);
|
||||
expect(res).to.eql([expected1, expected3, expected2]);
|
||||
expect(sc._es.searches).to.eql([request1, request2, request3]);
|
||||
expect(res).toEqual([expected1, expected3, expected2]);
|
||||
expect(sc._es.searches).toEqual([request1, request2, request3]);
|
||||
});
|
||||
});
|
|
@ -17,8 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import { TimeCache } from '../time_cache';
|
||||
import { TimeCache } from './time_cache';
|
||||
|
||||
describe(`TimeCache`, () => {
|
||||
class FauxTimefilter {
|
||||
|
@ -70,29 +69,29 @@ describe(`TimeCache`, () => {
|
|||
let filterAccess = 0;
|
||||
|
||||
// first call - gets bounds
|
||||
expect(tc.getTimeBounds()).to.eql({ min: 10000, max: 20000 });
|
||||
expect(time._accessCount).to.be(++timeAccess);
|
||||
expect(timefilter._accessCount).to.be(++filterAccess);
|
||||
expect(tc.getTimeBounds()).toEqual({ min: 10000, max: 20000 });
|
||||
expect(time._accessCount).toBe(++timeAccess);
|
||||
expect(timefilter._accessCount).toBe(++filterAccess);
|
||||
|
||||
// short diff, same result
|
||||
time.increment(10);
|
||||
timefilter.setTime(10010, 20010);
|
||||
expect(tc.getTimeBounds()).to.eql({ min: 10000, max: 20000 });
|
||||
expect(time._accessCount).to.be(++timeAccess);
|
||||
expect(timefilter._accessCount).to.be(filterAccess);
|
||||
expect(tc.getTimeBounds()).toEqual({ min: 10000, max: 20000 });
|
||||
expect(time._accessCount).toBe(++timeAccess);
|
||||
expect(timefilter._accessCount).toBe(filterAccess);
|
||||
|
||||
// longer diff, gets bounds but returns original
|
||||
time.increment(200);
|
||||
timefilter.setTime(10210, 20210);
|
||||
expect(tc.getTimeBounds()).to.eql({ min: 10000, max: 20000 });
|
||||
expect(time._accessCount).to.be(++timeAccess);
|
||||
expect(timefilter._accessCount).to.be(++filterAccess);
|
||||
expect(tc.getTimeBounds()).toEqual({ min: 10000, max: 20000 });
|
||||
expect(time._accessCount).toBe(++timeAccess);
|
||||
expect(timefilter._accessCount).toBe(++filterAccess);
|
||||
|
||||
// long diff, new result
|
||||
time.increment(10000);
|
||||
timefilter.setTime(20220, 30220);
|
||||
expect(tc.getTimeBounds()).to.eql({ min: 20220, max: 30220 });
|
||||
expect(time._accessCount).to.be(++timeAccess);
|
||||
expect(timefilter._accessCount).to.be(++filterAccess);
|
||||
expect(tc.getTimeBounds()).toEqual({ min: 20220, max: 30220 });
|
||||
expect(time._accessCount).toBe(++timeAccess);
|
||||
expect(timefilter._accessCount).toBe(++filterAccess);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,327 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { cloneDeep } from 'lodash';
|
||||
import { VegaParser } from './vega_parser';
|
||||
import { bypassExternalUrlCheck } from '../vega_view/vega_base_view';
|
||||
|
||||
describe(`VegaParser._setDefaultValue`, () => {
|
||||
function check(spec, expected, ...params) {
|
||||
return () => {
|
||||
const vp = new VegaParser(spec);
|
||||
vp._setDefaultValue(...params);
|
||||
expect(vp.spec).toEqual(expected);
|
||||
expect(vp.warnings).toHaveLength(0);
|
||||
};
|
||||
}
|
||||
|
||||
test(`empty`, check({}, { config: { test: 42 } }, 42, 'config', 'test'));
|
||||
test(`exists`, check({ config: { test: 42 } }, { config: { test: 42 } }, 1, 'config', 'test'));
|
||||
test(`exists non-obj`, check({ config: false }, { config: false }, 42, 'config', 'test'));
|
||||
});
|
||||
|
||||
describe(`VegaParser._setDefaultColors`, () => {
|
||||
function check(spec, isVegaLite, expected) {
|
||||
return () => {
|
||||
const vp = new VegaParser(spec);
|
||||
vp.isVegaLite = isVegaLite;
|
||||
vp._setDefaultColors();
|
||||
expect(vp.spec).toEqual(expected);
|
||||
expect(vp.warnings).toHaveLength(0);
|
||||
};
|
||||
}
|
||||
|
||||
test(
|
||||
`vegalite`,
|
||||
check({}, true, {
|
||||
config: {
|
||||
range: { category: { scheme: 'elastic' } },
|
||||
mark: { color: '#54B399' },
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
`vega`,
|
||||
check({}, false, {
|
||||
config: {
|
||||
range: { category: { scheme: 'elastic' } },
|
||||
arc: { fill: '#54B399' },
|
||||
area: { fill: '#54B399' },
|
||||
line: { stroke: '#54B399' },
|
||||
path: { stroke: '#54B399' },
|
||||
rect: { fill: '#54B399' },
|
||||
rule: { stroke: '#54B399' },
|
||||
shape: { stroke: '#54B399' },
|
||||
symbol: { fill: '#54B399' },
|
||||
trail: { fill: '#54B399' },
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._resolveEsQueries', () => {
|
||||
function check(spec, expected, warnCount) {
|
||||
return async () => {
|
||||
const vp = new VegaParser(spec, { search: async () => [[42]] }, 0, 0, {
|
||||
getFileLayers: async () => [{ name: 'file1', url: 'url1' }],
|
||||
getUrlForRegionLayer: async layer => {
|
||||
return layer.url;
|
||||
},
|
||||
});
|
||||
await vp._resolveDataUrls();
|
||||
|
||||
expect(vp.spec).toEqual(expected);
|
||||
expect(vp.warnings).toHaveLength(warnCount || 0);
|
||||
};
|
||||
}
|
||||
|
||||
test('no data', check({}, {}));
|
||||
test('no data2', check({ a: 1 }, { a: 1 }));
|
||||
test('non-es data', check({ data: { a: 10 } }, { data: { a: 10 } }));
|
||||
test('es', check({ data: { url: { index: 'a' }, x: 1 } }, { data: { values: [42], x: 1 } }));
|
||||
test(
|
||||
'es 2',
|
||||
check({ data: { url: { '%type%': 'elasticsearch', index: 'a' } } }, { data: { values: [42] } })
|
||||
);
|
||||
test(
|
||||
'es arr',
|
||||
check(
|
||||
{ arr: [{ data: { url: { index: 'a' }, x: 1 } }] },
|
||||
{ arr: [{ data: { values: [42], x: 1 } }] }
|
||||
)
|
||||
);
|
||||
test(
|
||||
'emsfile',
|
||||
check(
|
||||
{ data: { url: { '%type%': 'emsfile', name: 'file1' } } },
|
||||
{ data: { url: bypassExternalUrlCheck('url1') } }
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._parseSchema', () => {
|
||||
function check(schema, isVegaLite, warningCount) {
|
||||
return () => {
|
||||
const vp = new VegaParser({ $schema: schema });
|
||||
expect(vp._parseSchema()).toBe(isVegaLite);
|
||||
expect(vp.spec).toEqual({ $schema: schema });
|
||||
expect(vp.warnings).toHaveLength(warningCount);
|
||||
};
|
||||
}
|
||||
|
||||
test('should warn on no vega version specified', () => {
|
||||
const vp = new VegaParser({});
|
||||
expect(vp._parseSchema()).toBe(false);
|
||||
expect(vp.spec).toEqual({ $schema: 'https://vega.github.io/schema/vega/v3.0.json' });
|
||||
expect(vp.warnings).toHaveLength(1);
|
||||
});
|
||||
|
||||
test(
|
||||
'should not warn on current vega version',
|
||||
check('https://vega.github.io/schema/vega/v4.0.json', false, 0)
|
||||
);
|
||||
test(
|
||||
'should not warn on older vega version',
|
||||
check('https://vega.github.io/schema/vega/v3.0.json', false, 0)
|
||||
);
|
||||
test(
|
||||
'should warn on vega version too new to be supported',
|
||||
check('https://vega.github.io/schema/vega/v5.0.json', false, 1)
|
||||
);
|
||||
|
||||
test(
|
||||
'should not warn on current vega-lite version',
|
||||
check('https://vega.github.io/schema/vega-lite/v2.0.json', true, 0)
|
||||
);
|
||||
test(
|
||||
'should warn on vega-lite version too new to be supported',
|
||||
check('https://vega.github.io/schema/vega-lite/v3.0.json', true, 1)
|
||||
);
|
||||
});
|
||||
|
||||
describe('VegaParser._parseTooltips', () => {
|
||||
function check(tooltips, position, padding, centerOnMark) {
|
||||
return () => {
|
||||
const vp = new VegaParser(tooltips !== undefined ? { config: { kibana: { tooltips } } } : {});
|
||||
vp._config = vp._parseConfig();
|
||||
if (position === undefined) {
|
||||
// error
|
||||
expect(() => vp._parseTooltips()).toThrow();
|
||||
} else if (position === false) {
|
||||
expect(vp._parseTooltips()).toEqual(false);
|
||||
} else {
|
||||
expect(vp._parseTooltips()).toEqual({ position, padding, centerOnMark });
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
test('undefined', check(undefined, 'top', 16, 50));
|
||||
test('{}', check({}, 'top', 16, 50));
|
||||
test('left', check({ position: 'left' }, 'left', 16, 50));
|
||||
test('padding', check({ position: 'bottom', padding: 60 }, 'bottom', 60, 50));
|
||||
test('padding2', check({ padding: 70 }, 'top', 70, 50));
|
||||
test('centerOnMark', check({}, 'top', 16, 50));
|
||||
test('centerOnMark=10', check({ centerOnMark: 10 }, 'top', 16, 10));
|
||||
test('centerOnMark=true', check({ centerOnMark: true }, 'top', 16, Number.MAX_VALUE));
|
||||
test('centerOnMark=false', check({ centerOnMark: false }, 'top', 16, -1));
|
||||
|
||||
test('false', check(false, false));
|
||||
|
||||
test('err1', check(true, undefined));
|
||||
test('err2', check({ position: 'foo' }, undefined));
|
||||
test('err3', check({ padding: 'foo' }, undefined));
|
||||
test('err4', check({ centerOnMark: {} }, undefined));
|
||||
});
|
||||
|
||||
describe('VegaParser._parseMapConfig', () => {
  // Defaults that _parseMapConfig is expected to produce for an empty config.
  // Spread into fresh objects per test so no literal is shared between cases.
  const defaults = {
    delayRepaint: true,
    latitude: 0,
    longitude: 0,
    mapStyle: 'default',
    zoomControl: true,
    scrollWheelZoom: false,
  };

  // Builds a jest test body: feed `config` to the parser, compare the parsed
  // map settings against `expected`, and count the warnings produced.
  const check = (config, expected, warnCount) => () => {
    const parser = new VegaParser();
    parser._config = config;
    expect(parser._parseMapConfig()).toEqual(expected);
    expect(parser.warnings).toHaveLength(warnCount);
  };

  test('empty', check({}, { ...defaults }, 0));

  test(
    'filled',
    check(
      { ...defaults, maxBounds: [1, 2, 3, 4] },
      { ...defaults, maxBounds: [1, 2, 3, 4] },
      0
    )
  );

  test(
    'warnings',
    check(
      {
        delayRepaint: true,
        latitude: 0,
        longitude: 0,
        zoom: 'abc', // ignored
        mapStyle: 'abc',
        zoomControl: 'abc',
        scrollWheelZoom: 'abc',
        maxBounds: [2, 3, 4],
      },
      // every invalid value above falls back to its default and emits a warning
      { ...defaults },
      5
    )
  );
});
|
||||
|
||||
describe('VegaParser._parseConfig', () => {
  // Builds an async jest test body: parse `spec`, assert the extracted kibana
  // config, the (possibly stripped) remaining spec, and the warning count.
  // When `expectedSpec` is omitted the spec is expected to pass through unchanged.
  const check = (spec, expectedConfig, expectedSpec, warnCount) => async () => {
    const expected = expectedSpec || cloneDeep(spec);
    const parser = new VegaParser(spec);
    const config = await parser._parseConfig();
    expect(config).toEqual(expectedConfig);
    expect(parser.spec).toEqual(expected);
    expect(parser.warnings).toHaveLength(warnCount || 0);
  };

  test('no config', check({}, {}, {}));
  test('simple config', check({ config: { a: 1 } }, {}));
  test('kibana config', check({ config: { kibana: { a: 1 } } }, { a: 1 }, { config: {} }));
  test('_hostConfig', check({ _hostConfig: { a: 1 } }, { a: 1 }, {}, 1));
});
|
||||
|
||||
describe('VegaParser._calcSizing', () => {
  // Builds an async jest test body: run _calcSizing() on `spec` and assert the
  // resulting resize flag, padding totals, the (possibly rewritten) spec, and
  // the warning count. When `expectedSpec` is omitted or falsy the spec is
  // expected to survive unchanged.
  const check = (
    spec,
    useResize,
    paddingWidth,
    paddingHeight,
    isVegaLite,
    expectedSpec,
    warnCount
  ) => async () => {
    const expected = expectedSpec || cloneDeep(spec);
    const parser = new VegaParser(spec);
    parser.isVegaLite = Boolean(isVegaLite);
    parser._calcSizing();
    expect(parser.useResize).toEqual(useResize);
    expect(parser.paddingWidth).toEqual(paddingWidth);
    expect(parser.paddingHeight).toEqual(paddingHeight);
    expect(parser.spec).toEqual(expected);
    expect(parser.warnings).toHaveLength(warnCount || 0);
  };

  test('no size', check({ autosize: {} }, false, 0, 0));
  test('fit', check({ autosize: 'fit' }, true, 0, 0));
  test('fit obj', check({ autosize: { type: 'fit' } }, true, 0, 0));
  test('padding const', check({ autosize: 'fit', padding: 10 }, true, 20, 20));
  test(
    'padding obj',
    check({ autosize: 'fit', padding: { left: 5, bottom: 7, right: 6, top: 8 } }, true, 11, 15)
  );
  test(
    'width height',
    check({ autosize: 'fit', width: 1, height: 2 }, true, 0, 0, false, false, 1)
  );
  test(
    'VL width height',
    check({ autosize: 'fit', width: 1, height: 2 }, true, 0, 0, true, { autosize: 'fit' }, 0)
  );
});
|
Loading…
Reference in a new issue