[Reporting] convert all server unit tests to TypeScript

Timothy Sullivan 2020-04-07 08:31:58 -07:00
parent 898504dc8f
commit d9ce4024ec
9 changed files with 268 additions and 162 deletions
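Most of the conversions below are mechanical typing changes rather than behavioral ones, and a few patterns repeat throughout: loose test fixtures are funneled through a small getJobDocPayload(...) cast helper, mutable test state is annotated as `any`, and partially shaped stubs are coerced with `as unknown as`. A minimal, self-contained sketch of those patterns — the interfaces and the runJob function below are illustrative stand-ins, not the real Reporting types:

    // Illustrative stand-ins; none of these names come from the Reporting code itself.
    interface JobDocPayload {
      headers: string;
      fields: string[];
    }

    interface CancellationToken {
      on(handler: () => void): void;
    }

    // Cast helper: lets a test build a partial fixture without typing every field.
    const getJobDocPayload = (baseObj: any) => baseObj as JobDocPayload;

    // Partially shaped stub coerced through `unknown` into the expected type.
    const cancellationToken = ({
      on: () => {},
    } as unknown) as CancellationToken;

    // Fake job runner so the sketch stands alone.
    async function runJob(id: string, job: JobDocPayload, token: CancellationToken) {
      token.on(() => {});
      return { id, fieldCount: job.fields.length };
    }

    runJob('job123', getJobDocPayload({ headers: 'encrypted', fields: ['one'] }), cancellationToken).then(
      result => console.log(result)
    );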

View file

@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
// @ts-ignore
import Puid from 'puid'; import Puid from 'puid';
import sinon from 'sinon'; import sinon from 'sinon';
import nodeCrypto from '@elastic/node-crypto'; import nodeCrypto from '@elastic/node-crypto';
@ -13,35 +14,39 @@ import { createMockReportingCore } from '../../../test_helpers';
import { LevelLogger } from '../../../server/lib/level_logger'; import { LevelLogger } from '../../../server/lib/level_logger';
import { setFieldFormats } from '../../../server/services'; import { setFieldFormats } from '../../../server/services';
import { executeJobFactory } from './execute_job'; import { executeJobFactory } from './execute_job';
import { JobDocPayloadDiscoverCsv } from '../types';
-const delay = ms => new Promise(resolve => setTimeout(() => resolve(), ms));
+const delay = (ms: number) => new Promise(resolve => setTimeout(() => resolve(), ms));
const puid = new Puid(); const puid = new Puid();
const getRandomScrollId = () => { const getRandomScrollId = () => {
return puid.generate(); return puid.generate();
}; };
const getJobDocPayload = (baseObj: any) => baseObj as JobDocPayloadDiscoverCsv;
describe('CSV Execute Job', function() { describe('CSV Execute Job', function() {
const encryptionKey = 'testEncryptionKey'; const encryptionKey = 'testEncryptionKey';
const headers = { const headers = {
sid: 'test', sid: 'test',
}; };
const mockLogger = new LevelLogger({ const mockLogger = new LevelLogger({
-    get: () => ({
-      debug: jest.fn(),
-      warn: jest.fn(),
-      error: jest.fn(),
-    }),
+    get: () =>
+      ({
+        debug: jest.fn(),
+        warn: jest.fn(),
+        error: jest.fn(),
+      } as any),
}); });
let defaultElasticsearchResponse; let defaultElasticsearchResponse: any;
let encryptedHeaders; let encryptedHeaders: any;
let clusterStub; let clusterStub: any;
let configGetStub; let configGetStub: any;
let mockReportingConfig; let mockReportingConfig: any;
let mockReportingPlugin; let mockReportingPlugin: any;
let callAsCurrentUserStub; let callAsCurrentUserStub: any;
let cancellationToken; let cancellationToken: any;
const mockElasticsearch = { const mockElasticsearch = {
dataClient: { dataClient: {
@ -77,7 +82,7 @@ describe('CSV Execute Job', function() {
_scroll_id: 'defaultScrollId', _scroll_id: 'defaultScrollId',
}; };
clusterStub = { clusterStub = {
callAsCurrentUser: function() {}, callAsCurrentUser() {},
}; };
callAsCurrentUserStub = sinon callAsCurrentUserStub = sinon
@ -88,17 +93,19 @@ describe('CSV Execute Job', function() {
mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true);
setFieldFormats({ setFieldFormats({
-      fieldFormatServiceFactory: function() {
+      fieldFormatServiceFactory() {
         const uiConfigMock = {};
-        uiConfigMock['format:defaultTypeMap'] = {
+        (uiConfigMock as any)['format:defaultTypeMap'] = {
           _default_: { id: 'string', params: {} },
         };
         const fieldFormatsRegistry = new fieldFormats.FieldFormatsRegistry();
-        fieldFormatsRegistry.init(key => uiConfigMock[key], {}, [fieldFormats.StringFormat]);
-        return fieldFormatsRegistry;
+        fieldFormatsRegistry.init(key => (uiConfigMock as any)[key], {}, [
+          fieldFormats.StringFormat,
+        ]);
+        return Promise.resolve(fieldFormatsRegistry);
}, },
}); });
}); });
@ -108,7 +115,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
await executeJob( await executeJob(
'job456', 'job456',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
expect(callAsCurrentUserStub.called).toBe(true); expect(callAsCurrentUserStub.called).toBe(true);
@ -122,14 +133,14 @@ describe('CSV Execute Job', function() {
}; };
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const job = { const job = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { searchRequest: {
index, index,
body, body,
}, },
}; });
await executeJob('job777', job, cancellationToken); await executeJob('job777', job, cancellationToken);
@ -151,7 +162,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
await executeJob( await executeJob(
'job456', 'job456',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
@ -165,7 +180,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
await executeJob( await executeJob(
'job456', 'job456',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
@ -195,7 +214,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
await executeJob( await executeJob(
'job456', 'job456',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
@ -230,7 +253,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
await executeJob( await executeJob(
'job456', 'job456',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
@ -256,12 +283,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: undefined, conflictedTypesFields: undefined,
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot(`[TypeError: Cannot read property 'indexOf' of undefined]`); ).rejects.toMatchInlineSnapshot(`[TypeError: Cannot read property 'indexOf' of undefined]`);
@ -283,12 +310,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { csv_contains_formulas: csvContainsFormulas } = await executeJob( const { csv_contains_formulas: csvContainsFormulas } = await executeJob(
'job123', 'job123',
jobParams, jobParams,
@ -308,12 +335,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['=SUM(A1:A2)', 'two'], fields: ['=SUM(A1:A2)', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { csv_contains_formulas: csvContainsFormulas } = await executeJob( const { csv_contains_formulas: csvContainsFormulas } = await executeJob(
'job123', 'job123',
jobParams, jobParams,
@ -333,12 +360,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { csv_contains_formulas: csvContainsFormulas } = await executeJob( const { csv_contains_formulas: csvContainsFormulas } = await executeJob(
'job123', 'job123',
jobParams, jobParams,
@ -358,12 +385,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { csv_contains_formulas: csvContainsFormulas } = await executeJob( const { csv_contains_formulas: csvContainsFormulas } = await executeJob(
'job123', 'job123',
jobParams, jobParams,
@ -378,11 +405,11 @@ describe('CSV Execute Job', function() {
it('should reject Promise if search call errors out', async function() { it('should reject Promise if search call errors out', async function() {
callAsCurrentUserStub.rejects(new Error()); callAsCurrentUserStub.rejects(new Error());
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot(`[Error]`); ).rejects.toMatchInlineSnapshot(`[Error]`);
@ -397,11 +424,11 @@ describe('CSV Execute Job', function() {
}); });
callAsCurrentUserStub.onSecondCall().rejects(new Error()); callAsCurrentUserStub.onSecondCall().rejects(new Error());
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot(`[Error]`); ).rejects.toMatchInlineSnapshot(`[Error]`);
@ -418,11 +445,11 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot( ).rejects.toMatchInlineSnapshot(
@ -439,11 +466,11 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot( ).rejects.toMatchInlineSnapshot(
@ -467,11 +494,11 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot( ).rejects.toMatchInlineSnapshot(
@ -495,11 +522,11 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: [], fields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await expect( await expect(
executeJob('job123', jobParams, cancellationToken) executeJob('job123', jobParams, cancellationToken)
).rejects.toMatchInlineSnapshot( ).rejects.toMatchInlineSnapshot(
@ -533,7 +560,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
executeJob( executeJob(
'job345', 'job345',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
@ -548,13 +579,17 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
executeJob( executeJob(
'job345', 'job345',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
cancellationToken.cancel(); cancellationToken.cancel();
for (let i = 0; i < callAsCurrentUserStub.callCount; ++i) { for (let i = 0; i < callAsCurrentUserStub.callCount; ++i) {
-      expect(callAsCurrentUserStub.getCall(i).args[1]).to.not.be('clearScroll');
+      expect(callAsCurrentUserStub.getCall(i).args[1]).not.toBe('clearScroll'); // dead code?
} }
}); });
@ -562,7 +597,11 @@ describe('CSV Execute Job', function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
executeJob( executeJob(
'job345', 'job345',
-      { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } },
+      getJobDocPayload({
+        headers: encryptedHeaders,
+        fields: [],
+        searchRequest: { index: null, body: null },
+      }),
cancellationToken cancellationToken
); );
await delay(100); await delay(100);
@ -578,11 +617,11 @@ describe('CSV Execute Job', function() {
describe('csv content', function() { describe('csv content', function() {
it('should write column headers to output, even if there are no results', async function() { it('should write column headers to output, even if there are no results', async function() {
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
expect(content).toBe(`one,two\n`); expect(content).toBe(`one,two\n`);
}); });
@ -590,11 +629,11 @@ describe('CSV Execute Job', function() {
it('should use custom uiSettings csv:separator for header', async function() { it('should use custom uiSettings csv:separator for header', async function() {
mockUiSettingsClient.get.withArgs('csv:separator').returns(';'); mockUiSettingsClient.get.withArgs('csv:separator').returns(';');
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
expect(content).toBe(`one;two\n`); expect(content).toBe(`one;two\n`);
}); });
@ -602,11 +641,11 @@ describe('CSV Execute Job', function() {
it('should escape column headers if uiSettings csv:quoteValues is true', async function() { it('should escape column headers if uiSettings csv:quoteValues is true', async function() {
mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true);
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one and a half', 'two', 'three-and-four', 'five & six'], fields: ['one and a half', 'two', 'three-and-four', 'five & six'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
expect(content).toBe(`"one and a half",two,"three-and-four","five & six"\n`); expect(content).toBe(`"one and a half",two,"three-and-four","five & six"\n`);
}); });
@ -614,11 +653,11 @@ describe('CSV Execute Job', function() {
it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() {
mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false); mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false);
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one and a half', 'two', 'three-and-four', 'five & six'], fields: ['one and a half', 'two', 'three-and-four', 'five & six'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
expect(content).toBe(`one and a half,two,three-and-four,five & six\n`); expect(content).toBe(`one and a half,two,three-and-four,five & six\n`);
}); });
@ -632,11 +671,11 @@ describe('CSV Execute Job', function() {
_scroll_id: 'scrollId', _scroll_id: 'scrollId',
}); });
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
const lines = content.split('\n'); const lines = content.split('\n');
const headerLine = lines[0]; const headerLine = lines[0];
@ -652,12 +691,12 @@ describe('CSV Execute Job', function() {
_scroll_id: 'scrollId', _scroll_id: 'scrollId',
}); });
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
const lines = content.split('\n'); const lines = content.split('\n');
const valuesLine = lines[1]; const valuesLine = lines[1];
@ -679,12 +718,12 @@ describe('CSV Execute Job', function() {
_scroll_id: 'scrollId', _scroll_id: 'scrollId',
}); });
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
const lines = content.split('\n'); const lines = content.split('\n');
@ -701,7 +740,7 @@ describe('CSV Execute Job', function() {
_scroll_id: 'scrollId', _scroll_id: 'scrollId',
}); });
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
@ -715,7 +754,7 @@ describe('CSV Execute Job', function() {
fieldFormatMap: '{"one":{"id":"string","params":{"transform": "upper"}}}', fieldFormatMap: '{"one":{"id":"string","params":{"transform": "upper"}}}',
}, },
}, },
}; });
const { content } = await executeJob('job123', jobParams, cancellationToken); const { content } = await executeJob('job123', jobParams, cancellationToken);
const lines = content.split('\n'); const lines = content.split('\n');
@ -729,18 +768,18 @@ describe('CSV Execute Job', function() {
// tests use these 'simple' characters to make the math easier // tests use these 'simple' characters to make the math easier
describe('when only the headers exceed the maxSizeBytes', function() { describe('when only the headers exceed the maxSizeBytes', function() {
let content; let content: string;
let maxSizeReached; let maxSizeReached: boolean;
beforeEach(async function() { beforeEach(async function() {
configGetStub.withArgs('csv', 'maxSizeBytes').returns(1); configGetStub.withArgs('csv', 'maxSizeBytes').returns(1);
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
({ content, max_size_reached: maxSizeReached } = await executeJob( ({ content, max_size_reached: maxSizeReached } = await executeJob(
'job123', 'job123',
@ -759,18 +798,18 @@ describe('CSV Execute Job', function() {
}); });
describe('when headers are equal to maxSizeBytes', function() { describe('when headers are equal to maxSizeBytes', function() {
let content; let content: string;
let maxSizeReached; let maxSizeReached: boolean;
beforeEach(async function() { beforeEach(async function() {
configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); configGetStub.withArgs('csv', 'maxSizeBytes').returns(9);
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
({ content, max_size_reached: maxSizeReached } = await executeJob( ({ content, max_size_reached: maxSizeReached } = await executeJob(
'job123', 'job123',
@ -789,8 +828,8 @@ describe('CSV Execute Job', function() {
}); });
describe('when the data exceeds the maxSizeBytes', function() { describe('when the data exceeds the maxSizeBytes', function() {
let content; let content: string;
let maxSizeReached; let maxSizeReached: boolean;
beforeEach(async function() { beforeEach(async function() {
configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); configGetStub.withArgs('csv', 'maxSizeBytes').returns(9);
@ -803,12 +842,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
({ content, max_size_reached: maxSizeReached } = await executeJob( ({ content, max_size_reached: maxSizeReached } = await executeJob(
'job123', 'job123',
@ -827,8 +866,8 @@ describe('CSV Execute Job', function() {
}); });
describe('when headers and data equal the maxSizeBytes', function() { describe('when headers and data equal the maxSizeBytes', function() {
let content; let content: string;
let maxSizeReached; let maxSizeReached: boolean;
beforeEach(async function() { beforeEach(async function() {
mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient;
@ -842,12 +881,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
({ content, max_size_reached: maxSizeReached } = await executeJob( ({ content, max_size_reached: maxSizeReached } = await executeJob(
'job123', 'job123',
@ -879,12 +918,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await executeJob('job123', jobParams, cancellationToken); await executeJob('job123', jobParams, cancellationToken);
@ -905,12 +944,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await executeJob('job123', jobParams, cancellationToken); await executeJob('job123', jobParams, cancellationToken);
@ -931,12 +970,12 @@ describe('CSV Execute Job', function() {
}); });
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
const jobParams = { const jobParams = getJobDocPayload({
headers: encryptedHeaders, headers: encryptedHeaders,
fields: ['one', 'two'], fields: ['one', 'two'],
conflictedTypesFields: [], conflictedTypesFields: [],
searchRequest: { index: null, body: null }, searchRequest: { index: null, body: null },
}; });
await executeJob('job123', jobParams, cancellationToken); await executeJob('job123', jobParams, cancellationToken);
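A side note on one idiom in the file above: indexing a plain {} literal with a string key (uiConfigMock['format:defaultTypeMap']) stops type-checking once the file becomes .ts, which is why the converted test reaches for (uiConfigMock as any)[key]. A typed alternative, sketched here under the assumption that the mock only ever needs string keys, is to declare the object as a Record up front:

    // Sketch only: the key mirrors the one used in the test above; the value shape is illustrative.
    const uiConfigMock: Record<string, unknown> = {};
    uiConfigMock['format:defaultTypeMap'] = {
      _default_: { id: 'string', params: {} },
    };

    const getUiConfig = (key: string) => uiConfigMock[key];
    console.log(getUiConfig('format:defaultTypeMap'));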

View file

@ -5,19 +5,22 @@
*/ */
import * as Rx from 'rxjs'; import * as Rx from 'rxjs';
import { createMockReportingCore } from '../../../../test_helpers'; import { createMockReportingCore, createMockBrowserDriverFactory } from '../../../../test_helpers';
import { cryptoFactory } from '../../../../server/lib/crypto'; import { cryptoFactory } from '../../../../server/lib/crypto';
import { executeJobFactory } from './index'; import { executeJobFactory } from './index';
import { generatePngObservableFactory } from '../lib/generate_png'; import { generatePngObservableFactory } from '../lib/generate_png';
import { CancellationToken } from '../../../../common/cancellation_token';
import { LevelLogger } from '../../../../server/lib'; import { LevelLogger } from '../../../../server/lib';
import { ReportingCore, CaptureConfig } from '../../../../server/types';
import { JobDocPayloadPNG } from '../../types';
jest.mock('../lib/generate_png', () => ({ generatePngObservableFactory: jest.fn() })); jest.mock('../lib/generate_png', () => ({ generatePngObservableFactory: jest.fn() }));
let mockReporting; let mockReporting: ReportingCore;
const cancellationToken = { const cancellationToken = ({
on: jest.fn(), on: jest.fn(),
}; } as unknown) as CancellationToken;
const mockLoggerFactory = { const mockLoggerFactory = {
get: jest.fn().mockImplementation(() => ({ get: jest.fn().mockImplementation(() => ({
@ -28,12 +31,16 @@ const mockLoggerFactory = {
}; };
const getMockLogger = () => new LevelLogger(mockLoggerFactory); const getMockLogger = () => new LevelLogger(mockLoggerFactory);
const captureConfig = {} as CaptureConfig;
const mockEncryptionKey = 'abcabcsecuresecret'; const mockEncryptionKey = 'abcabcsecuresecret';
const encryptHeaders = async headers => { const encryptHeaders = async (headers: Record<string, string>) => {
const crypto = cryptoFactory(mockEncryptionKey); const crypto = cryptoFactory(mockEncryptionKey);
return await crypto.encrypt(headers); return await crypto.encrypt(headers);
}; };
const getJobDocPayload = (baseObj: any) => baseObj as JobDocPayloadPNG;
beforeEach(async () => { beforeEach(async () => {
const kbnConfig = { const kbnConfig = {
'server.basePath': '/sbp', 'server.basePath': '/sbp',
@ -45,8 +52,8 @@ beforeEach(async () => {
'kibanaServer.protocol': 'http', 'kibanaServer.protocol': 'http',
}; };
const mockReportingConfig = { const mockReportingConfig = {
get: (...keys) => reportingConfig[keys.join('.')], get: (...keys: string[]) => (reportingConfig as any)[keys.join('.')],
kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, kbnConfig: { get: (...keys: string[]) => (kbnConfig as any)[keys.join('.')] },
}; };
mockReporting = await createMockReportingCore(mockReportingConfig); mockReporting = await createMockReportingCore(mockReportingConfig);
@ -60,22 +67,30 @@ beforeEach(async () => {
mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch));
mockReporting.getElasticsearchService = mockGetElasticsearch; mockReporting.getElasticsearchService = mockGetElasticsearch;
generatePngObservableFactory.mockReturnValue(jest.fn()); (generatePngObservableFactory as jest.Mock).mockReturnValue(jest.fn());
}); });
afterEach(() => generatePngObservableFactory.mockReset()); afterEach(() => (generatePngObservableFactory as jest.Mock).mockReset());
test(`passes browserTimezone to generatePng`, async () => { test(`passes browserTimezone to generatePng`, async () => {
const encryptedHeaders = await encryptHeaders({}); const encryptedHeaders = await encryptHeaders({});
const mockBrowserDriverFactory = await createMockBrowserDriverFactory(getMockLogger());
-  const generatePngObservable = generatePngObservableFactory();
-  generatePngObservable.mockReturnValue(Rx.of(Buffer.from('')));
+  const generatePngObservable = generatePngObservableFactory(
+    captureConfig,
+    mockBrowserDriverFactory
+  );
+  (generatePngObservable as jest.Mock).mockReturnValue(Rx.of(Buffer.from('')));
const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const executeJob = await executeJobFactory(mockReporting, getMockLogger());
const browserTimezone = 'UTC'; const browserTimezone = 'UTC';
await executeJob( await executeJob(
'pngJobId', 'pngJobId',
-    { relativeUrl: '/app/kibana#/something', browserTimezone, headers: encryptedHeaders },
+    getJobDocPayload({
+      relativeUrl: '/app/kibana#/something',
+      browserTimezone,
+      headers: encryptedHeaders,
+    }),
cancellationToken cancellationToken
); );
@ -92,12 +107,17 @@ test(`returns content_type of application/png`, async () => {
const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const executeJob = await executeJobFactory(mockReporting, getMockLogger());
const encryptedHeaders = await encryptHeaders({}); const encryptedHeaders = await encryptHeaders({});
-  const generatePngObservable = generatePngObservableFactory();
-  generatePngObservable.mockReturnValue(Rx.of(Buffer.from('')));
+  const mockBrowserDriverFactory = await createMockBrowserDriverFactory(getMockLogger());
+  const generatePngObservable = generatePngObservableFactory(
+    captureConfig,
+    mockBrowserDriverFactory
+  );
+  (generatePngObservable as jest.Mock).mockReturnValue(Rx.of(Buffer.from('')));
const { content_type: contentType } = await executeJob( const { content_type: contentType } = await executeJob(
'pngJobId', 'pngJobId',
-    { relativeUrl: '/app/kibana#/something', timeRange: {}, headers: encryptedHeaders },
+    getJobDocPayload({ relativeUrl: '/app/kibana#/something', headers: encryptedHeaders }),
cancellationToken cancellationToken
); );
expect(contentType).toBe('image/png'); expect(contentType).toBe('image/png');
@ -106,14 +126,19 @@ test(`returns content_type of application/png`, async () => {
test(`returns content of generatePng getBuffer base64 encoded`, async () => { test(`returns content of generatePng getBuffer base64 encoded`, async () => {
const testContent = 'test content'; const testContent = 'test content';
-  const generatePngObservable = generatePngObservableFactory();
-  generatePngObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
+  const mockBrowserDriverFactory = await createMockBrowserDriverFactory(getMockLogger());
+  const generatePngObservable = generatePngObservableFactory(
+    captureConfig,
+    mockBrowserDriverFactory
+  );
+  (generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const executeJob = await executeJobFactory(mockReporting, getMockLogger());
const encryptedHeaders = await encryptHeaders({}); const encryptedHeaders = await encryptHeaders({});
const { content } = await executeJob( const { content } = await executeJob(
'pngJobId', 'pngJobId',
-    { relativeUrl: '/app/kibana#/something', timeRange: {}, headers: encryptedHeaders },
+    getJobDocPayload({ relativeUrl: '/app/kibana#/something', headers: encryptedHeaders }),
cancellationToken cancellationToken
); );
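The PNG and PDF suites share another conversion idiom: after jest.mock(...) replaces a module, the imported factory keeps its real signature, so mock-only helpers such as mockReturnValue and mockReset need an explicit cast to jest.Mock. A self-contained sketch of the idea — the factory type and names below are invented for illustration, not the Reporting ones:

    // Stand-in for a factory that the real suites import and replace via jest.mock().
    type GeneratePngFactory = (captureConfig: object) => (url: string) => Promise<Buffer>;

    // In the real tests the import itself is already a jest.fn(); here one is built directly.
    const generatePngFactory = (jest.fn() as unknown) as GeneratePngFactory;

    test('casting to jest.Mock exposes the mock helpers', () => {
      // The variable is typed as the real factory, so mock-only methods need a cast.
      (generatePngFactory as jest.Mock).mockReturnValue(jest.fn());

      const generatePng = generatePngFactory({});
      expect(jest.isMockFunction(generatePng)).toBe(true);

      (generatePngFactory as jest.Mock).mockReset();
    });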

View file

@ -5,19 +5,24 @@
*/ */
import * as Rx from 'rxjs'; import * as Rx from 'rxjs';
-import { createMockReportingCore } from '../../../../test_helpers';
+import { createMockReportingCore, createMockBrowserDriverFactory } from '../../../../test_helpers';
 import { cryptoFactory } from '../../../../server/lib/crypto';
-import { executeJobFactory } from './index';
-import { generatePdfObservableFactory } from '../lib/generate_pdf';
 import { LevelLogger } from '../../../../server/lib';
+import { CancellationToken } from '../../../../types';
+import { ReportingCore, CaptureConfig } from '../../../../server/types';
+import { generatePdfObservableFactory } from '../lib/generate_pdf';
+import { JobDocPayloadPDF } from '../../types';
+import { executeJobFactory } from './index';
 jest.mock('../lib/generate_pdf', () => ({ generatePdfObservableFactory: jest.fn() }));
-let mockReporting;
+let mockReporting: ReportingCore;
-const cancellationToken = {
+const cancellationToken = ({
   on: jest.fn(),
-};
+} as unknown) as CancellationToken;
+const captureConfig = {} as CaptureConfig;
const mockLoggerFactory = { const mockLoggerFactory = {
get: jest.fn().mockImplementation(() => ({ get: jest.fn().mockImplementation(() => ({
@ -29,11 +34,13 @@ const mockLoggerFactory = {
const getMockLogger = () => new LevelLogger(mockLoggerFactory); const getMockLogger = () => new LevelLogger(mockLoggerFactory);
const mockEncryptionKey = 'testencryptionkey'; const mockEncryptionKey = 'testencryptionkey';
const encryptHeaders = async headers => { const encryptHeaders = async (headers: Record<string, string>) => {
const crypto = cryptoFactory(mockEncryptionKey); const crypto = cryptoFactory(mockEncryptionKey);
return await crypto.encrypt(headers); return await crypto.encrypt(headers);
}; };
const getJobDocPayload = (baseObj: any) => baseObj as JobDocPayloadPDF;
beforeEach(async () => { beforeEach(async () => {
const kbnConfig = { const kbnConfig = {
'server.basePath': '/sbp', 'server.basePath': '/sbp',
@ -45,8 +52,8 @@ beforeEach(async () => {
'kibanaServer.protocol': 'http', 'kibanaServer.protocol': 'http',
}; };
const mockReportingConfig = { const mockReportingConfig = {
get: (...keys) => reportingConfig[keys.join('.')], get: (...keys: string[]) => (reportingConfig as any)[keys.join('.')],
kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, kbnConfig: { get: (...keys: string[]) => (kbnConfig as any)[keys.join('.')] },
}; };
mockReporting = await createMockReportingCore(mockReportingConfig); mockReporting = await createMockReportingCore(mockReportingConfig);
@ -60,21 +67,26 @@ beforeEach(async () => {
mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch));
mockReporting.getElasticsearchService = mockGetElasticsearch; mockReporting.getElasticsearchService = mockGetElasticsearch;
generatePdfObservableFactory.mockReturnValue(jest.fn()); (generatePdfObservableFactory as jest.Mock).mockReturnValue(jest.fn());
}); });
afterEach(() => generatePdfObservableFactory.mockReset()); afterEach(() => (generatePdfObservableFactory as jest.Mock).mockReset());
test(`returns content_type of application/pdf`, async () => { test(`returns content_type of application/pdf`, async () => {
-  const executeJob = await executeJobFactory(mockReporting, getMockLogger());
+  const logger = getMockLogger();
+  const executeJob = await executeJobFactory(mockReporting, logger);
+  const mockBrowserDriverFactory = await createMockBrowserDriverFactory(logger);
   const encryptedHeaders = await encryptHeaders({});
-  const generatePdfObservable = generatePdfObservableFactory();
-  generatePdfObservable.mockReturnValue(Rx.of(Buffer.from('')));
+  const generatePdfObservable = generatePdfObservableFactory(
+    captureConfig,
+    mockBrowserDriverFactory
+  );
+  (generatePdfObservable as jest.Mock).mockReturnValue(Rx.of(Buffer.from('')));
const { content_type: contentType } = await executeJob( const { content_type: contentType } = await executeJob(
'pdfJobId', 'pdfJobId',
-    { relativeUrls: [], timeRange: {}, headers: encryptedHeaders },
+    getJobDocPayload({ relativeUrls: [], headers: encryptedHeaders }),
cancellationToken cancellationToken
); );
expect(contentType).toBe('application/pdf'); expect(contentType).toBe('application/pdf');
@ -82,15 +94,19 @@ test(`returns content_type of application/pdf`, async () => {
test(`returns content of generatePdf getBuffer base64 encoded`, async () => { test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
const testContent = 'test content'; const testContent = 'test content';
-  const generatePdfObservable = generatePdfObservableFactory();
-  generatePdfObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
+  const mockBrowserDriverFactory = await createMockBrowserDriverFactory(getMockLogger());
+  const generatePdfObservable = generatePdfObservableFactory(
+    captureConfig,
+    mockBrowserDriverFactory
+  );
+  (generatePdfObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const executeJob = await executeJobFactory(mockReporting, getMockLogger());
const encryptedHeaders = await encryptHeaders({}); const encryptedHeaders = await encryptHeaders({});
const { content } = await executeJob( const { content } = await executeJob(
'pdfJobId', 'pdfJobId',
-    { relativeUrls: [], timeRange: {}, headers: encryptedHeaders },
+    getJobDocPayload({ relativeUrls: [], headers: encryptedHeaders }),
cancellationToken cancellationToken
); );

View file

@ -27,7 +27,7 @@ const describeWithContext = describe.each([
describeWithContext('config schema with context %j', context => { describeWithContext('config schema with context %j', context => {
it('produces correct config', async () => { it('produces correct config', async () => {
const schema = await getConfigSchema(reporting); const schema = await getConfigSchema(reporting);
const value = await schema.validate({}, { context }); const value: any = await schema.validate({}, { context });
value.capture.browser.chromium.disableSandbox = '<platform dependent>'; value.capture.browser.chromium.disableSandbox = '<platform dependent>';
await expect(value).toMatchSnapshot(); await expect(value).toMatchSnapshot();
}); });

View file

@ -6,7 +6,10 @@
import Hapi from 'hapi'; import Hapi from 'hapi';
import { createMockReportingCore } from '../../test_helpers'; import { createMockReportingCore } from '../../test_helpers';
import { ExportTypeDefinition } from '../../types';
import { ExportTypesRegistry } from '../lib/export_types_registry'; import { ExportTypesRegistry } from '../lib/export_types_registry';
import { LevelLogger } from '../lib/level_logger';
import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types';
jest.mock('./lib/authorized_user_pre_routing', () => ({ jest.mock('./lib/authorized_user_pre_routing', () => ({
authorizedUserPreRoutingFactory: () => () => ({}), authorizedUserPreRoutingFactory: () => () => ({}),
@ -19,14 +22,14 @@ jest.mock('./lib/reporting_feature_pre_routing', () => ({
import { registerJobInfoRoutes } from './jobs'; import { registerJobInfoRoutes } from './jobs';
let mockServer; let mockServer: any;
let exportTypesRegistry; let exportTypesRegistry: ExportTypesRegistry;
let mockReportingPlugin; let mockReportingPlugin: ReportingCore;
let mockReportingConfig; let mockReportingConfig: ReportingConfig;
const mockLogger = { const mockLogger = ({
error: jest.fn(), error: jest.fn(),
debug: jest.fn(), debug: jest.fn(),
}; } as unknown) as LevelLogger;
beforeEach(async () => { beforeEach(async () => {
mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } }); mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } });
@ -35,38 +38,39 @@ beforeEach(async () => {
id: 'unencoded', id: 'unencoded',
jobType: 'unencodedJobType', jobType: 'unencodedJobType',
jobContentExtension: 'csv', jobContentExtension: 'csv',
}); } as ExportTypeDefinition<unknown, unknown, unknown, unknown>);
exportTypesRegistry.register({ exportTypesRegistry.register({
id: 'base64Encoded', id: 'base64Encoded',
jobType: 'base64EncodedJobType', jobType: 'base64EncodedJobType',
jobContentEncoding: 'base64', jobContentEncoding: 'base64',
jobContentExtension: 'pdf', jobContentExtension: 'pdf',
}); } as ExportTypeDefinition<unknown, unknown, unknown, unknown>);
mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } };
mockReportingPlugin = await createMockReportingCore(mockReportingConfig); mockReportingPlugin = await createMockReportingCore(mockReportingConfig);
mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry; mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry;
}); });
const mockPlugins = { const mockPlugins = ({
elasticsearch: { elasticsearch: {
adminClient: { callAsInternalUser: jest.fn() }, adminClient: { callAsInternalUser: jest.fn() },
}, },
security: null, security: null,
}; } as unknown) as ReportingSetupDeps;
const getHits = (...sources) => { const getHits = (...sources: any) => {
return { return {
hits: { hits: {
hits: sources.map(source => ({ _source: source })), hits: sources.map((source: object) => ({ _source: source })),
}, },
}; };
}; };
const getErrorsFromRequest = request => const getErrorsFromRequest = (request: any) =>
request.logs.filter(log => log.tags.includes('error')).map(log => log.error); request.logs.filter((log: any) => log.tags.includes('error')).map((log: any) => log.error);
test(`returns 404 if job not found`, async () => { test(`returns 404 if job not found`, async () => {
// @ts-ignore
mockPlugins.elasticsearch.adminClient = { mockPlugins.elasticsearch.adminClient = {
callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())), callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())),
}; };
@ -84,6 +88,7 @@ test(`returns 404 if job not found`, async () => {
}); });
test(`returns 401 if not valid job type`, async () => { test(`returns 401 if not valid job type`, async () => {
// @ts-ignore
mockPlugins.elasticsearch.adminClient = { mockPlugins.elasticsearch.adminClient = {
callAsInternalUser: jest callAsInternalUser: jest
.fn() .fn()
@ -103,6 +108,7 @@ test(`returns 401 if not valid job type`, async () => {
describe(`when job is incomplete`, () => { describe(`when job is incomplete`, () => {
const getIncompleteResponse = async () => { const getIncompleteResponse = async () => {
// @ts-ignore
mockPlugins.elasticsearch.adminClient = { mockPlugins.elasticsearch.adminClient = {
callAsInternalUser: jest callAsInternalUser: jest
.fn() .fn()
@ -149,6 +155,7 @@ describe(`when job is failed`, () => {
status: 'failed', status: 'failed',
output: { content: 'job failure message' }, output: { content: 'job failure message' },
}); });
// @ts-ignore
mockPlugins.elasticsearch.adminClient = { mockPlugins.elasticsearch.adminClient = {
callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
}; };
@ -194,6 +201,7 @@ describe(`when job is completed`, () => {
title, title,
}, },
}); });
// @ts-ignore
mockPlugins.elasticsearch.adminClient = { mockPlugins.elasticsearch.adminClient = {
callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
}; };

View file

@ -11,18 +11,21 @@ import {
registerReportingUsageCollector, registerReportingUsageCollector,
getReportingUsageCollector, getReportingUsageCollector,
} from './reporting_usage_collector'; } from './reporting_usage_collector';
import { ReportingConfig } from '../types';
const exportTypesRegistry = getExportTypesRegistry(); const exportTypesRegistry = getExportTypesRegistry();
function getMockUsageCollection() { function getMockUsageCollection() {
class MockUsageCollector { class MockUsageCollector {
-    constructor(_server, { fetch }) {
+    // @ts-ignore fetch is not used
+    private fetch: any;
+    constructor(_server: any, { fetch }: any) {
this.fetch = fetch; this.fetch = fetch;
} }
} }
return { return {
makeUsageCollector: options => { makeUsageCollector: (options: any) => {
return new MockUsageCollector(this, options); return new MockUsageCollector(null, options);
}, },
registerCollector: sinon.stub(), registerCollector: sinon.stub(),
}; };
@ -51,7 +54,7 @@ function getPluginsMock(
xpack_main: mockXpackMain, xpack_main: mockXpackMain,
}, },
}, },
}; } as any;
} }
const getMockReportingConfig = () => ({ const getMockReportingConfig = () => ({
@ -61,13 +64,13 @@ const getMockReportingConfig = () => ({
const getResponseMock = (customization = {}) => customization; const getResponseMock = (customization = {}) => customization;
describe('license checks', () => { describe('license checks', () => {
let mockConfig; let mockConfig: ReportingConfig;
beforeAll(async () => { beforeAll(async () => {
mockConfig = getMockReportingConfig(); mockConfig = getMockReportingConfig();
}); });
describe('with a basic license', () => { describe('with a basic license', () => {
let usageStats; let usageStats: any;
beforeAll(async () => { beforeAll(async () => {
const plugins = getPluginsMock({ license: 'basic' }); const plugins = getPluginsMock({ license: 'basic' });
const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock()));
@ -75,9 +78,12 @@ describe('license checks', () => {
mockConfig, mockConfig,
plugins.usageCollection, plugins.usageCollection,
plugins.__LEGACY.plugins.xpack_main.info, plugins.__LEGACY.plugins.xpack_main.info,
exportTypesRegistry exportTypesRegistry,
function isReady() {
return Promise.resolve(true);
}
); );
usageStats = await fetch(callClusterMock, exportTypesRegistry); usageStats = await fetch(callClusterMock as any);
}); });
test('sets enables to true', async () => { test('sets enables to true', async () => {
@ -94,7 +100,7 @@ describe('license checks', () => {
}); });
describe('with no license', () => { describe('with no license', () => {
let usageStats; let usageStats: any;
beforeAll(async () => { beforeAll(async () => {
const plugins = getPluginsMock({ license: 'none' }); const plugins = getPluginsMock({ license: 'none' });
const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock()));
@ -102,9 +108,12 @@ describe('license checks', () => {
mockConfig, mockConfig,
plugins.usageCollection, plugins.usageCollection,
plugins.__LEGACY.plugins.xpack_main.info, plugins.__LEGACY.plugins.xpack_main.info,
exportTypesRegistry exportTypesRegistry,
function isReady() {
return Promise.resolve(true);
}
); );
usageStats = await fetch(callClusterMock, exportTypesRegistry); usageStats = await fetch(callClusterMock as any);
}); });
test('sets enables to true', async () => { test('sets enables to true', async () => {
@ -121,7 +130,7 @@ describe('license checks', () => {
}); });
describe('with platinum license', () => { describe('with platinum license', () => {
let usageStats; let usageStats: any;
beforeAll(async () => { beforeAll(async () => {
const plugins = getPluginsMock({ license: 'platinum' }); const plugins = getPluginsMock({ license: 'platinum' });
const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock()));
@ -129,9 +138,12 @@ describe('license checks', () => {
mockConfig, mockConfig,
plugins.usageCollection, plugins.usageCollection,
plugins.__LEGACY.plugins.xpack_main.info, plugins.__LEGACY.plugins.xpack_main.info,
exportTypesRegistry exportTypesRegistry,
function isReady() {
return Promise.resolve(true);
}
); );
usageStats = await fetch(callClusterMock, exportTypesRegistry); usageStats = await fetch(callClusterMock as any);
}); });
test('sets enables to true', async () => { test('sets enables to true', async () => {
@ -148,7 +160,7 @@ describe('license checks', () => {
}); });
describe('with no usage data', () => { describe('with no usage data', () => {
let usageStats; let usageStats: any;
beforeAll(async () => { beforeAll(async () => {
const plugins = getPluginsMock({ license: 'basic' }); const plugins = getPluginsMock({ license: 'basic' });
const callClusterMock = jest.fn(() => Promise.resolve({})); const callClusterMock = jest.fn(() => Promise.resolve({}));
@ -156,9 +168,12 @@ describe('license checks', () => {
mockConfig, mockConfig,
plugins.usageCollection, plugins.usageCollection,
plugins.__LEGACY.plugins.xpack_main.info, plugins.__LEGACY.plugins.xpack_main.info,
exportTypesRegistry exportTypesRegistry,
function isReady() {
return Promise.resolve(true);
}
); );
usageStats = await fetch(callClusterMock, exportTypesRegistry); usageStats = await fetch(callClusterMock as any);
}); });
test('sets enables to true', async () => { test('sets enables to true', async () => {
@ -179,7 +194,10 @@ describe('data modeling', () => {
mockConfig, mockConfig,
plugins.usageCollection, plugins.usageCollection,
plugins.__LEGACY.plugins.xpack_main.info, plugins.__LEGACY.plugins.xpack_main.info,
exportTypesRegistry exportTypesRegistry,
function isReady() {
return Promise.resolve(true);
}
); );
const callClusterMock = jest.fn(() => const callClusterMock = jest.fn(() =>
Promise.resolve( Promise.resolve(
@ -303,7 +321,7 @@ describe('data modeling', () => {
) )
); );
const usageStats = await fetch(callClusterMock); const usageStats = await fetch(callClusterMock as any);
expect(usageStats).toMatchInlineSnapshot(` expect(usageStats).toMatchInlineSnapshot(`
Object { Object {
"PNG": Object { "PNG": Object {

View file

@ -92,7 +92,7 @@ const defaultOpts: CreateMockBrowserDriverFactoryOpts = {
export const createMockBrowserDriverFactory = async ( export const createMockBrowserDriverFactory = async (
logger: Logger, logger: Logger,
opts: Partial<CreateMockBrowserDriverFactoryOpts> opts: Partial<CreateMockBrowserDriverFactoryOpts> = {}
): Promise<HeadlessChromiumDriverFactory> => { ): Promise<HeadlessChromiumDriverFactory> => {
const captureConfig = { const captureConfig = {
timeouts: { openUrl: 30000, waitForElements: 30000, renderComplete: 30000 }, timeouts: { openUrl: 30000, waitForElements: 30000, renderComplete: 30000 },
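Worth spelling out why the small default-value change above matters: giving opts a default of {} makes the second argument optional, which is what lets the PNG and PDF suites call createMockBrowserDriverFactory(logger) with only a logger. A generic sketch of the pattern — the option names here are made up:

    interface FactoryOpts {
      headless: boolean;
      timeoutMs: number;
    }

    // Defaulting to {} keeps Partial<FactoryOpts> as the declared shape
    // while making the argument optional for callers.
    function createFactory(name: string, opts: Partial<FactoryOpts> = {}) {
      return {
        name,
        headless: opts.headless !== undefined ? opts.headless : true,
        timeoutMs: opts.timeoutMs !== undefined ? opts.timeoutMs : 30000,
      };
    }

    console.log(createFactory('mock')); // falls back to the defaults
    console.log(createFactory('mock', { timeoutMs: 5000 }));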

View file

@ -186,7 +186,7 @@ export type ESQueueWorkerExecuteFn<JobDocPayloadType> = (
jobId: string, jobId: string,
job: JobDocPayloadType, job: JobDocPayloadType,
cancellationToken?: CancellationToken cancellationToken?: CancellationToken
) => void; ) => Promise<any>;
/* /*
* ImmediateExecuteFn receives the job doc payload because the payload was * ImmediateExecuteFn receives the job doc payload because the payload was