[Reporting] Remove any types and references to Hapi (#49250)

* [Reporting] Remove any types and references to Hapi

* clarification comment

* fix import
Tim Sullivan 2019-11-14 12:56:15 -07:00 committed by GitHub
parent 930c156585
commit 08471cc88a
16 changed files with 135 additions and 79 deletions

View file

@@ -21,7 +21,7 @@ export interface JobDocPayloadPNG extends JobDocPayload {
   basePath?: string;
   browserTimezone: string;
   forceNow?: string;
-  layout: any;
+  layout: LayoutInstance;
   relativeUrl: string;
   objects: undefined;
 }

View file

@@ -8,7 +8,7 @@ import { resolve as resolvePath } from 'path';
 import { existsSync } from 'fs';
 import { chromium } from '../index';
-import { BrowserType } from '../types';
+import { BrowserDownload, BrowserType } from '../types';
 import { md5 } from './checksum';
 import { asyncMap } from './util';
@@ -40,15 +40,7 @@ export async function ensureAllBrowsersDownloaded() {
  * @param {BrowserSpec} browsers
  * @return {Promise<undefined>}
  */
-async function ensureDownloaded(
-  browsers: Array<{
-    paths: {
-      archivesPath: string;
-      baseUrl: string;
-      packages: Array<{ archiveFilename: string; archiveChecksum: string }>;
-    };
-  }>
-) {
+async function ensureDownloaded(browsers: BrowserDownload[]) {
   await asyncMap(browsers, async browser => {
     const { archivesPath } = browser.paths;

View file

@@ -12,6 +12,7 @@ import { LevelLogger as Logger } from '../lib/level_logger';
 import { extract } from './extract';
 // @ts-ignore
 import { md5 } from './download/checksum';
+import { BrowserDownload } from './types';

 const chmod = promisify(fs.chmod);
@@ -28,7 +29,7 @@ interface PathResponse {
  */
 export async function installBrowser(
   logger: Logger,
-  browser: any,
+  browser: BrowserDownload,
   installsPath: string
 ): Promise<PathResponse> {
   const pkg = browser.paths.packages.find((p: Package) => p.platforms.includes(process.platform));

View file

@@ -5,3 +5,17 @@
  */

 export type BrowserType = 'chromium';
+
+export interface BrowserDownload {
+  paths: {
+    archivesPath: string;
+    baseUrl: string;
+    packages: Array<{
+      archiveChecksum: string;
+      archiveFilename: string;
+      binaryChecksum: string;
+      binaryRelativePath: string;
+      platforms: string[];
+    }>;
+  };
+}
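For context, a value satisfying the new interface might look like the following. This is a hypothetical sketch: the paths, URL, and checksums are placeholders for illustration, not the real Chromium download spec.

import { BrowserDownload } from './types';

// Hypothetical example of the shape ensureDownloaded() and
// installBrowser() now expect instead of `any`.
const chromiumDownload: BrowserDownload = {
  paths: {
    archivesPath: '/tmp/chromium-archives', // local cache for downloaded archives (placeholder)
    baseUrl: 'https://example.com/chromium/', // download origin (placeholder)
    packages: [
      {
        archiveChecksum: 'd41d8cd98f00b204e9800998ecf8427e', // placeholder md5
        archiveFilename: 'chromium-linux.zip',
        binaryChecksum: 'e99a18c428cb38d5f260853678922e03', // placeholder md5
        binaryRelativePath: 'chromium/chrome',
        platforms: ['linux'],
      },
    ],
  },
};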

View file

@@ -5,6 +5,7 @@
  */
 import { PLUGIN_ID } from '../../common/constants';
+import { CancellationToken } from '../../common/cancellation_token';
 import {
   ESQueueInstance,
   QueueConfig,
@@ -14,6 +15,7 @@ import {
   JobDoc,
   JobDocPayload,
   JobSource,
+  RequestFacade,
   ServerFacade,
 } from '../../types';
 // @ts-ignore untyped dependency
@@ -39,17 +41,23 @@ function createWorkerFn(server: ServerFacade) {
     jobExecutors.set(exportType.jobType, executeJobFactory);
   }
-  const workerFn = (job: JobSource, jobdoc: JobDocPayload | JobDoc, cancellationToken?: any) => {
+  const workerFn = (
+    job: JobSource,
+    arg1: JobDocPayload | JobDoc,
+    arg2: CancellationToken | RequestFacade | undefined
+  ) => {
     // pass the work to the jobExecutor
     if (!jobExecutors.get(job._source.jobtype)) {
       throw new Error(`Unable to find a job executor for the claimed job: [${job._id}]`);
     }
+    // job executor function signature is different depending on whether it
+    // is ESQueueWorkerExecuteFn or ImmediateExecuteFn
     if (job._id) {
       const jobExecutor = jobExecutors.get(job._source.jobtype) as ESQueueWorkerExecuteFn;
-      return jobExecutor(job._id, jobdoc as JobDoc, cancellationToken);
+      return jobExecutor(job._id, arg1 as JobDoc, arg2 as CancellationToken);
     } else {
       const jobExecutor = jobExecutors.get(job._source.jobtype) as ImmediateExecuteFn;
-      return jobExecutor(null, jobdoc as JobDocPayload, cancellationToken);
+      return jobExecutor(null, arg1 as JobDocPayload, arg2 as RequestFacade);
     }
   };
   const workerOptions = {
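The new comment is there because the worker multiplexes two executor shapes that only line up structurally. Paraphrasing the signatures as they appear in this commit's types.ts changes (the immediate executor's return type is not visible in this diff, so it is left loose here):

// Queued jobs: have a job._id, executed with a CancellationToken.
type ESQueueWorkerExecuteFn = (
  jobId: string,
  job: JobDoc,
  cancellationToken?: CancellationToken
) => void;

// Immediate ("download now") jobs: no _id, executed against the original
// request. Parameter shapes inferred from the call site above; the return
// type is not shown in this diff.
type ImmediateExecuteFn = (
  jobId: null, // JobIDForImmediate
  job: JobDocPayload,
  request: RequestFacade
) => unknown;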

View file

@@ -5,16 +5,20 @@
  */
 import { ServerFacade, Logger } from '../../../types';
+import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory';
 import { validateBrowser } from './validate_browser';
 import { validateConfig } from './validate_config';
 import { validateMaxContentLength } from './validate_max_content_length';

-export async function runValidations(server: ServerFacade, logger: Logger, browserFactory: any) {
+export async function runValidations(
+  server: ServerFacade,
+  logger: Logger,
+  browserFactory: HeadlessChromiumDriverFactory
+) {
   try {
-    const config = server.config();
     await Promise.all([
       validateBrowser(server, browserFactory, logger),
-      validateConfig(config, logger),
+      validateConfig(server, logger),
       validateMaxContentLength(server, logger),
     ]);
     logger.debug(`Reporting plugin self-check ok!`);

View file

@@ -5,15 +5,19 @@
  */
 import crypto from 'crypto';
-import { Logger } from '../../../types';
+import { ServerFacade, Logger } from '../../../types';

-export function validateConfig(config: any, logger: Logger) {
+export function validateConfig(serverFacade: ServerFacade, logger: Logger) {
+  const config = serverFacade.config();
   const encryptionKey = config.get('xpack.reporting.encryptionKey');
   if (encryptionKey == null) {
     logger.warning(
       `Generating a random key for xpack.reporting.encryptionKey. To prevent pending reports from failing on restart, please set ` +
         `xpack.reporting.encryptionKey in kibana.yml`
     );
-    config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex'));
+    // @ts-ignore: No set() method on KibanaConfig, just get() and has()
+    config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex')); // update config in memory to contain a usable encryption key
   }
 }

View file

@@ -5,12 +5,12 @@
  */
 import numeral from '@elastic/numeral';
 import { defaults, get } from 'lodash';
-import { Logger } from '../../../types';
+import { Logger, ServerFacade } from '../../../types';

 const KIBANA_MAX_SIZE_BYTES_PATH = 'xpack.reporting.csv.maxSizeBytes';
 const ES_MAX_SIZE_BYTES_PATH = 'http.max_content_length';

-export async function validateMaxContentLength(server: any, logger: Logger) {
+export async function validateMaxContentLength(server: ServerFacade, logger: Logger) {
   const config = server.config();
   const { callWithInternalUser } = server.plugins.elasticsearch.getCluster('data');
@@ -22,7 +22,7 @@ export async function validateMaxContentLength(server: any, logger: Logger) {
   const elasticSearchMaxContent = get(elasticClusterSettings, 'http.max_content_length', '100mb');
   const elasticSearchMaxContentBytes = numeral().unformat(elasticSearchMaxContent.toUpperCase());
-  const kibanaMaxContentBytes = config.get(KIBANA_MAX_SIZE_BYTES_PATH);
+  const kibanaMaxContentBytes: number = config.get(KIBANA_MAX_SIZE_BYTES_PATH);

   if (kibanaMaxContentBytes > elasticSearchMaxContentBytes) {
     logger.warning(

View file

@@ -76,7 +76,10 @@ export function registerGenerateFromJobParams(
       const { exportType } = request.params;
       let response;
       try {
-        const jobParams = rison.decode(jobParamsRison);
+        const jobParams = rison.decode(jobParamsRison) as object | null;
+        if (!jobParams) {
+          throw new Error('missing jobParams!');
+        }
         response = await handler(exportType, jobParams, request, h);
       } catch (err) {
         throw boom.badRequest(`invalid rison: ${jobParamsRison}`);
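Why the new guard matters: rison can legitimately decode to null (null encodes as !n), and the old code would have passed that straight into handler(). A minimal sketch, assuming the rison module's decode() behavior:

// rison.decode('(page:1,size:10)')  -> { page: 1, size: 10 }
// rison.decode('!n')                -> null, which must not reach handler()
const decoded = rison.decode('!n') as object | null;
if (!decoded) {
  throw new Error('missing jobParams!'); // surfaces as a boom.badRequest in the route
}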

View file

@@ -26,7 +26,7 @@ export function registerRoutes(server: ServerFacade, logger: Logger) {
    */
   async function handler(
     exportTypeId: string,
-    jobParams: any,
+    jobParams: object,
     request: RequestFacade,
     h: ReportingResponseToolkit
   ) {

View file

@@ -4,9 +4,15 @@
  * you may not use this file except in compliance with the Elastic License.
  */
+import * as _ from 'lodash';
 // @ts-ignore
 import contentDisposition from 'content-disposition';
-import * as _ from 'lodash';
+import {
+  ServerFacade,
+  ExportTypeDefinition,
+  JobDocExecuted,
+  JobDocOutputExecuted,
+} from '../../../types';
 import { oncePerServer } from '../../lib/once_per_server';
 import { CSV_JOB_TYPE } from '../../../common/constants';
@@ -16,10 +22,10 @@ interface ICustomHeaders {
 const DEFAULT_TITLE = 'report';

-const getTitle = (exportType: any, title?: string): string =>
+const getTitle = (exportType: ExportTypeDefinition, title?: string): string =>
   `${title || DEFAULT_TITLE}.${exportType.jobContentExtension}`;

-const getReportingHeaders = (output: any, exportType: any) => {
+const getReportingHeaders = (output: JobDocOutputExecuted, exportType: ExportTypeDefinition) => {
   const metaDataHeaders: ICustomHeaders = {};

   if (exportType.jobType === CSV_JOB_TYPE) {
@@ -33,20 +39,22 @@ const getReportingHeaders = (output: any, exportType: any) => {
   return metaDataHeaders;
 };

-function getDocumentPayloadFn(server: any) {
-  const exportTypesRegistry = server.plugins.reporting.exportTypesRegistry;
+function getDocumentPayloadFn(server: ServerFacade) {
+  const exportTypesRegistry = server.plugins.reporting!.exportTypesRegistry;

-  function encodeContent(content: string, exportType: any) {
+  function encodeContent(content: string | null, exportType: ExportTypeDefinition) {
     switch (exportType.jobContentEncoding) {
       case 'base64':
-        return Buffer.from(content, 'base64');
+        return content ? Buffer.from(content, 'base64') : content; // Buffer.from rejects null
       default:
         return content;
     }
   }

-  function getCompleted(output: any, jobType: string, title: any) {
-    const exportType = exportTypesRegistry.get((item: any) => item.jobType === jobType);
+  function getCompleted(output: JobDocOutputExecuted, jobType: string, title: string) {
+    const exportType = exportTypesRegistry.get(
+      (item: ExportTypeDefinition) => item.jobType === jobType
+    );
     const filename = getTitle(exportType, title);
     const headers = getReportingHeaders(output, exportType);
@@ -61,7 +69,7 @@ function getDocumentPayloadFn(server: any) {
     };
   }

-  function getFailure(output: any) {
+  function getFailure(output: JobDocOutputExecuted) {
     return {
       statusCode: 500,
       content: {
@@ -72,19 +80,18 @@ function getDocumentPayloadFn(server: any) {
     };
   }

-  function getIncomplete(status: any) {
+  function getIncomplete(status: string) {
     return {
       statusCode: 503,
       content: status,
       contentType: 'application/json',
-      headers: {
-        'retry-after': 30,
-      },
+      headers: { 'retry-after': 30 },
     };
   }

-  return function getDocumentPayload(doc: any) {
-    const { status, output, jobtype: jobType, payload: { title } = { title: '' } } = doc._source;
+  return function getDocumentPayload(doc: { _source: JobDocExecuted }) {
+    const { status, jobtype: jobType, payload: { title } = { title: '' } } = doc._source;
+    const { output } = doc._source as { output: JobDocOutputExecuted };

     if (status === 'completed') {
       return getCompleted(output, jobType, title);

View file

@@ -7,13 +7,13 @@
 import { RequestFacade, ReportingResponseToolkit, JobDocPayload } from '../../types';

 export type HandlerFunction = (
-  exportType: any,
-  jobParams: any,
+  exportType: string,
+  jobParams: object,
   request: RequestFacade,
   h: ReportingResponseToolkit
 ) => any;

-export type HandlerErrorFunction = (exportType: any, err: Error) => any;
+export type HandlerErrorFunction = (exportType: string, err: Error) => any;

 export interface QueuedJobPayload {
   error?: boolean;
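A handler conforming to the tightened signature now receives a plain string and an opaque object rather than any. A hypothetical sketch (the body is illustrative, not from this commit; the real handler is shown earlier in this diff):

const handleGenerate: HandlerFunction = async (
  exportType: string,
  jobParams: object,
  request: RequestFacade,
  h: ReportingResponseToolkit
) => {
  // Hypothetical body: acknowledge the enqueued job via the hapi toolkit.
  return h.response({ exportType, jobParams }).code(200);
};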

View file

@@ -5,6 +5,7 @@
  */
 import { get } from 'lodash';
+import { ServerFacade, ESCallCluster } from '../../types';
 import {
   AggregationBuckets,
   AggregationResults,
@@ -13,7 +14,6 @@ import {
   KeyCountBucket,
   RangeAggregationResults,
   RangeStats,
-  UsageObject,
 } from './types';
 import { decorateRangeStats } from './decorate_range_stats';
 // @ts-ignore untyped module
@@ -80,7 +80,10 @@ type RangeStatSets = Partial<
     last7Days: RangeStats;
   }
 >;
-async function handleResponse(server: any, response: AggregationResults): Promise<RangeStatSets> {
+async function handleResponse(
+  server: ServerFacade,
+  response: AggregationResults
+): Promise<RangeStatSets> {
   const buckets = get(response, 'aggregations.ranges.buckets');
   if (!buckets) {
     return {};
@@ -98,7 +101,7 @@ async function handleResponse(server: any, response: AggregationResults): Promise<RangeStatSets> {
   };
 }

-export async function getReportingUsage(server: any, callCluster: any) {
+export async function getReportingUsage(server: ServerFacade, callCluster: ESCallCluster) {
   const config = server.config();
   const reportingIndex = config.get('xpack.reporting.index');
@@ -135,7 +138,7 @@ export async function getReportingUsage(server: any, callCluster: any) {
   return callCluster('search', params)
     .then((response: AggregationResults) => handleResponse(server, response))
-    .then(async (usage: UsageObject) => {
+    .then(async (usage: RangeStatSets) => {
       // Allow this to explicitly throw an exception if/when this config is deprecated,
       // because we shouldn't collect browserType in that case!
       const browserType = config.get('xpack.reporting.capture.browser.type');

View file

@@ -6,26 +6,28 @@
 // @ts-ignore untyped module
 import { KIBANA_STATS_TYPE_MONITORING } from '../../../monitoring/common/constants';
+import { ServerFacade, ESCallCluster } from '../../types';
 import { KIBANA_REPORTING_TYPE } from '../../common/constants';
 import { getReportingUsage } from './get_reporting_usage';
+import { RangeStats } from './types';

 /*
  * @param {Object} server
  * @return {Object} kibana usage stats type collection object
  */
-export function getReportingUsageCollector(server: any, isReady: () => boolean) {
+export function getReportingUsageCollector(server: ServerFacade, isReady: () => boolean) {
   const { collectorSet } = server.usage;
   return collectorSet.makeUsageCollector({
     type: KIBANA_REPORTING_TYPE,
     isReady,
-    fetch: (callCluster: any) => getReportingUsage(server, callCluster),
+    fetch: (callCluster: ESCallCluster) => getReportingUsage(server, callCluster),
     /*
      * Format the response data into a model for internal upload
      * 1. Make this data part of the "kibana_stats" type
      * 2. Organize the payload in the usage.xpack.reporting namespace of the data payload
      */
-    formatForBulkUpload: (result: any) => {
+    formatForBulkUpload: (result: RangeStats) => {
       return {
         type: KIBANA_STATS_TYPE_MONITORING,
         payload: {

View file

@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-interface AvailableTotal {
+export interface AvailableTotal {
   available: boolean;
   total: number;
 }
@@ -56,13 +56,5 @@ export type RangeStats = JobTypes & {
   status: StatusCounts;
 };

-export type UsageObject = RangeStats & {
-  available: boolean;
-  enabled: boolean;
-  browser_type: string;
-  lastDay: RangeStats;
-  last7Days: RangeStats;
-};
-
 export type ExportType = 'csv' | 'printable_pdf' | 'PNG';
 export type FeatureAvailabilityMap = { [F in ExportType]: boolean };

View file

@@ -8,7 +8,11 @@ import { ResponseObject } from 'hapi';
 import { EventEmitter } from 'events';
 import { Legacy } from 'kibana';
 import { XPackMainPlugin } from '../xpack_main/xpack_main';
-import { ElasticsearchPlugin } from '../../../../src/legacy/core_plugins/elasticsearch';
+import {
+  ElasticsearchPlugin,
+  CallCluster,
+} from '../../../../src/legacy/core_plugins/elasticsearch';
+import { CancellationToken } from './common/cancellation_token';
 import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory';
 import { BrowserType } from './server/browsers/types';
@@ -18,9 +22,11 @@ export interface ReportingPlugin {
   queue: {
     addJob: (type: string, payload: object, options: object) => Job;
   };
+  // TODO: convert exportTypesRegistry to TS
   exportTypesRegistry: {
     getById: (id: string) => ExportTypeDefinition;
     getAll: () => ExportTypeDefinition[];
+    get: (callback: (item: ExportTypeDefinition) => boolean) => ExportTypeDefinition;
   };
   browserDriverFactory: HeadlessChromiumDriverFactory;
 }
@@ -52,18 +58,6 @@ export interface NetworkPolicy {
   rules: NetworkPolicyRule[];
 }

-// Tracks which parts of the legacy plugin system are being used
-export type ReportingPluginSpecOptions = Legacy.PluginSpecOptions;
-
-export type ServerFacade = Legacy.Server & {
-  plugins: {
-    reporting?: ReportingPlugin;
-    xpack_main?: XPackMainPlugin & {
-      status?: any;
-    };
-  };
-};
-
 interface ListQuery {
   page: string;
   size: string;
@@ -79,7 +73,21 @@ interface DownloadParams {
   docId: string;
 }

-// Tracks which parts of the legacy plugin system are being used
+/*
+ * Legacy System
+ */
+
+export type ReportingPluginSpecOptions = Legacy.PluginSpecOptions;
+
+export type ServerFacade = Legacy.Server & {
+  plugins: {
+    reporting?: ReportingPlugin;
+    xpack_main?: XPackMainPlugin & {
+      status?: any;
+    };
+  };
+};
+
 interface ReportingRequest {
   query: ListQuery & GenerateQuery;
   params: DownloadParams;
@@ -100,6 +108,12 @@ export type ResponseFacade = ResponseObject & {
 export type ReportingResponseToolkit = Legacy.ResponseToolkit;

+export type ESCallCluster = CallCluster;
+
+/*
+ * Reporting Config
+ */
+
 export interface CaptureConfig {
   browser: {
     type: BrowserType;
@@ -184,6 +198,11 @@ export interface JobDocPayload {
   type: string | null;
 }

+export interface JobSource {
+  _id: string;
+  _source: JobDoc;
+}
+
 export interface JobDocOutput {
   content: string; // encoded content
   contentType: string;
@@ -196,9 +215,11 @@ export interface JobDoc {
   status: string; // completed, failed, etc
 }

-export interface JobSource {
-  _id: string;
-  _source: JobDoc;
-}
+export interface JobDocExecuted {
+  jobtype: string;
+  output: JobDocOutputExecuted;
+  payload: JobDocPayload;
+  status: string; // completed, failed, etc
+}

 /*
@@ -230,7 +251,11 @@ export type ESQueueCreateJobFn = (
   request: RequestFacade
 ) => Promise<object>;

-export type ESQueueWorkerExecuteFn = (jobId: string, job: JobDoc, cancellationToken: any) => void;
+export type ESQueueWorkerExecuteFn = (
+  jobId: string,
+  job: JobDoc,
+  cancellationToken?: CancellationToken
+) => void;

 export type JobIDForImmediate = null;

 export type ImmediateExecuteFn = (
@@ -262,6 +287,7 @@ export interface ExportTypeDefinition {
   id: string;
   name: string;
   jobType: string;
+  jobContentEncoding?: string;
   jobContentExtension: string;
   createJobFactory: CreateJobFactory;
   executeJobFactory: ExecuteJobFactory | ExecuteImmediateJobFactory;
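With jobContentEncoding now optional on the interface, an export type definition can declare base64 output in a type-checked way, and get_document_payload's encodeContent() switches on exactly this field. A hypothetical sketch showing only the fields visible in this diff (any further required fields, and the factory names, are assumptions):

const pngExportType: ExportTypeDefinition = {
  id: 'png',
  name: 'PNG',
  jobType: 'PNG',
  jobContentEncoding: 'base64', // binary output, decoded by encodeContent()
  jobContentExtension: 'png',
  createJobFactory: pngCreateJobFactory, // hypothetical factory names
  executeJobFactory: pngExecuteJobFactory,
};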