[Reporting] Remove any types and references to Hapi (#49250)

* [Reporting] Remove any types and references to Hapi

* clarification comment

* fix import
This commit is contained in:
Tim Sullivan 2019-11-14 12:56:15 -07:00 committed by GitHub
parent 930c156585
commit 08471cc88a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 135 additions and 79 deletions

View file

@ -21,7 +21,7 @@ export interface JobDocPayloadPNG extends JobDocPayload {
basePath?: string;
browserTimezone: string;
forceNow?: string;
layout: any;
layout: LayoutInstance;
relativeUrl: string;
objects: undefined;
}

View file

@ -8,7 +8,7 @@ import { resolve as resolvePath } from 'path';
import { existsSync } from 'fs';
import { chromium } from '../index';
import { BrowserType } from '../types';
import { BrowserDownload, BrowserType } from '../types';
import { md5 } from './checksum';
import { asyncMap } from './util';
@ -40,15 +40,7 @@ export async function ensureAllBrowsersDownloaded() {
* @param {BrowserSpec} browsers
* @return {Promise<undefined>}
*/
async function ensureDownloaded(
browsers: Array<{
paths: {
archivesPath: string;
baseUrl: string;
packages: Array<{ archiveFilename: string; archiveChecksum: string }>;
};
}>
) {
async function ensureDownloaded(browsers: BrowserDownload[]) {
await asyncMap(browsers, async browser => {
const { archivesPath } = browser.paths;

View file

@ -12,6 +12,7 @@ import { LevelLogger as Logger } from '../lib/level_logger';
import { extract } from './extract';
// @ts-ignore
import { md5 } from './download/checksum';
import { BrowserDownload } from './types';
const chmod = promisify(fs.chmod);
@ -28,7 +29,7 @@ interface PathResponse {
*/
export async function installBrowser(
logger: Logger,
browser: any,
browser: BrowserDownload,
installsPath: string
): Promise<PathResponse> {
const pkg = browser.paths.packages.find((p: Package) => p.platforms.includes(process.platform));

View file

@ -5,3 +5,17 @@
*/
export type BrowserType = 'chromium';
// Describes a downloadable headless-browser bundle: where its archives are
// stored/fetched and the per-platform package metadata used to verify and
// install it (replaces the inline `any`/structural types previously used by
// ensureDownloaded and installBrowser).
export interface BrowserDownload {
  paths: {
    archivesPath: string; // local directory where downloaded archives are kept
    baseUrl: string; // remote base URL the archives are fetched from
    packages: Array<{
      archiveChecksum: string; // expected checksum of the downloaded archive (verified via the md5 helper)
      archiveFilename: string;
      binaryChecksum: string; // expected checksum of the extracted browser binary
      binaryRelativePath: string; // path of the executable inside the extracted archive
      platforms: string[]; // process.platform values this package supports — see installBrowser's find()
    }>;
  };
}

View file

@ -5,6 +5,7 @@
*/
import { PLUGIN_ID } from '../../common/constants';
import { CancellationToken } from '../../common/cancellation_token';
import {
ESQueueInstance,
QueueConfig,
@ -14,6 +15,7 @@ import {
JobDoc,
JobDocPayload,
JobSource,
RequestFacade,
ServerFacade,
} from '../../types';
// @ts-ignore untyped dependency
@ -39,17 +41,23 @@ function createWorkerFn(server: ServerFacade) {
jobExecutors.set(exportType.jobType, executeJobFactory);
}
const workerFn = (job: JobSource, jobdoc: JobDocPayload | JobDoc, cancellationToken?: any) => {
const workerFn = (
job: JobSource,
arg1: JobDocPayload | JobDoc,
arg2: CancellationToken | RequestFacade | undefined
) => {
// pass the work to the jobExecutor
if (!jobExecutors.get(job._source.jobtype)) {
throw new Error(`Unable to find a job executor for the claimed job: [${job._id}]`);
}
// job executor function signature is different depending on whether it
// is ESQueueWorkerExecuteFn or ImmediateExecuteFn
if (job._id) {
const jobExecutor = jobExecutors.get(job._source.jobtype) as ESQueueWorkerExecuteFn;
return jobExecutor(job._id, jobdoc as JobDoc, cancellationToken);
return jobExecutor(job._id, arg1 as JobDoc, arg2 as CancellationToken);
} else {
const jobExecutor = jobExecutors.get(job._source.jobtype) as ImmediateExecuteFn;
return jobExecutor(null, jobdoc as JobDocPayload, cancellationToken);
return jobExecutor(null, arg1 as JobDocPayload, arg2 as RequestFacade);
}
};
const workerOptions = {

View file

@ -5,16 +5,20 @@
*/
import { ServerFacade, Logger } from '../../../types';
import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory';
import { validateBrowser } from './validate_browser';
import { validateConfig } from './validate_config';
import { validateMaxContentLength } from './validate_max_content_length';
export async function runValidations(server: ServerFacade, logger: Logger, browserFactory: any) {
export async function runValidations(
server: ServerFacade,
logger: Logger,
browserFactory: HeadlessChromiumDriverFactory
) {
try {
const config = server.config();
await Promise.all([
validateBrowser(server, browserFactory, logger),
validateConfig(config, logger),
validateConfig(server, logger),
validateMaxContentLength(server, logger),
]);
logger.debug(`Reporting plugin self-check ok!`);

View file

@ -5,15 +5,19 @@
*/
import crypto from 'crypto';
import { Logger } from '../../../types';
import { ServerFacade, Logger } from '../../../types';
export function validateConfig(serverFacade: ServerFacade, logger: Logger) {
const config = serverFacade.config();
export function validateConfig(config: any, logger: Logger) {
const encryptionKey = config.get('xpack.reporting.encryptionKey');
if (encryptionKey == null) {
logger.warning(
`Generating a random key for xpack.reporting.encryptionKey. To prevent pending reports from failing on restart, please set ` +
`xpack.reporting.encryptionKey in kibana.yml`
);
config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex'));
// @ts-ignore: No set() method on KibanaConfig, just get() and has()
config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex')); // update config in memory to contain a usable encryption key
}
}

View file

@ -5,12 +5,12 @@
*/
import numeral from '@elastic/numeral';
import { defaults, get } from 'lodash';
import { Logger } from '../../../types';
import { Logger, ServerFacade } from '../../../types';
const KIBANA_MAX_SIZE_BYTES_PATH = 'xpack.reporting.csv.maxSizeBytes';
const ES_MAX_SIZE_BYTES_PATH = 'http.max_content_length';
export async function validateMaxContentLength(server: any, logger: Logger) {
export async function validateMaxContentLength(server: ServerFacade, logger: Logger) {
const config = server.config();
const { callWithInternalUser } = server.plugins.elasticsearch.getCluster('data');
@ -22,7 +22,7 @@ export async function validateMaxContentLength(server: any, logger: Logger) {
const elasticSearchMaxContent = get(elasticClusterSettings, 'http.max_content_length', '100mb');
const elasticSearchMaxContentBytes = numeral().unformat(elasticSearchMaxContent.toUpperCase());
const kibanaMaxContentBytes = config.get(KIBANA_MAX_SIZE_BYTES_PATH);
const kibanaMaxContentBytes: number = config.get(KIBANA_MAX_SIZE_BYTES_PATH);
if (kibanaMaxContentBytes > elasticSearchMaxContentBytes) {
logger.warning(

View file

@ -76,7 +76,10 @@ export function registerGenerateFromJobParams(
const { exportType } = request.params;
let response;
try {
const jobParams = rison.decode(jobParamsRison);
const jobParams = rison.decode(jobParamsRison) as object | null;
if (!jobParams) {
throw new Error('missing jobParams!');
}
response = await handler(exportType, jobParams, request, h);
} catch (err) {
throw boom.badRequest(`invalid rison: ${jobParamsRison}`);

View file

@ -26,7 +26,7 @@ export function registerRoutes(server: ServerFacade, logger: Logger) {
*/
async function handler(
exportTypeId: string,
jobParams: any,
jobParams: object,
request: RequestFacade,
h: ReportingResponseToolkit
) {

View file

@ -4,9 +4,15 @@
* you may not use this file except in compliance with the Elastic License.
*/
import * as _ from 'lodash';
// @ts-ignore
import contentDisposition from 'content-disposition';
import * as _ from 'lodash';
import {
ServerFacade,
ExportTypeDefinition,
JobDocExecuted,
JobDocOutputExecuted,
} from '../../../types';
import { oncePerServer } from '../../lib/once_per_server';
import { CSV_JOB_TYPE } from '../../../common/constants';
@ -16,10 +22,10 @@ interface ICustomHeaders {
const DEFAULT_TITLE = 'report';
const getTitle = (exportType: any, title?: string): string =>
const getTitle = (exportType: ExportTypeDefinition, title?: string): string =>
`${title || DEFAULT_TITLE}.${exportType.jobContentExtension}`;
const getReportingHeaders = (output: any, exportType: any) => {
const getReportingHeaders = (output: JobDocOutputExecuted, exportType: ExportTypeDefinition) => {
const metaDataHeaders: ICustomHeaders = {};
if (exportType.jobType === CSV_JOB_TYPE) {
@ -33,20 +39,22 @@ const getReportingHeaders = (output: any, exportType: any) => {
return metaDataHeaders;
};
function getDocumentPayloadFn(server: any) {
const exportTypesRegistry = server.plugins.reporting.exportTypesRegistry;
function getDocumentPayloadFn(server: ServerFacade) {
const exportTypesRegistry = server.plugins.reporting!.exportTypesRegistry;
function encodeContent(content: string, exportType: any) {
function encodeContent(content: string | null, exportType: ExportTypeDefinition) {
switch (exportType.jobContentEncoding) {
case 'base64':
return Buffer.from(content, 'base64');
return content ? Buffer.from(content, 'base64') : content; // Buffer.from rejects null
default:
return content;
}
}
function getCompleted(output: any, jobType: string, title: any) {
const exportType = exportTypesRegistry.get((item: any) => item.jobType === jobType);
function getCompleted(output: JobDocOutputExecuted, jobType: string, title: string) {
const exportType = exportTypesRegistry.get(
(item: ExportTypeDefinition) => item.jobType === jobType
);
const filename = getTitle(exportType, title);
const headers = getReportingHeaders(output, exportType);
@ -61,7 +69,7 @@ function getDocumentPayloadFn(server: any) {
};
}
function getFailure(output: any) {
function getFailure(output: JobDocOutputExecuted) {
return {
statusCode: 500,
content: {
@ -72,19 +80,18 @@ function getDocumentPayloadFn(server: any) {
};
}
function getIncomplete(status: any) {
function getIncomplete(status: string) {
return {
statusCode: 503,
content: status,
contentType: 'application/json',
headers: {
'retry-after': 30,
},
headers: { 'retry-after': 30 },
};
}
return function getDocumentPayload(doc: any) {
const { status, output, jobtype: jobType, payload: { title } = { title: '' } } = doc._source;
return function getDocumentPayload(doc: { _source: JobDocExecuted }) {
const { status, jobtype: jobType, payload: { title } = { title: '' } } = doc._source;
const { output } = doc._source as { output: JobDocOutputExecuted };
if (status === 'completed') {
return getCompleted(output, jobType, title);

View file

@ -7,13 +7,13 @@
import { RequestFacade, ReportingResponseToolkit, JobDocPayload } from '../../types';
export type HandlerFunction = (
exportType: any,
jobParams: any,
exportType: string,
jobParams: object,
request: RequestFacade,
h: ReportingResponseToolkit
) => any;
export type HandlerErrorFunction = (exportType: any, err: Error) => any;
export type HandlerErrorFunction = (exportType: string, err: Error) => any;
export interface QueuedJobPayload {
error?: boolean;

View file

@ -5,6 +5,7 @@
*/
import { get } from 'lodash';
import { ServerFacade, ESCallCluster } from '../../types';
import {
AggregationBuckets,
AggregationResults,
@ -13,7 +14,6 @@ import {
KeyCountBucket,
RangeAggregationResults,
RangeStats,
UsageObject,
} from './types';
import { decorateRangeStats } from './decorate_range_stats';
// @ts-ignore untyped module
@ -80,7 +80,10 @@ type RangeStatSets = Partial<
last7Days: RangeStats;
}
>;
async function handleResponse(server: any, response: AggregationResults): Promise<RangeStatSets> {
async function handleResponse(
server: ServerFacade,
response: AggregationResults
): Promise<RangeStatSets> {
const buckets = get(response, 'aggregations.ranges.buckets');
if (!buckets) {
return {};
@ -98,7 +101,7 @@ async function handleResponse(server: any, response: AggregationResults): Promis
};
}
export async function getReportingUsage(server: any, callCluster: any) {
export async function getReportingUsage(server: ServerFacade, callCluster: ESCallCluster) {
const config = server.config();
const reportingIndex = config.get('xpack.reporting.index');
@ -135,7 +138,7 @@ export async function getReportingUsage(server: any, callCluster: any) {
return callCluster('search', params)
.then((response: AggregationResults) => handleResponse(server, response))
.then(async (usage: UsageObject) => {
.then(async (usage: RangeStatSets) => {
// Allow this to explicitly throw an exception if/when this config is deprecated,
// because we shouldn't collect browserType in that case!
const browserType = config.get('xpack.reporting.capture.browser.type');

View file

@ -6,26 +6,28 @@
// @ts-ignore untyped module
import { KIBANA_STATS_TYPE_MONITORING } from '../../../monitoring/common/constants';
import { ServerFacade, ESCallCluster } from '../../types';
import { KIBANA_REPORTING_TYPE } from '../../common/constants';
import { getReportingUsage } from './get_reporting_usage';
import { RangeStats } from './types';
/*
* @param {Object} server
* @return {Object} kibana usage stats type collection object
*/
export function getReportingUsageCollector(server: any, isReady: () => boolean) {
export function getReportingUsageCollector(server: ServerFacade, isReady: () => boolean) {
const { collectorSet } = server.usage;
return collectorSet.makeUsageCollector({
type: KIBANA_REPORTING_TYPE,
isReady,
fetch: (callCluster: any) => getReportingUsage(server, callCluster),
fetch: (callCluster: ESCallCluster) => getReportingUsage(server, callCluster),
/*
* Format the response data into a model for internal upload
* 1. Make this data part of the "kibana_stats" type
* 2. Organize the payload in the usage.xpack.reporting namespace of the data payload
*/
formatForBulkUpload: (result: any) => {
formatForBulkUpload: (result: RangeStats) => {
return {
type: KIBANA_STATS_TYPE_MONITORING,
payload: {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
interface AvailableTotal {
export interface AvailableTotal {
available: boolean;
total: number;
}
@ -56,13 +56,5 @@ export type RangeStats = JobTypes & {
status: StatusCounts;
};
export type UsageObject = RangeStats & {
available: boolean;
enabled: boolean;
browser_type: string;
lastDay: RangeStats;
last7Days: RangeStats;
};
export type ExportType = 'csv' | 'printable_pdf' | 'PNG';
export type FeatureAvailabilityMap = { [F in ExportType]: boolean };

View file

@ -8,7 +8,11 @@ import { ResponseObject } from 'hapi';
import { EventEmitter } from 'events';
import { Legacy } from 'kibana';
import { XPackMainPlugin } from '../xpack_main/xpack_main';
import { ElasticsearchPlugin } from '../../../../src/legacy/core_plugins/elasticsearch';
import {
ElasticsearchPlugin,
CallCluster,
} from '../../../../src/legacy/core_plugins/elasticsearch';
import { CancellationToken } from './common/cancellation_token';
import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory';
import { BrowserType } from './server/browsers/types';
@ -18,9 +22,11 @@ export interface ReportingPlugin {
queue: {
addJob: (type: string, payload: object, options: object) => Job;
};
// TODO: convert exportTypesRegistry to TS
exportTypesRegistry: {
getById: (id: string) => ExportTypeDefinition;
getAll: () => ExportTypeDefinition[];
get: (callback: (item: ExportTypeDefinition) => boolean) => ExportTypeDefinition;
};
browserDriverFactory: HeadlessChromiumDriverFactory;
}
@ -52,18 +58,6 @@ export interface NetworkPolicy {
rules: NetworkPolicyRule[];
}
// Tracks which parts of the legacy plugin system are being used
export type ReportingPluginSpecOptions = Legacy.PluginSpecOptions;
export type ServerFacade = Legacy.Server & {
plugins: {
reporting?: ReportingPlugin;
xpack_main?: XPackMainPlugin & {
status?: any;
};
};
};
interface ListQuery {
page: string;
size: string;
@ -79,7 +73,21 @@ interface DownloadParams {
docId: string;
}
// Tracks which parts of the legacy plugin system are being used
/*
* Legacy System
*/
export type ReportingPluginSpecOptions = Legacy.PluginSpecOptions;
export type ServerFacade = Legacy.Server & {
plugins: {
reporting?: ReportingPlugin;
xpack_main?: XPackMainPlugin & {
status?: any;
};
};
};
interface ReportingRequest {
query: ListQuery & GenerateQuery;
params: DownloadParams;
@ -100,6 +108,12 @@ export type ResponseFacade = ResponseObject & {
export type ReportingResponseToolkit = Legacy.ResponseToolkit;
export type ESCallCluster = CallCluster;
/*
* Reporting Config
*/
export interface CaptureConfig {
browser: {
type: BrowserType;
@ -184,6 +198,11 @@ export interface JobDocPayload {
type: string | null;
}
// A job document as claimed from the Elasticsearch reporting queue.
export interface JobSource {
  _id: string; // ES document id; falsy for "immediate" (non-queued) jobs — see the workerFn branch on job._id
  _source: JobDoc;
}
export interface JobDocOutput {
content: string; // encoded content
contentType: string;
@ -196,9 +215,11 @@ export interface JobDoc {
status: string; // completed, failed, etc
}
export interface JobSource {
_id: string;
_source: JobDoc;
// Shape of a job document after a worker has executed it: the same fields as
// JobDoc, but with the executed output (JobDocOutputExecuted) attached.
export interface JobDocExecuted {
  jobtype: string;
  output: JobDocOutputExecuted;
  payload: JobDocPayload;
  status: string; // completed, failed, etc
}
/*
@ -230,7 +251,11 @@ export type ESQueueCreateJobFn = (
request: RequestFacade
) => Promise<object>;
export type ESQueueWorkerExecuteFn = (jobId: string, job: JobDoc, cancellationToken: any) => void;
// Executor signature for queued (non-immediate) jobs: runs the job identified
// by jobId and may observe cancellation through the optional CancellationToken.
// Contrast with ImmediateExecuteFn, which receives null for the job id.
export type ESQueueWorkerExecuteFn = (
  jobId: string,
  job: JobDoc,
  cancellationToken?: CancellationToken
) => void;
export type JobIDForImmediate = null;
export type ImmediateExecuteFn = (
@ -262,6 +287,7 @@ export interface ExportTypeDefinition {
id: string;
name: string;
jobType: string;
jobContentEncoding?: string;
jobContentExtension: string;
createJobFactory: CreateJobFactory;
executeJobFactory: ExecuteJobFactory | ExecuteImmediateJobFactory;