[Logs UI] Remove legacy singletons (#77743)

Removes the `npStart` legacy singleton used during the migration to the new platform. The singleton was used in API calls to access the `http.fetch` service. To remove the singleton, we injected `fetch` as a dependency into all functions.
This commit is contained in:
Alejandro Fernández Gómez 2020-10-02 18:57:50 +02:00 committed by GitHub
parent bb4ad196ea
commit 2899e83df8
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
59 changed files with 796 additions and 626 deletions

View file

@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { HttpSetup } from 'src/core/public'; import type { HttpHandler } from 'src/core/public';
import { import {
INFRA_ALERT_PREVIEW_PATH, INFRA_ALERT_PREVIEW_PATH,
METRIC_THRESHOLD_ALERT_TYPE_ID, METRIC_THRESHOLD_ALERT_TYPE_ID,
@@ -22,7 +22,7 @@ export async function getAlertPreview({
params, params,
alertType, alertType,
}: { }: {
fetch: HttpSetup['fetch']; fetch: HttpHandler;
params: AlertPreviewRequestParams; params: AlertPreviewRequestParams;
alertType: PreviewableAlertTypes; alertType: PreviewableAlertTypes;
}): Promise<AlertPreviewSuccessResponsePayload> { }): Promise<AlertPreviewSuccessResponsePayload> {

View file

@@ -5,21 +5,25 @@
*/ */
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable'; import type { HttpHandler } from 'src/core/public';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { getDatafeedId, getJobId } from '../../../../../common/log_analysis'; import { getDatafeedId, getJobId } from '../../../../../common/log_analysis';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
interface DeleteJobsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callDeleteJobs = async <JobType extends string>( export const callDeleteJobs = async <JobType extends string>(
spaceId: string, requestArgs: DeleteJobsRequestArgs<JobType>,
sourceId: string, fetch: HttpHandler
jobTypes: JobType[]
) => { ) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// NOTE: Deleting the jobs via this API will delete the datafeeds at the same time // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time
const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
deleteJobsRequestPayloadRT.encode({ deleteJobsRequestPayloadRT.encode({
@@ -28,28 +32,29 @@ export const callDeleteJobs = async <JobType extends string>(
), ),
}); });
return pipe( return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse);
deleteJobsResponsePayloadRT.decode(deleteJobsResponse),
fold(throwErrors(createPlainError), identity)
);
}; };
export const callGetJobDeletionTasks = async () => { export const callGetJobDeletionTasks = async (fetch: HttpHandler) => {
const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks');
return pipe( return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse);
getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse),
fold(throwErrors(createPlainError), identity)
);
}; };
interface StopDatafeedsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callStopDatafeeds = async <JobType extends string>( export const callStopDatafeeds = async <JobType extends string>(
spaceId: string, requestArgs: StopDatafeedsRequestArgs<JobType>,
sourceId: string, fetch: HttpHandler
jobTypes: JobType[]
) => { ) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// Stop datafeed due to https://github.com/elastic/kibana/issues/44652 // Stop datafeed due to https://github.com/elastic/kibana/issues/44652
const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
stopDatafeedsRequestPayloadRT.encode({ stopDatafeedsRequestPayloadRT.encode({
@@ -58,10 +63,7 @@ export const callStopDatafeeds = async <JobType extends string>(
), ),
}); });
return pipe( return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse);
stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse),
fold(throwErrors(createPlainError), identity)
);
}; };
export const deleteJobsRequestPayloadRT = rt.type({ export const deleteJobsRequestPayloadRT = rt.type({

View file

@@ -4,21 +4,24 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { npStart } from '../../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { getJobId, jobCustomSettingsRT } from '../../../../../common/log_analysis'; import { getJobId, jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callJobsSummaryAPI = async <JobType extends string>( export const callJobsSummaryAPI = async <JobType extends string>(
spaceId: string, requestArgs: RequestArgs<JobType>,
sourceId: string, fetch: HttpHandler
jobTypes: JobType[]
) => { ) => {
const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { const { spaceId, sourceId, jobTypes } = requestArgs;
const response = await fetch('/api/ml/jobs/jobs_summary', {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
fetchJobStatusRequestPayloadRT.encode({ fetchJobStatusRequestPayloadRT.encode({
@@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async <JobType extends string>(
}) })
), ),
}); });
return pipe( return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response);
fetchJobStatusResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };
export const fetchJobStatusRequestPayloadRT = rt.type({ export const fetchJobStatusRequestPayloadRT = rt.type({

View file

@@ -4,24 +4,18 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { npStart } from '../../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { jobCustomSettingsRT } from '../../../../../common/log_analysis'; import { jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callGetMlModuleAPI = async (moduleId: string) => { export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => {
const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, {
method: 'GET', method: 'GET',
}); });
return pipe( return decodeOrThrow(getMlModuleResponsePayloadRT)(response);
getMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };
const jobDefinitionRT = rt.type({ const jobDefinitionRT = rt.type({

View file

@@ -4,27 +4,38 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { npStart } from '../../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../../common/log_analysis'; import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callSetupMlModuleAPI = async ( interface RequestArgs {
moduleId: string, moduleId: string;
start: number | undefined, start?: number;
end: number | undefined, end?: number;
spaceId: string, spaceId: string;
sourceId: string, sourceId: string;
indexPattern: string, indexPattern: string;
jobOverrides: SetupMlModuleJobOverrides[] = [], jobOverrides?: SetupMlModuleJobOverrides[];
datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], datafeedOverrides?: SetupMlModuleDatafeedOverrides[];
query?: object query?: object;
) => { }
const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, {
export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const {
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern,
jobOverrides = [],
datafeedOverrides = [],
query,
} = requestArgs;
const response = await fetch(`/api/ml/modules/setup/${moduleId}`, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
setupMlModuleRequestPayloadRT.encode({ setupMlModuleRequestPayloadRT.encode({
@@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async (
), ),
}); });
return pipe( return decodeOrThrow(setupMlModuleResponsePayloadRT)(response);
setupMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };
const setupMlModuleTimeParamsRT = rt.partial({ const setupMlModuleTimeParamsRT = rt.partial({

View file

@@ -4,21 +4,24 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import type { HttpHandler } from 'src/core/public';
import { import {
LOG_ANALYSIS_VALIDATE_DATASETS_PATH, LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
validateLogEntryDatasetsRequestPayloadRT, validateLogEntryDatasetsRequestPayloadRT,
validateLogEntryDatasetsResponsePayloadRT, validateLogEntryDatasetsResponsePayloadRT,
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { npStart } from '../../../../legacy_singletons';
export const callValidateDatasetsAPI = async ( interface RequestArgs {
indices: string[], indices: string[];
timestampField: string, timestampField: string;
startTime: number, startTime: number;
endTime: number endTime: number;
) => { }
const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, {
export const callValidateDatasetsAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { indices, timestampField, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
validateLogEntryDatasetsRequestPayloadRT.encode({ validateLogEntryDatasetsRequestPayloadRT.encode({

View file

@@ -4,10 +4,8 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { import {
LOG_ANALYSIS_VALIDATE_INDICES_PATH, LOG_ANALYSIS_VALIDATE_INDICES_PATH,
ValidationIndicesFieldSpecification, ValidationIndicesFieldSpecification,
@@ -15,19 +13,19 @@ import {
validationIndicesResponsePayloadRT, validationIndicesResponsePayloadRT,
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callValidateIndicesAPI = async ( interface RequestArgs {
indices: string[], indices: string[];
fields: ValidationIndicesFieldSpecification[] fields: ValidationIndicesFieldSpecification[];
) => { }
const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, {
export const callValidateIndicesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { indices, fields } = requestArgs;
const response = await fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify(validationIndicesRequestPayloadRT.encode({ data: { indices, fields } })), body: JSON.stringify(validationIndicesRequestPayloadRT.encode({ data: { indices, fields } })),
}); });
return pipe( return decodeOrThrow(validationIndicesResponsePayloadRT)(response);
validationIndicesResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@@ -6,18 +6,16 @@
import createContainer from 'constate'; import createContainer from 'constate';
import { useMemo, useState, useEffect } from 'react'; import { useMemo, useState, useEffect } from 'react';
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { npStart } from '../../../legacy_singletons';
import { import {
getMlCapabilitiesResponsePayloadRT, getMlCapabilitiesResponsePayloadRT,
GetMlCapabilitiesResponsePayload, GetMlCapabilitiesResponsePayload,
} from './api/ml_api_types'; } from './api/ml_api_types';
import { throwErrors, createPlainError } from '../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export const useLogAnalysisCapabilities = () => { export const useLogAnalysisCapabilities = () => {
const { services } = useKibanaContextForPlugin();
const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>( const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>(
initialMlCapabilities initialMlCapabilities
); );
@@ -26,12 +24,9 @@ export const useLogAnalysisCapabilities = () => {
{ {
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); const rawResponse = await services.http.fetch('/api/ml/ml_capabilities');
return pipe( return decodeOrThrow(getMlCapabilitiesResponsePayloadRT)(rawResponse);
getMlCapabilitiesResponsePayloadRT.decode(rawResponse),
fold(throwErrors(createPlainError), identity)
);
}, },
onResolve: (response) => { onResolve: (response) => {
setMlCapabilities(response); setMlCapabilities(response);

View file

@@ -3,17 +3,18 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import type { HttpHandler } from 'src/core/public';
import { getJobId } from '../../../../common/log_analysis'; import { getJobId } from '../../../../common/log_analysis';
import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup';
export const cleanUpJobsAndDatafeeds = async <JobType extends string>( export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
spaceId: string, spaceId: string,
sourceId: string, sourceId: string,
jobTypes: JobType[] jobTypes: JobType[],
fetch: HttpHandler
) => { ) => {
try { try {
await callStopDatafeeds(spaceId, sourceId, jobTypes); await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch);
} catch (err) { } catch (err) {
// Proceed only if datafeed has been deleted or didn't exist in the first place // Proceed only if datafeed has been deleted or didn't exist in the first place
if (err?.res?.status !== 404) { if (err?.res?.status !== 404) {
@@ -21,27 +22,29 @@ export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
} }
} }
return await deleteJobs(spaceId, sourceId, jobTypes); return await deleteJobs(spaceId, sourceId, jobTypes, fetch);
}; };
const deleteJobs = async <JobType extends string>( const deleteJobs = async <JobType extends string>(
spaceId: string, spaceId: string,
sourceId: string, sourceId: string,
jobTypes: JobType[] jobTypes: JobType[],
fetch: HttpHandler
) => { ) => {
const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch);
await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch);
return deleteJobsResponse; return deleteJobsResponse;
}; };
const waitUntilJobsAreDeleted = async <JobType extends string>( const waitUntilJobsAreDeleted = async <JobType extends string>(
spaceId: string, spaceId: string,
sourceId: string, sourceId: string,
jobTypes: JobType[] jobTypes: JobType[],
fetch: HttpHandler
) => { ) => {
const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType));
while (true) { while (true) {
const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch);
const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId));
if (needToWait) { if (needToWait) {

View file

@@ -6,6 +6,7 @@
import { useCallback, useMemo } from 'react'; import { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../../common/log_analysis'; import { DatasetFilter } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { useModuleStatus } from './log_analysis_module_status'; import { useModuleStatus } from './log_analysis_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
@@ -17,6 +18,7 @@ export const useLogAnalysisModule = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration; sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>; moduleDescriptor: ModuleDescriptor<JobType>;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const { spaceId, sourceId, timestampField } = sourceConfiguration; const { spaceId, sourceId, timestampField } = sourceConfiguration;
const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
@@ -25,7 +27,7 @@ export const useLogAnalysisModule = <JobType extends string>({
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
dispatchModuleStatus({ type: 'fetchingJobStatuses' }); dispatchModuleStatus({ type: 'fetchingJobStatuses' });
return await moduleDescriptor.getJobSummary(spaceId, sourceId); return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch);
}, },
onResolve: (jobResponse) => { onResolve: (jobResponse) => {
dispatchModuleStatus({ dispatchModuleStatus({
@@ -52,13 +54,23 @@ export const useLogAnalysisModule = <JobType extends string>({
datasetFilter: DatasetFilter datasetFilter: DatasetFilter
) => { ) => {
dispatchModuleStatus({ type: 'startedSetup' }); dispatchModuleStatus({ type: 'startedSetup' });
const setupResult = await moduleDescriptor.setUpModule(start, end, datasetFilter, { const setupResult = await moduleDescriptor.setUpModule(
indices: selectedIndices, start,
sourceId, end,
datasetFilter,
{
indices: selectedIndices,
sourceId,
spaceId,
timestampField,
},
services.http.fetch
);
const jobSummaries = await moduleDescriptor.getJobSummary(
spaceId, spaceId,
timestampField, sourceId,
}); services.http.fetch
const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId); );
return { setupResult, jobSummaries }; return { setupResult, jobSummaries };
}, },
onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => {
@@ -82,7 +94,7 @@ export const useLogAnalysisModule = <JobType extends string>({
{ {
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
return await moduleDescriptor.cleanUpModule(spaceId, sourceId); return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch);
}, },
}, },
[spaceId, sourceId] [spaceId, sourceId]

View file

@@ -6,6 +6,7 @@
import { useCallback, useMemo, useState } from 'react'; import { useCallback, useMemo, useState } from 'react';
import { getJobId } from '../../../../common/log_analysis'; import { getJobId } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { JobSummary } from './api/ml_get_jobs_summary_api'; import { JobSummary } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module';
@@ -18,6 +19,7 @@ export const useLogAnalysisModuleDefinition = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration; sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>; moduleDescriptor: ModuleDescriptor<JobType>;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const [moduleDefinition, setModuleDefinition] = useState< const [moduleDefinition, setModuleDefinition] = useState<
GetMlModuleResponsePayload | undefined GetMlModuleResponsePayload | undefined
>(); >();
@@ -40,7 +42,7 @@ export const useLogAnalysisModuleDefinition = <JobType extends string>({
{ {
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
return await moduleDescriptor.getModuleDefinition(); return await moduleDescriptor.getModuleDefinition(services.http.fetch);
}, },
onResolve: (response) => { onResolve: (response) => {
setModuleDefinition(response); setModuleDefinition(response);

View file

@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import type { HttpHandler } from 'src/core/public';
import { import {
ValidateLogEntryDatasetsResponsePayload, ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload, ValidationIndicesResponsePayload,
@@ -23,24 +24,35 @@ export interface ModuleDescriptor<JobType extends string> {
jobTypes: JobType[]; jobTypes: JobType[];
bucketSpan: number; bucketSpan: number;
getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>; getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>;
getJobSummary: (spaceId: string, sourceId: string) => Promise<FetchJobStatusResponsePayload>; getJobSummary: (
getModuleDefinition: () => Promise<GetMlModuleResponsePayload>; spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: (fetch: HttpHandler) => Promise<GetMlModuleResponsePayload>;
setUpModule: ( setUpModule: (
start: number | undefined, start: number | undefined,
end: number | undefined, end: number | undefined,
datasetFilter: DatasetFilter, datasetFilter: DatasetFilter,
sourceConfiguration: ModuleSourceConfiguration sourceConfiguration: ModuleSourceConfiguration,
fetch: HttpHandler
) => Promise<SetupMlModuleResponsePayload>; ) => Promise<SetupMlModuleResponsePayload>;
cleanUpModule: (spaceId: string, sourceId: string) => Promise<DeleteJobsResponsePayload>; cleanUpModule: (
spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<DeleteJobsResponsePayload>;
validateSetupIndices: ( validateSetupIndices: (
indices: string[], indices: string[],
timestampField: string timestampField: string,
fetch: HttpHandler
) => Promise<ValidationIndicesResponsePayload>; ) => Promise<ValidationIndicesResponsePayload>;
validateSetupDatasets: ( validateSetupDatasets: (
indices: string[], indices: string[],
timestampField: string, timestampField: string,
startTime: number, startTime: number,
endTime: number endTime: number,
fetch: HttpHandler
) => Promise<ValidateLogEntryDatasetsResponsePayload>; ) => Promise<ValidateLogEntryDatasetsResponsePayload>;
} }

View file

@@ -18,6 +18,7 @@ import {
ValidationIndicesError, ValidationIndicesError,
ValidationUIError, ValidationUIError,
} from '../../../components/logging/log_analysis_setup/initial_configuration_step'; } from '../../../components/logging/log_analysis_setup/initial_configuration_step';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
@@ -43,6 +44,7 @@ export const useAnalysisSetupState = <JobType extends string>({
setUpModule, setUpModule,
sourceConfiguration, sourceConfiguration,
}: AnalysisSetupStateArguments<JobType>) => { }: AnalysisSetupStateArguments<JobType>) => {
const { services } = useKibanaContextForPlugin();
const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs); const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs);
const [endTime, setEndTime] = useState<number | undefined>(undefined); const [endTime, setEndTime] = useState<number | undefined>(undefined);
@@ -158,7 +160,8 @@
createPromise: async () => { createPromise: async () => {
return await validateSetupIndices( return await validateSetupIndices(
sourceConfiguration.indices, sourceConfiguration.indices,
sourceConfiguration.timestampField sourceConfiguration.timestampField,
services.http.fetch
); );
}, },
onResolve: ({ data: { errors } }) => { onResolve: ({ data: { errors } }) => {
@@ -183,7 +186,8 @@
validIndexNames, validIndexNames,
sourceConfiguration.timestampField, sourceConfiguration.timestampField,
startTime ?? 0, startTime ?? 0,
endTime ?? Date.now() endTime ?? Date.now(),
services.http.fetch
); );
}, },
onResolve: ({ data: { datasets } }) => { onResolve: ({ data: { datasets } }) => {

View file

@@ -5,6 +5,7 @@
*/ */
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
import type { HttpHandler } from 'src/core/public';
import { import {
bucketSpan, bucketSpan,
categoriesMessageField, categoriesMessageField,
@@ -42,22 +43,26 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<LogEntryCategoriesJobType, string> {} as Record<LogEntryCategoriesJobType, string>
); );
const getJobSummary = async (spaceId: string, sourceId: string) => { const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryCategoriesJobTypes); const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: logEntryCategoriesJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId)); const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
}; };
const getModuleDefinition = async () => { const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId); return await callGetMlModuleAPI(moduleId, fetch);
}; };
const setUpModule = async ( const setUpModule = async (
start: number | undefined, start: number | undefined,
end: number | undefined, end: number | undefined,
datasetFilter: DatasetFilter, datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
fetch: HttpHandler
) => { ) => {
const indexNamePattern = indices.join(','); const indexNamePattern = indices.join(',');
const jobOverrides = [ const jobOverrides = [
@@ -101,46 +106,59 @@ const setUpModule = async (
}; };
return callSetupMlModuleAPI( return callSetupMlModuleAPI(
moduleId, {
start, moduleId,
end, start,
spaceId, end,
sourceId, spaceId,
indexNamePattern, sourceId,
jobOverrides, indexPattern: indexNamePattern,
[], jobOverrides,
query query,
},
fetch
); );
}; };
const cleanUpModule = async (spaceId: string, sourceId: string) => { const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes); return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes, fetch);
}; };
const validateSetupIndices = async (indices: string[], timestampField: string) => { const validateSetupIndices = async (
return await callValidateIndicesAPI(indices, [ indices: string[],
timestampField: string,
fetch: HttpHandler
) => {
return await callValidateIndicesAPI(
{ {
name: timestampField, indices,
validTypes: ['date'], fields: [
{
name: timestampField,
validTypes: ['date'],
},
{
name: partitionField,
validTypes: ['keyword'],
},
{
name: categoriesMessageField,
validTypes: ['text'],
},
],
}, },
{ fetch
name: partitionField, );
validTypes: ['keyword'],
},
{
name: categoriesMessageField,
validTypes: ['text'],
},
]);
}; };
const validateSetupDatasets = async ( const validateSetupDatasets = async (
indices: string[], indices: string[],
timestampField: string, timestampField: string,
startTime: number, startTime: number,
endTime: number endTime: number,
fetch: HttpHandler
) => { ) => {
return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch);
}; };
export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobType> = { export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobType> = {

View file

@ -5,6 +5,7 @@
*/ */
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
import type { HttpHandler } from 'src/core/public';
import { import {
bucketSpan, bucketSpan,
DatasetFilter, DatasetFilter,
@ -41,22 +42,26 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<LogEntryRateJobType, string> {} as Record<LogEntryRateJobType, string>
); );
const getJobSummary = async (spaceId: string, sourceId: string) => { const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryRateJobTypes); const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: logEntryRateJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId)); const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
}; };
const getModuleDefinition = async () => { const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId); return await callGetMlModuleAPI(moduleId, fetch);
}; };
const setUpModule = async ( const setUpModule = async (
start: number | undefined, start: number | undefined,
end: number | undefined, end: number | undefined,
datasetFilter: DatasetFilter, datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
fetch: HttpHandler
) => { ) => {
const indexNamePattern = indices.join(','); const indexNamePattern = indices.join(',');
const jobOverrides = [ const jobOverrides = [
@ -93,42 +98,55 @@ const setUpModule = async (
: undefined; : undefined;
return callSetupMlModuleAPI( return callSetupMlModuleAPI(
moduleId, {
start, moduleId,
end, start,
spaceId, end,
sourceId, spaceId,
indexNamePattern, sourceId,
jobOverrides, indexPattern: indexNamePattern,
[], jobOverrides,
query query,
},
fetch
); );
}; };
const cleanUpModule = async (spaceId: string, sourceId: string) => { const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes); return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes, fetch);
}; };
const validateSetupIndices = async (indices: string[], timestampField: string) => { const validateSetupIndices = async (
return await callValidateIndicesAPI(indices, [ indices: string[],
timestampField: string,
fetch: HttpHandler
) => {
return await callValidateIndicesAPI(
{ {
name: timestampField, indices,
validTypes: ['date'], fields: [
{
name: timestampField,
validTypes: ['date'],
},
{
name: partitionField,
validTypes: ['keyword'],
},
],
}, },
{ fetch
name: partitionField, );
validTypes: ['keyword'],
},
]);
}; };
const validateSetupDatasets = async ( const validateSetupDatasets = async (
indices: string[], indices: string[],
timestampField: string, timestampField: string,
startTime: number, startTime: number,
endTime: number endTime: number,
fetch: HttpHandler
) => { ) => {
return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch);
}; };
export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = { export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = {

View file

@ -4,12 +4,9 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { import {
LOG_ENTRIES_PATH, LOG_ENTRIES_PATH,
@ -18,11 +15,11 @@ import {
logEntriesResponseRT, logEntriesResponseRT,
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
export const fetchLogEntries = async (requestArgs: LogEntriesRequest) => { export const fetchLogEntries = async (requestArgs: LogEntriesRequest, fetch: HttpHandler) => {
const response = await npStart.http.fetch(LOG_ENTRIES_PATH, { const response = await fetch(LOG_ENTRIES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify(logEntriesRequestRT.encode(requestArgs)), body: JSON.stringify(logEntriesRequestRT.encode(requestArgs)),
}); });
return pipe(logEntriesResponseRT.decode(response), fold(throwErrors(createPlainError), identity)); return decodeOrThrow(logEntriesResponseRT)(response);
}; };

View file

@ -4,12 +4,9 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { import {
LOG_ENTRIES_ITEM_PATH, LOG_ENTRIES_ITEM_PATH,
@ -18,14 +15,14 @@ import {
logEntriesItemResponseRT, logEntriesItemResponseRT,
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
export const fetchLogEntriesItem = async (requestArgs: LogEntriesItemRequest) => { export const fetchLogEntriesItem = async (
const response = await npStart.http.fetch(LOG_ENTRIES_ITEM_PATH, { requestArgs: LogEntriesItemRequest,
fetch: HttpHandler
) => {
const response = await fetch(LOG_ENTRIES_ITEM_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify(logEntriesItemRequestRT.encode(requestArgs)), body: JSON.stringify(logEntriesItemRequestRT.encode(requestArgs)),
}); });
return pipe( return decodeOrThrow(logEntriesItemResponseRT)(response);
logEntriesItemResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -14,6 +14,7 @@ import {
LogEntriesBaseRequest, LogEntriesBaseRequest,
} from '../../../../common/http_api'; } from '../../../../common/http_api';
import { fetchLogEntries } from './api/fetch_log_entries'; import { fetchLogEntries } from './api/fetch_log_entries';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
const DESIRED_BUFFER_PAGES = 2; const DESIRED_BUFFER_PAGES = 2;
const LIVE_STREAM_INTERVAL = 5000; const LIVE_STREAM_INTERVAL = 5000;
@ -144,6 +145,7 @@ const useFetchEntriesEffect = (
dispatch: Dispatch, dispatch: Dispatch,
props: LogEntriesProps props: LogEntriesProps
) => { ) => {
const { services } = useKibanaContextForPlugin();
const [prevParams, cachePrevParams] = useState<LogEntriesProps | undefined>(); const [prevParams, cachePrevParams] = useState<LogEntriesProps | undefined>();
const [startedStreaming, setStartedStreaming] = useState(false); const [startedStreaming, setStartedStreaming] = useState(false);
@ -172,7 +174,7 @@ const useFetchEntriesEffect = (
before: 'last', before: 'last',
}; };
const { data: payload } = await fetchLogEntries(fetchArgs); const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch);
dispatch({ type: Action.ReceiveNewEntries, payload }); dispatch({ type: Action.ReceiveNewEntries, payload });
// Move position to the bottom if it's the first load. // Move position to the bottom if it's the first load.
@ -228,7 +230,7 @@ const useFetchEntriesEffect = (
after: state.bottomCursor, after: state.bottomCursor,
}; };
const { data: payload } = await fetchLogEntries(fetchArgs); const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch);
dispatch({ dispatch({
type: getEntriesBefore ? Action.ReceiveEntriesBefore : Action.ReceiveEntriesAfter, type: getEntriesBefore ? Action.ReceiveEntriesBefore : Action.ReceiveEntriesAfter,

View file

@ -9,6 +9,7 @@ import { isString } from 'lodash';
import React, { useContext, useEffect, useMemo, useState } from 'react'; import React, { useContext, useEffect, useMemo, useState } from 'react';
import { LogEntriesItem } from '../../../common/http_api'; import { LogEntriesItem } from '../../../common/http_api';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { UrlStateContainer } from '../../utils/url_state'; import { UrlStateContainer } from '../../utils/url_state';
import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { fetchLogEntriesItem } from './log_entries/api/fetch_log_entries_item'; import { fetchLogEntriesItem } from './log_entries/api/fetch_log_entries_item';
@ -26,6 +27,7 @@ export interface FlyoutOptionsUrlState {
} }
export const useLogFlyout = () => { export const useLogFlyout = () => {
const { services } = useKibanaContextForPlugin();
const { sourceId } = useLogSourceContext(); const { sourceId } = useLogSourceContext();
const [flyoutVisible, setFlyoutVisibility] = useState<boolean>(false); const [flyoutVisible, setFlyoutVisibility] = useState<boolean>(false);
const [flyoutId, setFlyoutId] = useState<string | null>(null); const [flyoutId, setFlyoutId] = useState<string | null>(null);
@ -39,7 +41,7 @@ export const useLogFlyout = () => {
if (!flyoutId) { if (!flyoutId) {
return; return;
} }
return await fetchLogEntriesItem({ sourceId, id: flyoutId }); return await fetchLogEntriesItem({ sourceId, id: flyoutId }, services.http.fetch);
}, },
onResolve: (response) => { onResolve: (response) => {
if (response) { if (response) {

View file

@ -4,12 +4,9 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { import {
LOG_ENTRIES_HIGHLIGHTS_PATH, LOG_ENTRIES_HIGHLIGHTS_PATH,
@ -18,14 +15,14 @@ import {
logEntriesHighlightsResponseRT, logEntriesHighlightsResponseRT,
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
export const fetchLogEntriesHighlights = async (requestArgs: LogEntriesHighlightsRequest) => { export const fetchLogEntriesHighlights = async (
const response = await npStart.http.fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, { requestArgs: LogEntriesHighlightsRequest,
fetch: HttpHandler
) => {
const response = await fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify(logEntriesHighlightsRequestRT.encode(requestArgs)), body: JSON.stringify(logEntriesHighlightsRequestRT.encode(requestArgs)),
}); });
return pipe( return decodeOrThrow(logEntriesHighlightsResponseRT)(response);
logEntriesHighlightsResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -3,11 +3,9 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable'; import type { HttpHandler } from 'src/core/public';
import { identity } from 'fp-ts/lib/function'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { import {
LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH,
@ -17,15 +15,13 @@ import {
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
export const fetchLogSummaryHighlights = async ( export const fetchLogSummaryHighlights = async (
requestArgs: LogEntriesSummaryHighlightsRequest requestArgs: LogEntriesSummaryHighlightsRequest,
fetch: HttpHandler
) => { ) => {
const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, { const response = await fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify(logEntriesSummaryHighlightsRequestRT.encode(requestArgs)), body: JSON.stringify(logEntriesSummaryHighlightsRequestRT.encode(requestArgs)),
}); });
return pipe( return decodeOrThrow(logEntriesSummaryHighlightsResponseRT)(response);
logEntriesSummaryHighlightsResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -10,6 +10,7 @@ import { TimeKey } from '../../../../common/time';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { fetchLogEntriesHighlights } from './api/fetch_log_entries_highlights'; import { fetchLogEntriesHighlights } from './api/fetch_log_entries_highlights';
import { LogEntry, LogEntriesHighlightsResponse } from '../../../../common/http_api'; import { LogEntry, LogEntriesHighlightsResponse } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export const useLogEntryHighlights = ( export const useLogEntryHighlights = (
sourceId: string, sourceId: string,
@ -21,6 +22,7 @@ export const useLogEntryHighlights = (
filterQuery: string | null, filterQuery: string | null,
highlightTerms: string[] highlightTerms: string[]
) => { ) => {
const { services } = useKibanaContextForPlugin();
const [logEntryHighlights, setLogEntryHighlights] = useState< const [logEntryHighlights, setLogEntryHighlights] = useState<
LogEntriesHighlightsResponse['data'] LogEntriesHighlightsResponse['data']
>([]); >([]);
@ -32,15 +34,18 @@ export const useLogEntryHighlights = (
throw new Error('Skipping request: Insufficient parameters'); throw new Error('Skipping request: Insufficient parameters');
} }
return await fetchLogEntriesHighlights({ return await fetchLogEntriesHighlights(
sourceId, {
startTimestamp, sourceId,
endTimestamp, startTimestamp,
center: centerPoint, endTimestamp,
size, center: centerPoint,
query: filterQuery || undefined, size,
highlightTerms, query: filterQuery || undefined,
}); highlightTerms,
},
services.http.fetch
);
}, },
onResolve: (response) => { onResolve: (response) => {
setLogEntryHighlights(response.data); setLogEntryHighlights(response.data);

View file

@ -11,6 +11,7 @@ import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights'; import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights';
import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api'; import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api';
import { useBucketSize } from '../log_summary/bucket_size'; import { useBucketSize } from '../log_summary/bucket_size';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export const useLogSummaryHighlights = ( export const useLogSummaryHighlights = (
sourceId: string, sourceId: string,
@ -20,6 +21,7 @@ export const useLogSummaryHighlights = (
filterQuery: string | null, filterQuery: string | null,
highlightTerms: string[] highlightTerms: string[]
) => { ) => {
const { services } = useKibanaContextForPlugin();
const [logSummaryHighlights, setLogSummaryHighlights] = useState< const [logSummaryHighlights, setLogSummaryHighlights] = useState<
LogEntriesSummaryHighlightsResponse['data'] LogEntriesSummaryHighlightsResponse['data']
>([]); >([]);
@ -34,14 +36,17 @@ export const useLogSummaryHighlights = (
throw new Error('Skipping request: Insufficient parameters'); throw new Error('Skipping request: Insufficient parameters');
} }
return await fetchLogSummaryHighlights({ return await fetchLogSummaryHighlights(
sourceId, {
startTimestamp, sourceId,
endTimestamp, startTimestamp,
bucketSize, endTimestamp,
query: filterQuery, bucketSize,
highlightTerms, query: filterQuery,
}); highlightTerms,
},
services.http.fetch
);
}, },
onResolve: (response) => { onResolve: (response) => {
setLogSummaryHighlights(response.data); setLogSummaryHighlights(response.data);

View file

@ -4,17 +4,14 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { HttpSetup } from 'src/core/public'; import type { HttpHandler } from 'src/core/public';
import { import {
getLogSourceConfigurationPath, getLogSourceConfigurationPath,
getLogSourceConfigurationSuccessResponsePayloadRT, getLogSourceConfigurationSuccessResponsePayloadRT,
} from '../../../../../common/http_api/log_sources'; } from '../../../../../common/http_api/log_sources';
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callFetchLogSourceConfigurationAPI = async ( export const callFetchLogSourceConfigurationAPI = async (sourceId: string, fetch: HttpHandler) => {
sourceId: string,
fetch: HttpSetup['fetch']
) => {
const response = await fetch(getLogSourceConfigurationPath(sourceId), { const response = await fetch(getLogSourceConfigurationPath(sourceId), {
method: 'GET', method: 'GET',
}); });

View file

@ -4,14 +4,14 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { HttpSetup } from 'src/core/public'; import type { HttpHandler } from 'src/core/public';
import { import {
getLogSourceStatusPath, getLogSourceStatusPath,
getLogSourceStatusSuccessResponsePayloadRT, getLogSourceStatusSuccessResponsePayloadRT,
} from '../../../../../common/http_api/log_sources'; } from '../../../../../common/http_api/log_sources';
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpSetup['fetch']) => { export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpHandler) => {
const response = await fetch(getLogSourceStatusPath(sourceId), { const response = await fetch(getLogSourceStatusPath(sourceId), {
method: 'GET', method: 'GET',
}); });

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { HttpSetup } from 'src/core/public'; import type { HttpHandler } from 'src/core/public';
import { import {
getLogSourceConfigurationPath, getLogSourceConfigurationPath,
patchLogSourceConfigurationSuccessResponsePayloadRT, patchLogSourceConfigurationSuccessResponsePayloadRT,
@ -16,7 +16,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callPatchLogSourceConfigurationAPI = async ( export const callPatchLogSourceConfigurationAPI = async (
sourceId: string, sourceId: string,
patchedProperties: LogSourceConfigurationPropertiesPatch, patchedProperties: LogSourceConfigurationPropertiesPatch,
fetch: HttpSetup['fetch'] fetch: HttpHandler
) => { ) => {
const response = await fetch(getLogSourceConfigurationPath(sourceId), { const response = await fetch(getLogSourceConfigurationPath(sourceId), {
method: 'PATCH', method: 'PATCH',

View file

@ -7,7 +7,7 @@
import createContainer from 'constate'; import createContainer from 'constate';
import { useCallback, useMemo, useState } from 'react'; import { useCallback, useMemo, useState } from 'react';
import { useMountedState } from 'react-use'; import { useMountedState } from 'react-use';
import { HttpSetup } from 'src/core/public'; import type { HttpHandler } from 'src/core/public';
import { import {
LogSourceConfiguration, LogSourceConfiguration,
LogSourceConfigurationProperties, LogSourceConfigurationProperties,
@ -26,13 +26,7 @@ export {
LogSourceStatus, LogSourceStatus,
}; };
export const useLogSource = ({ export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: HttpHandler }) => {
sourceId,
fetch,
}: {
sourceId: string;
fetch: HttpSetup['fetch'];
}) => {
const getIsMounted = useMountedState(); const getIsMounted = useMountedState();
const [sourceConfiguration, setSourceConfiguration] = useState< const [sourceConfiguration, setSourceConfiguration] = useState<
LogSourceConfiguration | undefined LogSourceConfiguration | undefined

View file

@ -9,6 +9,7 @@ import { esKuery } from '../../../../../../../src/plugins/data/public';
import { fetchLogEntries } from '../log_entries/api/fetch_log_entries'; import { fetchLogEntries } from '../log_entries/api/fetch_log_entries';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { LogEntry, LogEntriesCursor } from '../../../../common/http_api'; import { LogEntry, LogEntriesCursor } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
interface LogStreamProps { interface LogStreamProps {
sourceId: string; sourceId: string;
@ -31,6 +32,7 @@ export function useLogStream({
query, query,
center, center,
}: LogStreamProps): LogStreamState { }: LogStreamProps): LogStreamState {
const { services } = useKibanaContextForPlugin();
const [entries, setEntries] = useState<LogStreamState['entries']>([]); const [entries, setEntries] = useState<LogStreamState['entries']>([]);
const parsedQuery = useMemo(() => { const parsedQuery = useMemo(() => {
@ -47,13 +49,16 @@ export function useLogStream({
setEntries([]); setEntries([]);
const fetchPosition = center ? { center } : { before: 'last' }; const fetchPosition = center ? { center } : { before: 'last' };
return fetchLogEntries({ return fetchLogEntries(
sourceId, {
startTimestamp, sourceId,
endTimestamp, startTimestamp,
query: parsedQuery, endTimestamp,
...fetchPosition, query: parsedQuery,
}); ...fetchPosition,
},
services.http.fetch
);
}, },
onResolve: ({ data }) => { onResolve: ({ data }) => {
setEntries(data.entries); setEntries(data.entries);

View file

@ -4,11 +4,8 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { import {
LOG_ENTRIES_SUMMARY_PATH, LOG_ENTRIES_SUMMARY_PATH,
@ -17,14 +14,14 @@ import {
logEntriesSummaryResponseRT, logEntriesSummaryResponseRT,
} from '../../../../../common/http_api'; } from '../../../../../common/http_api';
export const fetchLogSummary = async (requestArgs: LogEntriesSummaryRequest) => { export const fetchLogSummary = async (
const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_PATH, { requestArgs: LogEntriesSummaryRequest,
fetch: HttpHandler
) => {
const response = await fetch(LOG_ENTRIES_SUMMARY_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify(logEntriesSummaryRequestRT.encode(requestArgs)), body: JSON.stringify(logEntriesSummaryRequestRT.encode(requestArgs)),
}); });
return pipe( return decodeOrThrow(logEntriesSummaryResponseRT)(response);
logEntriesSummaryResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -5,6 +5,8 @@
*/ */
import { renderHook } from '@testing-library/react-hooks'; import { renderHook } from '@testing-library/react-hooks';
// We are using this inside a `jest.mock` call. Jest requires dynamic dependencies to be prefixed with `mock`
import { coreMock as mockCoreMock } from 'src/core/public/mocks';
import { useLogSummary } from './log_summary'; import { useLogSummary } from './log_summary';
@ -16,6 +18,10 @@ import { datemathToEpochMillis } from '../../../utils/datemath';
jest.mock('./api/fetch_log_summary', () => ({ fetchLogSummary: jest.fn() })); jest.mock('./api/fetch_log_summary', () => ({ fetchLogSummary: jest.fn() }));
const fetchLogSummaryMock = fetchLogSummary as jest.MockedFunction<typeof fetchLogSummary>; const fetchLogSummaryMock = fetchLogSummary as jest.MockedFunction<typeof fetchLogSummary>;
jest.mock('../../../hooks/use_kibana', () => ({
useKibanaContextForPlugin: () => ({ services: mockCoreMock.createStart() }),
}));
describe('useLogSummary hook', () => { describe('useLogSummary hook', () => {
beforeEach(() => { beforeEach(() => {
fetchLogSummaryMock.mockClear(); fetchLogSummaryMock.mockClear();
@ -53,7 +59,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({ expect.objectContaining({
sourceId: 'INITIAL_SOURCE_ID', sourceId: 'INITIAL_SOURCE_ID',
}) }),
expect.anything()
); );
expect(result.current.buckets).toEqual(firstMockResponse.data.buckets); expect(result.current.buckets).toEqual(firstMockResponse.data.buckets);
@ -64,7 +71,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({ expect.objectContaining({
sourceId: 'CHANGED_SOURCE_ID', sourceId: 'CHANGED_SOURCE_ID',
}) }),
expect.anything()
); );
expect(result.current.buckets).toEqual(secondMockResponse.data.buckets); expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
}); });
@ -96,7 +104,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({ expect.objectContaining({
query: 'INITIAL_FILTER_QUERY', query: 'INITIAL_FILTER_QUERY',
}) }),
expect.anything()
); );
expect(result.current.buckets).toEqual(firstMockResponse.data.buckets); expect(result.current.buckets).toEqual(firstMockResponse.data.buckets);
@ -107,7 +116,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({ expect.objectContaining({
query: 'CHANGED_FILTER_QUERY', query: 'CHANGED_FILTER_QUERY',
}) }),
expect.anything()
); );
expect(result.current.buckets).toEqual(secondMockResponse.data.buckets); expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
}); });
@ -132,7 +142,8 @@ describe('useLogSummary hook', () => {
expect.objectContaining({ expect.objectContaining({
startTimestamp: firstRange.startTimestamp, startTimestamp: firstRange.startTimestamp,
endTimestamp: firstRange.endTimestamp, endTimestamp: firstRange.endTimestamp,
}) }),
expect.anything()
); );
const secondRange = createMockDateRange('now-20s', 'now'); const secondRange = createMockDateRange('now-20s', 'now');
@ -145,7 +156,8 @@ describe('useLogSummary hook', () => {
expect.objectContaining({ expect.objectContaining({
startTimestamp: secondRange.startTimestamp, startTimestamp: secondRange.startTimestamp,
endTimestamp: secondRange.endTimestamp, endTimestamp: secondRange.endTimestamp,
}) }),
expect.anything()
); );
}); });
}); });

View file

@ -10,6 +10,7 @@ import { useCancellableEffect } from '../../../utils/cancellable_effect';
import { fetchLogSummary } from './api/fetch_log_summary'; import { fetchLogSummary } from './api/fetch_log_summary';
import { LogEntriesSummaryResponse } from '../../../../common/http_api'; import { LogEntriesSummaryResponse } from '../../../../common/http_api';
import { useBucketSize } from './bucket_size'; import { useBucketSize } from './bucket_size';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets']; export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets'];
@ -19,6 +20,7 @@ export const useLogSummary = (
endTimestamp: number | null, endTimestamp: number | null,
filterQuery: string | null filterQuery: string | null
) => { ) => {
const { services } = useKibanaContextForPlugin();
const [logSummaryBuckets, setLogSummaryBuckets] = useState<LogSummaryBuckets>([]); const [logSummaryBuckets, setLogSummaryBuckets] = useState<LogSummaryBuckets>([]);
const bucketSize = useBucketSize(startTimestamp, endTimestamp); const bucketSize = useBucketSize(startTimestamp, endTimestamp);
@ -28,13 +30,16 @@ export const useLogSummary = (
return; return;
} }
fetchLogSummary({ fetchLogSummary(
sourceId, {
startTimestamp, sourceId,
endTimestamp, startTimestamp,
bucketSize, endTimestamp,
query: filterQuery, bucketSize,
}).then((response) => { query: filterQuery,
},
services.http.fetch
).then((response) => {
if (!getIsCancelled()) { if (!getIsCancelled()) {
setLogSummaryBuckets(response.data.buckets); setLogSummaryBuckets(response.data.buckets);
} }

View file

@ -5,21 +5,24 @@
*/ */
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable'; import type { HttpHandler } from 'src/core/public';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../legacy_singletons';
import { getDatafeedId, getJobId } from '../../../../common/infra_ml'; import { getDatafeedId, getJobId } from '../../../../common/infra_ml';
import { throwErrors, createPlainError } from '../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../common/runtime_types';
interface DeleteJobsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callDeleteJobs = async <JobType extends string>( export const callDeleteJobs = async <JobType extends string>(
spaceId: string, requestArgs: DeleteJobsRequestArgs<JobType>,
sourceId: string, fetch: HttpHandler
jobTypes: JobType[]
) => { ) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// NOTE: Deleting the jobs via this API will delete the datafeeds at the same time // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time
const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
deleteJobsRequestPayloadRT.encode({ deleteJobsRequestPayloadRT.encode({
@ -28,28 +31,29 @@ export const callDeleteJobs = async <JobType extends string>(
), ),
}); });
return pipe( return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse);
deleteJobsResponsePayloadRT.decode(deleteJobsResponse),
fold(throwErrors(createPlainError), identity)
);
}; };
export const callGetJobDeletionTasks = async () => { export const callGetJobDeletionTasks = async (fetch: HttpHandler) => {
const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks');
return pipe( return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse);
getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse),
fold(throwErrors(createPlainError), identity)
);
}; };
interface StopDatafeedsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callStopDatafeeds = async <JobType extends string>( export const callStopDatafeeds = async <JobType extends string>(
spaceId: string, requestArgs: StopDatafeedsRequestArgs<JobType>,
sourceId: string, fetch: HttpHandler
jobTypes: JobType[]
) => { ) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// Stop datafeed due to https://github.com/elastic/kibana/issues/44652 // Stop datafeed due to https://github.com/elastic/kibana/issues/44652
const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
stopDatafeedsRequestPayloadRT.encode({ stopDatafeedsRequestPayloadRT.encode({
@ -58,10 +62,7 @@ export const callStopDatafeeds = async <JobType extends string>(
), ),
}); });
return pipe( return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse);
stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse),
fold(throwErrors(createPlainError), identity)
);
}; };
export const deleteJobsRequestPayloadRT = rt.type({ export const deleteJobsRequestPayloadRT = rt.type({

View file

@ -4,21 +4,24 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml'; import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml';
import { createPlainError, throwErrors } from '../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../common/runtime_types';
interface RequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callJobsSummaryAPI = async <JobType extends string>( export const callJobsSummaryAPI = async <JobType extends string>(
spaceId: string, requestArgs: RequestArgs<JobType>,
sourceId: string, fetch: HttpHandler
jobTypes: JobType[]
) => { ) => {
const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { const { spaceId, sourceId, jobTypes } = requestArgs;
const response = await fetch('/api/ml/jobs/jobs_summary', {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
fetchJobStatusRequestPayloadRT.encode({ fetchJobStatusRequestPayloadRT.encode({
@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async <JobType extends string>(
}) })
), ),
}); });
return pipe( return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response);
fetchJobStatusResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };
export const fetchJobStatusRequestPayloadRT = rt.type({ export const fetchJobStatusRequestPayloadRT = rt.type({

View file

@ -4,24 +4,18 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { jobCustomSettingsRT } from '../../../../common/log_analysis'; import { jobCustomSettingsRT } from '../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../common/runtime_types';
export const callGetMlModuleAPI = async (moduleId: string) => { export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => {
const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, {
method: 'GET', method: 'GET',
}); });
return pipe( return decodeOrThrow(getMlModuleResponsePayloadRT)(response);
getMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };
const jobDefinitionRT = rt.type({ const jobDefinitionRT = rt.type({

View file

@ -4,27 +4,38 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts'; import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml'; import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml';
import { createPlainError, throwErrors } from '../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../common/runtime_types';
export const callSetupMlModuleAPI = async ( interface RequestArgs {
moduleId: string, moduleId: string;
start: number | undefined, start?: number;
end: number | undefined, end?: number;
spaceId: string, spaceId: string;
sourceId: string, sourceId: string;
indexPattern: string, indexPattern: string;
jobOverrides: SetupMlModuleJobOverrides[] = [], jobOverrides?: SetupMlModuleJobOverrides[];
datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], datafeedOverrides?: SetupMlModuleDatafeedOverrides[];
query?: object query?: object;
) => { }
const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, {
export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const {
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern,
jobOverrides = [],
datafeedOverrides = [],
query,
} = requestArgs;
const response = await fetch(`/api/ml/modules/setup/${moduleId}`, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
setupMlModuleRequestPayloadRT.encode({ setupMlModuleRequestPayloadRT.encode({
@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async (
), ),
}); });
return pipe( return decodeOrThrow(setupMlModuleResponsePayloadRT)(response);
setupMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };
const setupMlModuleTimeParamsRT = rt.partial({ const setupMlModuleTimeParamsRT = rt.partial({

View file

@ -10,14 +10,15 @@ import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable'; import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function'; import { identity } from 'fp-ts/lib/function';
import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { npStart } from '../../legacy_singletons';
import { import {
getMlCapabilitiesResponsePayloadRT, getMlCapabilitiesResponsePayloadRT,
GetMlCapabilitiesResponsePayload, GetMlCapabilitiesResponsePayload,
} from './api/ml_api_types'; } from './api/ml_api_types';
import { throwErrors, createPlainError } from '../../../common/runtime_types'; import { throwErrors, createPlainError } from '../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
export const useInfraMLCapabilities = () => { export const useInfraMLCapabilities = () => {
const { services } = useKibanaContextForPlugin();
const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>( const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>(
initialMlCapabilities initialMlCapabilities
); );
@ -26,7 +27,7 @@ export const useInfraMLCapabilities = () => {
{ {
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); const rawResponse = await services.http.fetch('/api/ml/ml_capabilities');
return pipe( return pipe(
getMlCapabilitiesResponsePayloadRT.decode(rawResponse), getMlCapabilitiesResponsePayloadRT.decode(rawResponse),

View file

@ -4,16 +4,18 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { HttpHandler } from 'src/core/public';
import { getJobId } from '../../../common/infra_ml'; import { getJobId } from '../../../common/infra_ml';
import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup';
export const cleanUpJobsAndDatafeeds = async <JobType extends string>( export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
spaceId: string, spaceId: string,
sourceId: string, sourceId: string,
jobTypes: JobType[] jobTypes: JobType[],
fetch: HttpHandler
) => { ) => {
try { try {
await callStopDatafeeds(spaceId, sourceId, jobTypes); await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch);
} catch (err) { } catch (err) {
// Proceed only if datafeed has been deleted or didn't exist in the first place // Proceed only if datafeed has been deleted or didn't exist in the first place
if (err?.res?.status !== 404) { if (err?.res?.status !== 404) {
@ -21,27 +23,29 @@ export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
} }
} }
return await deleteJobs(spaceId, sourceId, jobTypes); return await deleteJobs(spaceId, sourceId, jobTypes, fetch);
}; };
const deleteJobs = async <JobType extends string>( const deleteJobs = async <JobType extends string>(
spaceId: string, spaceId: string,
sourceId: string, sourceId: string,
jobTypes: JobType[] jobTypes: JobType[],
fetch: HttpHandler
) => { ) => {
const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch);
await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch);
return deleteJobsResponse; return deleteJobsResponse;
}; };
const waitUntilJobsAreDeleted = async <JobType extends string>( const waitUntilJobsAreDeleted = async <JobType extends string>(
spaceId: string, spaceId: string,
sourceId: string, sourceId: string,
jobTypes: JobType[] jobTypes: JobType[],
fetch: HttpHandler
) => { ) => {
const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType));
while (true) { while (true) {
const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch);
const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId));
if (needToWait) { if (needToWait) {

View file

@ -6,6 +6,7 @@
import { useCallback, useMemo } from 'react'; import { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../common/infra_ml'; import { DatasetFilter } from '../../../common/infra_ml';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { useModuleStatus } from './infra_ml_module_status'; import { useModuleStatus } from './infra_ml_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types';
@ -17,6 +18,7 @@ export const useInfraMLModule = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration; sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>; moduleDescriptor: ModuleDescriptor<JobType>;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const { spaceId, sourceId, timestampField } = sourceConfiguration; const { spaceId, sourceId, timestampField } = sourceConfiguration;
const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
@ -25,7 +27,7 @@ export const useInfraMLModule = <JobType extends string>({
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
dispatchModuleStatus({ type: 'fetchingJobStatuses' }); dispatchModuleStatus({ type: 'fetchingJobStatuses' });
return await moduleDescriptor.getJobSummary(spaceId, sourceId); return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch);
}, },
onResolve: (jobResponse) => { onResolve: (jobResponse) => {
dispatchModuleStatus({ dispatchModuleStatus({
@ -54,18 +56,25 @@ export const useInfraMLModule = <JobType extends string>({
) => { ) => {
dispatchModuleStatus({ type: 'startedSetup' }); dispatchModuleStatus({ type: 'startedSetup' });
const setupResult = await moduleDescriptor.setUpModule( const setupResult = await moduleDescriptor.setUpModule(
start,
end,
datasetFilter,
{ {
indices: selectedIndices, start,
sourceId, end,
spaceId, datasetFilter,
timestampField, moduleSourceConfiguration: {
indices: selectedIndices,
sourceId,
spaceId,
timestampField,
},
partitionField,
}, },
partitionField services.http.fetch
);
const jobSummaries = await moduleDescriptor.getJobSummary(
spaceId,
sourceId,
services.http.fetch
); );
const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId);
return { setupResult, jobSummaries }; return { setupResult, jobSummaries };
}, },
onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => {
@ -89,7 +98,7 @@ export const useInfraMLModule = <JobType extends string>({
{ {
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
return await moduleDescriptor.cleanUpModule(spaceId, sourceId); return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch);
}, },
}, },
[spaceId, sourceId] [spaceId, sourceId]

View file

@ -6,6 +6,7 @@
import { useCallback, useMemo, useState } from 'react'; import { useCallback, useMemo, useState } from 'react';
import { getJobId } from '../../../common/log_analysis'; import { getJobId } from '../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { JobSummary } from './api/ml_get_jobs_summary_api'; import { JobSummary } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module';
@ -18,6 +19,7 @@ export const useInfraMLModuleDefinition = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration; sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>; moduleDescriptor: ModuleDescriptor<JobType>;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const [moduleDefinition, setModuleDefinition] = useState< const [moduleDefinition, setModuleDefinition] = useState<
GetMlModuleResponsePayload | undefined GetMlModuleResponsePayload | undefined
>(); >();
@ -40,7 +42,7 @@ export const useInfraMLModuleDefinition = <JobType extends string>({
{ {
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
return await moduleDescriptor.getModuleDefinition(); return await moduleDescriptor.getModuleDefinition(services.http.fetch);
}, },
onResolve: (response) => { onResolve: (response) => {
setModuleDefinition(response); setModuleDefinition(response);

View file

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { HttpHandler } from 'src/core/public';
import { import {
ValidateLogEntryDatasetsResponsePayload, ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload, ValidationIndicesResponsePayload,
@ -16,6 +16,14 @@ import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api'; export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api';
export interface SetUpModuleArgs {
start?: number | undefined;
end?: number | undefined;
datasetFilter?: DatasetFilter;
moduleSourceConfiguration: ModuleSourceConfiguration;
partitionField?: string;
}
export interface ModuleDescriptor<JobType extends string> { export interface ModuleDescriptor<JobType extends string> {
moduleId: string; moduleId: string;
moduleName: string; moduleName: string;
@ -23,25 +31,32 @@ export interface ModuleDescriptor<JobType extends string> {
jobTypes: JobType[]; jobTypes: JobType[];
bucketSpan: number; bucketSpan: number;
getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>; getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>;
getJobSummary: (spaceId: string, sourceId: string) => Promise<FetchJobStatusResponsePayload>; getJobSummary: (
getModuleDefinition: () => Promise<GetMlModuleResponsePayload>; spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: (fetch: HttpHandler) => Promise<GetMlModuleResponsePayload>;
setUpModule: ( setUpModule: (
start: number | undefined, setUpModuleArgs: SetUpModuleArgs,
end: number | undefined, fetch: HttpHandler
datasetFilter: DatasetFilter,
sourceConfiguration: ModuleSourceConfiguration,
partitionField?: string
) => Promise<SetupMlModuleResponsePayload>; ) => Promise<SetupMlModuleResponsePayload>;
cleanUpModule: (spaceId: string, sourceId: string) => Promise<DeleteJobsResponsePayload>; cleanUpModule: (
spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<DeleteJobsResponsePayload>;
validateSetupIndices?: ( validateSetupIndices?: (
indices: string[], indices: string[],
timestampField: string timestampField: string,
fetch: HttpHandler
) => Promise<ValidationIndicesResponsePayload>; ) => Promise<ValidationIndicesResponsePayload>;
validateSetupDatasets?: ( validateSetupDatasets?: (
indices: string[], indices: string[],
timestampField: string, timestampField: string,
startTime: number, startTime: number,
endTime: number endTime: number,
fetch: HttpHandler
) => Promise<ValidateLogEntryDatasetsResponsePayload>; ) => Promise<ValidateLogEntryDatasetsResponsePayload>;
} }

View file

@ -5,7 +5,8 @@
*/ */
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types'; import { HttpHandler } from 'src/core/public';
import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types';
import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup'; import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api'; import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../api/ml_get_module'; import { callGetMlModuleAPI } from '../../api/ml_get_module';
@ -14,7 +15,6 @@ import {
metricsHostsJobTypes, metricsHostsJobTypes,
getJobId, getJobId,
MetricsHostsJobType, MetricsHostsJobType,
DatasetFilter,
bucketSpan, bucketSpan,
} from '../../../../../common/infra_ml'; } from '../../../../../common/infra_ml';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths // eslint-disable-next-line @kbn/eslint/no-restricted-paths
@ -48,24 +48,28 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<MetricsHostsJobType, string> {} as Record<MetricsHostsJobType, string>
); );
const getJobSummary = async (spaceId: string, sourceId: string) => { const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, metricsHostsJobTypes); const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: metricsHostsJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId)); const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
}; };
const getModuleDefinition = async () => { const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId); return await callGetMlModuleAPI(moduleId, fetch);
}; };
const setUpModule = async ( const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => {
start: number | undefined, const {
end: number | undefined, start,
datasetFilter: DatasetFilter, end,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField },
partitionField?: string partitionField,
) => { } = setUpModuleArgs;
const indexNamePattern = indices.join(','); const indexNamePattern = indices.join(',');
const jobIds: JobType[] = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out']; const jobIds: JobType[] = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out'];
@ -128,14 +132,17 @@ const setUpModule = async (
}); });
return callSetupMlModuleAPI( return callSetupMlModuleAPI(
moduleId, {
start, moduleId,
end, start,
spaceId, end,
sourceId, spaceId,
indexNamePattern, sourceId,
jobOverrides, indexPattern: indexNamePattern,
datafeedOverrides jobOverrides,
datafeedOverrides,
},
fetch
); );
}; };
@ -159,8 +166,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => {
} }
}; };
const cleanUpModule = async (spaceId: string, sourceId: string) => { const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes); return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes, fetch);
}; };
export const metricHostsModule: ModuleDescriptor<MetricsHostsJobType> = { export const metricHostsModule: ModuleDescriptor<MetricsHostsJobType> = {

View file

@ -5,7 +5,8 @@
*/ */
import { i18n } from '@kbn/i18n'; import { i18n } from '@kbn/i18n';
import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types'; import { HttpHandler } from 'src/core/public';
import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types';
import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup'; import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api'; import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../api/ml_get_module'; import { callGetMlModuleAPI } from '../../api/ml_get_module';
@ -14,7 +15,6 @@ import {
metricsK8SJobTypes, metricsK8SJobTypes,
getJobId, getJobId,
MetricK8sJobType, MetricK8sJobType,
DatasetFilter,
bucketSpan, bucketSpan,
} from '../../../../../common/infra_ml'; } from '../../../../../common/infra_ml';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths // eslint-disable-next-line @kbn/eslint/no-restricted-paths
@ -49,24 +49,28 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<MetricK8sJobType, string> {} as Record<MetricK8sJobType, string>
); );
const getJobSummary = async (spaceId: string, sourceId: string) => { const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, metricsK8SJobTypes); const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: metricsK8SJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId)); const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
}; };
const getModuleDefinition = async () => { const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId); return await callGetMlModuleAPI(moduleId, fetch);
}; };
const setUpModule = async ( const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => {
start: number | undefined, const {
end: number | undefined, start,
datasetFilter: DatasetFilter, end,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField },
partitionField?: string partitionField,
) => { } = setUpModuleArgs;
const indexNamePattern = indices.join(','); const indexNamePattern = indices.join(',');
const jobIds: JobType[] = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out']; const jobIds: JobType[] = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out'];
const jobOverrides = jobIds.map((id) => { const jobOverrides = jobIds.map((id) => {
@ -133,14 +137,17 @@ const setUpModule = async (
}); });
return callSetupMlModuleAPI( return callSetupMlModuleAPI(
moduleId, {
start, moduleId,
end, start,
spaceId, end,
sourceId, spaceId,
indexNamePattern, sourceId,
jobOverrides, indexPattern: indexNamePattern,
datafeedOverrides jobOverrides,
datafeedOverrides,
},
fetch
); );
}; };
@ -164,8 +171,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => {
} }
}; };
const cleanUpModule = async (spaceId: string, sourceId: string) => { const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes); return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes, fetch);
}; };
export const metricHostsModule: ModuleDescriptor<MetricK8sJobType> = { export const metricHostsModule: ModuleDescriptor<MetricK8sJobType> = {

View file

@ -1,14 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { CoreStart } from 'kibana/public';
let npStart: CoreStart;
export function registerStartSingleton(start: CoreStart) {
npStart = start;
}
export { npStart };

View file

@ -14,7 +14,6 @@ import { createMemoryHistory } from 'history';
import React from 'react'; import React from 'react';
import { Route, Router, Switch } from 'react-router-dom'; import { Route, Router, Switch } from 'react-router-dom';
import { httpServiceMock } from 'src/core/public/mocks'; import { httpServiceMock } from 'src/core/public/mocks';
// import { HttpSetup } from 'src/core/public';
import { KibanaContextProvider } from 'src/plugins/kibana_react/public'; import { KibanaContextProvider } from 'src/plugins/kibana_react/public';
import { useLogSource } from '../../containers/logs/log_source'; import { useLogSource } from '../../containers/logs/log_source';
import { import {

View file

@ -4,24 +4,28 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { import {
getLogEntryCategoryDatasetsRequestPayloadRT, getLogEntryCategoryDatasetsRequestPayloadRT,
getLogEntryCategoryDatasetsSuccessReponsePayloadRT, getLogEntryCategoryDatasetsSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH,
} from '../../../../../common/http_api/log_analysis'; } from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
}
export const callGetLogEntryCategoryDatasetsAPI = async ( export const callGetLogEntryCategoryDatasetsAPI = async (
sourceId: string, requestArgs: RequestArgs,
startTime: number, fetch: HttpHandler
endTime: number
) => { ) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, { const { sourceId, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryCategoryDatasetsRequestPayloadRT.encode({ getLogEntryCategoryDatasetsRequestPayloadRT.encode({
@ -36,8 +40,5 @@ export const callGetLogEntryCategoryDatasetsAPI = async (
), ),
}); });
return pipe( return decodeOrThrow(getLogEntryCategoryDatasetsSuccessReponsePayloadRT)(response);
getLogEntryCategoryDatasetsSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -4,26 +4,30 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { import {
getLogEntryCategoryExamplesRequestPayloadRT, getLogEntryCategoryExamplesRequestPayloadRT,
getLogEntryCategoryExamplesSuccessReponsePayloadRT, getLogEntryCategoryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis'; } from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
categoryId: number;
exampleCount: number;
}
export const callGetLogEntryCategoryExamplesAPI = async ( export const callGetLogEntryCategoryExamplesAPI = async (
sourceId: string, requestArgs: RequestArgs,
startTime: number, fetch: HttpHandler
endTime: number,
categoryId: number,
exampleCount: number
) => { ) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, { const { sourceId, startTime, endTime, categoryId, exampleCount } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryCategoryExamplesRequestPayloadRT.encode({ getLogEntryCategoryExamplesRequestPayloadRT.encode({
@ -40,8 +44,5 @@ export const callGetLogEntryCategoryExamplesAPI = async (
), ),
}); });
return pipe( return decodeOrThrow(getLogEntryCategoryExamplesSuccessReponsePayloadRT)(response);
getLogEntryCategoryExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -4,28 +4,31 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { import {
getLogEntryCategoriesRequestPayloadRT, getLogEntryCategoriesRequestPayloadRT,
getLogEntryCategoriesSuccessReponsePayloadRT, getLogEntryCategoriesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH,
} from '../../../../../common/http_api/log_analysis'; } from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
categoryCount: number;
datasets?: string[];
}
export const callGetTopLogEntryCategoriesAPI = async ( export const callGetTopLogEntryCategoriesAPI = async (
sourceId: string, requestArgs: RequestArgs,
startTime: number, fetch: HttpHandler
endTime: number,
categoryCount: number,
datasets?: string[]
) => { ) => {
const { sourceId, startTime, endTime, categoryCount, datasets } = requestArgs;
const intervalDuration = endTime - startTime; const intervalDuration = endTime - startTime;
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, { const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryCategoriesRequestPayloadRT.encode({ getLogEntryCategoriesRequestPayloadRT.encode({
@ -60,8 +63,5 @@ export const callGetTopLogEntryCategoriesAPI = async (
), ),
}); });
return pipe( return decodeOrThrow(getLogEntryCategoriesSuccessReponsePayloadRT)(response);
getLogEntryCategoriesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -13,6 +13,7 @@ import {
import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise'; import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise';
import { callGetTopLogEntryCategoriesAPI } from './service_calls/get_top_log_entry_categories'; import { callGetTopLogEntryCategoriesAPI } from './service_calls/get_top_log_entry_categories';
import { callGetLogEntryCategoryDatasetsAPI } from './service_calls/get_log_entry_category_datasets'; import { callGetLogEntryCategoryDatasetsAPI } from './service_calls/get_log_entry_category_datasets';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
type TopLogEntryCategories = GetLogEntryCategoriesSuccessResponsePayload['data']['categories']; type TopLogEntryCategories = GetLogEntryCategoriesSuccessResponsePayload['data']['categories'];
type LogEntryCategoryDatasets = GetLogEntryCategoryDatasetsSuccessResponsePayload['data']['datasets']; type LogEntryCategoryDatasets = GetLogEntryCategoryDatasetsSuccessResponsePayload['data']['datasets'];
@ -34,6 +35,7 @@ export const useLogEntryCategoriesResults = ({
sourceId: string; sourceId: string;
startTime: number; startTime: number;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const [topLogEntryCategories, setTopLogEntryCategories] = useState<TopLogEntryCategories>([]); const [topLogEntryCategories, setTopLogEntryCategories] = useState<TopLogEntryCategories>([]);
const [logEntryCategoryDatasets, setLogEntryCategoryDatasets] = useState< const [logEntryCategoryDatasets, setLogEntryCategoryDatasets] = useState<
LogEntryCategoryDatasets LogEntryCategoryDatasets
@ -44,11 +46,14 @@ export const useLogEntryCategoriesResults = ({
cancelPreviousOn: 'creation', cancelPreviousOn: 'creation',
createPromise: async () => { createPromise: async () => {
return await callGetTopLogEntryCategoriesAPI( return await callGetTopLogEntryCategoriesAPI(
sourceId, {
startTime, sourceId,
endTime, startTime,
categoriesCount, endTime,
filteredDatasets categoryCount: categoriesCount,
datasets: filteredDatasets,
},
services.http.fetch
); );
}, },
onResolve: ({ data: { categories } }) => { onResolve: ({ data: { categories } }) => {
@ -71,7 +76,10 @@ export const useLogEntryCategoriesResults = ({
{ {
cancelPreviousOn: 'creation', cancelPreviousOn: 'creation',
createPromise: async () => { createPromise: async () => {
return await callGetLogEntryCategoryDatasetsAPI(sourceId, startTime, endTime); return await callGetLogEntryCategoryDatasetsAPI(
{ sourceId, startTime, endTime },
services.http.fetch
);
}, },
onResolve: ({ data: { datasets } }) => { onResolve: ({ data: { datasets } }) => {
setLogEntryCategoryDatasets(datasets); setLogEntryCategoryDatasets(datasets);

View file

@ -7,6 +7,7 @@
import { useMemo, useState } from 'react'; import { useMemo, useState } from 'react';
import { LogEntryCategoryExample } from '../../../../common/http_api'; import { LogEntryCategoryExample } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryCategoryExamplesAPI } from './service_calls/get_log_entry_category_examples'; import { callGetLogEntryCategoryExamplesAPI } from './service_calls/get_log_entry_category_examples';
@ -23,6 +24,8 @@ export const useLogEntryCategoryExamples = ({
sourceId: string; sourceId: string;
startTime: number; startTime: number;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const [logEntryCategoryExamples, setLogEntryCategoryExamples] = useState< const [logEntryCategoryExamples, setLogEntryCategoryExamples] = useState<
LogEntryCategoryExample[] LogEntryCategoryExample[]
>([]); >([]);
@ -32,11 +35,14 @@ export const useLogEntryCategoryExamples = ({
cancelPreviousOn: 'creation', cancelPreviousOn: 'creation',
createPromise: async () => { createPromise: async () => {
return await callGetLogEntryCategoryExamplesAPI( return await callGetLogEntryCategoryExamplesAPI(
sourceId, {
startTime, sourceId,
endTime, startTime,
categoryId, endTime,
exampleCount categoryId,
exampleCount,
},
services.http.fetch
); );
}, },
onResolve: ({ data: { examples } }) => { onResolve: ({ data: { examples } }) => {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { npStart } from '../../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { import {
getLogEntryAnomaliesRequestPayloadRT, getLogEntryAnomaliesRequestPayloadRT,
getLogEntryAnomaliesSuccessReponsePayloadRT, getLogEntryAnomaliesSuccessReponsePayloadRT,
@ -13,15 +13,18 @@ import {
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { Sort, Pagination } from '../../../../../common/http_api/log_analysis'; import { Sort, Pagination } from '../../../../../common/http_api/log_analysis';
export const callGetLogEntryAnomaliesAPI = async ( interface RequestArgs {
sourceId: string, sourceId: string;
startTime: number, startTime: number;
endTime: number, endTime: number;
sort: Sort, sort: Sort;
pagination: Pagination, pagination: Pagination;
datasets?: string[] datasets?: string[];
) => { }
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
export const callGetLogEntryAnomaliesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { sourceId, startTime, endTime, sort, pagination, datasets } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryAnomaliesRequestPayloadRT.encode({ getLogEntryAnomaliesRequestPayloadRT.encode({

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { npStart } from '../../../../legacy_singletons'; import type { HttpHandler } from 'src/core/public';
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { import {
getLogEntryAnomaliesDatasetsRequestPayloadRT, getLogEntryAnomaliesDatasetsRequestPayloadRT,
@ -12,12 +12,18 @@ import {
LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH,
} from '../../../../../common/http_api/log_analysis'; } from '../../../../../common/http_api/log_analysis';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
}
export const callGetLogEntryAnomaliesDatasetsAPI = async ( export const callGetLogEntryAnomaliesDatasetsAPI = async (
sourceId: string, requestArgs: RequestArgs,
startTime: number, fetch: HttpHandler
endTime: number
) => { ) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, { const { sourceId, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({ getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({

View file

@ -4,27 +4,27 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { import {
getLogEntryExamplesRequestPayloadRT, getLogEntryExamplesRequestPayloadRT,
getLogEntryExamplesSuccessReponsePayloadRT, getLogEntryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis'; } from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callGetLogEntryExamplesAPI = async ( interface RequestArgs {
sourceId: string, sourceId: string;
startTime: number, startTime: number;
endTime: number, endTime: number;
dataset: string, dataset: string;
exampleCount: number, exampleCount: number;
categoryId?: string categoryId?: string;
) => { }
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
export const callGetLogEntryExamplesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { sourceId, startTime, endTime, dataset, exampleCount, categoryId } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryExamplesRequestPayloadRT.encode({ getLogEntryExamplesRequestPayloadRT.encode({
@ -42,8 +42,5 @@ export const callGetLogEntryExamplesAPI = async (
), ),
}); });
return pipe( return decodeOrThrow(getLogEntryExamplesSuccessReponsePayloadRT)(response);
getLogEntryExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -4,25 +4,25 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { fold } from 'fp-ts/lib/Either'; import type { HttpHandler } from 'src/core/public';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { import {
getLogEntryRateRequestPayloadRT, getLogEntryRateRequestPayloadRT,
getLogEntryRateSuccessReponsePayloadRT, getLogEntryRateSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
} from '../../../../../common/http_api/log_analysis'; } from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callGetLogEntryRateAPI = async ( interface RequestArgs {
sourceId: string, sourceId: string;
startTime: number, startTime: number;
endTime: number, endTime: number;
bucketDuration: number, bucketDuration: number;
datasets?: string[] datasets?: string[];
) => { }
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, {
export const callGetLogEntryRateAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { sourceId, startTime, endTime, bucketDuration, datasets } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getLogEntryRateRequestPayloadRT.encode({ getLogEntryRateRequestPayloadRT.encode({
@ -38,8 +38,5 @@ export const callGetLogEntryRateAPI = async (
}) })
), ),
}); });
return pipe( return decodeOrThrow(getLogEntryRateSuccessReponsePayloadRT)(response);
getLogEntryRateSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
}; };

View file

@ -16,6 +16,7 @@ import {
GetLogEntryAnomaliesDatasetsSuccessResponsePayload, GetLogEntryAnomaliesDatasetsSuccessResponsePayload,
LogEntryAnomaly, LogEntryAnomaly,
} from '../../../../common/http_api/log_analysis'; } from '../../../../common/http_api/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export type SortOptions = Sort; export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>; export type PaginationOptions = Pick<Pagination, 'pageSize'>;
@ -161,6 +162,8 @@ export const useLogEntryAnomaliesResults = ({
}; };
}; };
const { services } = useKibanaContextForPlugin();
const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer); const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer);
const [logEntryAnomalies, setLogEntryAnomalies] = useState<LogEntryAnomalies>([]); const [logEntryAnomalies, setLogEntryAnomalies] = useState<LogEntryAnomalies>([]);
@ -177,15 +180,18 @@ export const useLogEntryAnomaliesResults = ({
filteredDatasets: queryFilteredDatasets, filteredDatasets: queryFilteredDatasets,
} = reducerState; } = reducerState;
return await callGetLogEntryAnomaliesAPI( return await callGetLogEntryAnomaliesAPI(
sourceId,
queryStartTime,
queryEndTime,
sortOptions,
{ {
...paginationOptions, sourceId,
cursor: paginationCursor, startTime: queryStartTime,
endTime: queryEndTime,
sort: sortOptions,
pagination: {
...paginationOptions,
cursor: paginationCursor,
},
datasets: queryFilteredDatasets,
}, },
queryFilteredDatasets services.http.fetch
); );
}, },
onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
@ -286,7 +292,10 @@ export const useLogEntryAnomaliesResults = ({
{ {
cancelPreviousOn: 'creation', cancelPreviousOn: 'creation',
createPromise: async () => { createPromise: async () => {
return await callGetLogEntryAnomaliesDatasetsAPI(sourceId, startTime, endTime); return await callGetLogEntryAnomaliesDatasetsAPI(
{ sourceId, startTime, endTime },
services.http.fetch
);
}, },
onResolve: ({ data: { datasets } }) => { onResolve: ({ data: { datasets } }) => {
setLogEntryAnomaliesDatasets(datasets); setLogEntryAnomaliesDatasets(datasets);

View file

@ -7,6 +7,7 @@
import { useMemo, useState } from 'react'; import { useMemo, useState } from 'react';
import { LogEntryExample } from '../../../../common/http_api'; import { LogEntryExample } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples'; import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples';
@ -25,6 +26,7 @@ export const useLogEntryExamples = ({
startTime: number; startTime: number;
categoryId?: string; categoryId?: string;
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const [logEntryExamples, setLogEntryExamples] = useState<LogEntryExample[]>([]); const [logEntryExamples, setLogEntryExamples] = useState<LogEntryExample[]>([]);
const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise( const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise(
@ -32,12 +34,15 @@ export const useLogEntryExamples = ({
cancelPreviousOn: 'creation', cancelPreviousOn: 'creation',
createPromise: async () => { createPromise: async () => {
return await callGetLogEntryExamplesAPI( return await callGetLogEntryExamplesAPI(
sourceId, {
startTime, sourceId,
endTime, startTime,
dataset, endTime,
exampleCount, dataset,
categoryId exampleCount,
categoryId,
},
services.http.fetch
); );
}, },
onResolve: ({ data: { examples } }) => { onResolve: ({ data: { examples } }) => {

View file

@ -12,6 +12,7 @@ import {
LogEntryRatePartition, LogEntryRatePartition,
LogEntryRateAnomaly, LogEntryRateAnomaly,
} from '../../../../common/http_api/log_analysis'; } from '../../../../common/http_api/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryRateAPI } from './service_calls/get_log_entry_rate'; import { callGetLogEntryRateAPI } from './service_calls/get_log_entry_rate';
@ -49,6 +50,7 @@ export const useLogEntryRateResults = ({
bucketDuration: number; bucketDuration: number;
filteredDatasets?: string[]; filteredDatasets?: string[];
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const [logEntryRate, setLogEntryRate] = useState<LogEntryRateResults | null>(null); const [logEntryRate, setLogEntryRate] = useState<LogEntryRateResults | null>(null);
const [getLogEntryRateRequest, getLogEntryRate] = useTrackedPromise( const [getLogEntryRateRequest, getLogEntryRate] = useTrackedPromise(
@ -56,11 +58,14 @@ export const useLogEntryRateResults = ({
cancelPreviousOn: 'resolution', cancelPreviousOn: 'resolution',
createPromise: async () => { createPromise: async () => {
return await callGetLogEntryRateAPI( return await callGetLogEntryRateAPI(
sourceId, {
startTime, sourceId,
endTime, startTime,
bucketDuration, endTime,
filteredDatasets bucketDuration,
datasets: filteredDatasets,
},
services.http.fetch
); );
}, },
onResolve: ({ data }) => { onResolve: ({ data }) => {

View file

@ -5,6 +5,7 @@
*/ */
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import { HttpHandler } from 'src/core/public';
import { import {
INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH,
Metric, Metric,
@ -16,8 +17,8 @@ import {
getMetricsHostsAnomaliesSuccessReponsePayloadRT, getMetricsHostsAnomaliesSuccessReponsePayloadRT,
} from '../../../../../common/http_api/infra_ml'; } from '../../../../../common/http_api/infra_ml';
import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../../utils/use_tracked_promise';
import { npStart } from '../../../../legacy_singletons';
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana';
export type SortOptions = Sort; export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>; export type PaginationOptions = Pick<Pagination, 'pageSize'>;
@ -149,6 +150,7 @@ export const useMetricsHostsAnomaliesResults = ({
onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void;
filteredDatasets?: string[]; filteredDatasets?: string[];
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
return { return {
...stateDefaults, ...stateDefaults,
@ -177,15 +179,18 @@ export const useMetricsHostsAnomaliesResults = ({
paginationCursor, paginationCursor,
} = reducerState; } = reducerState;
return await callGetMetricHostsAnomaliesAPI( return await callGetMetricHostsAnomaliesAPI(
sourceId,
queryStartTime,
queryEndTime,
metric,
sortOptions,
{ {
...paginationOptions, sourceId,
cursor: paginationCursor, startTime: queryStartTime,
} endTime: queryEndTime,
metric,
sort: sortOptions,
pagination: {
...paginationOptions,
cursor: paginationCursor,
},
},
services.http.fetch
); );
}, },
onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
@ -288,15 +293,21 @@ export const useMetricsHostsAnomaliesResults = ({
}; };
}; };
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
metric: Metric;
sort: Sort;
pagination: Pagination;
}
export const callGetMetricHostsAnomaliesAPI = async ( export const callGetMetricHostsAnomaliesAPI = async (
sourceId: string, requestArgs: RequestArgs,
startTime: number, fetch: HttpHandler
endTime: number,
metric: Metric,
sort: Sort,
pagination: Pagination
) => { ) => {
const response = await npStart.http.fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, { const { sourceId, startTime, endTime, metric, sort, pagination } = requestArgs;
const response = await fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getMetricsHostsAnomaliesRequestPayloadRT.encode({ getMetricsHostsAnomaliesRequestPayloadRT.encode({

View file

@ -5,6 +5,7 @@
*/ */
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import { HttpHandler } from 'src/core/public';
import { import {
Sort, Sort,
Pagination, Pagination,
@ -16,8 +17,8 @@ import {
Metric, Metric,
} from '../../../../../common/http_api/infra_ml'; } from '../../../../../common/http_api/infra_ml';
import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; import { useTrackedPromise } from '../../../../utils/use_tracked_promise';
import { npStart } from '../../../../legacy_singletons';
import { decodeOrThrow } from '../../../../../common/runtime_types'; import { decodeOrThrow } from '../../../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana';
export type SortOptions = Sort; export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>; export type PaginationOptions = Pick<Pagination, 'pageSize'>;
@ -149,6 +150,7 @@ export const useMetricsK8sAnomaliesResults = ({
onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void;
filteredDatasets?: string[]; filteredDatasets?: string[];
}) => { }) => {
const { services } = useKibanaContextForPlugin();
const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
return { return {
...stateDefaults, ...stateDefaults,
@ -178,16 +180,19 @@ export const useMetricsK8sAnomaliesResults = ({
filteredDatasets: queryFilteredDatasets, filteredDatasets: queryFilteredDatasets,
} = reducerState; } = reducerState;
return await callGetMetricsK8sAnomaliesAPI( return await callGetMetricsK8sAnomaliesAPI(
sourceId,
queryStartTime,
queryEndTime,
metric,
sortOptions,
{ {
...paginationOptions, sourceId,
cursor: paginationCursor, startTime: queryStartTime,
endTime: queryEndTime,
metric,
sort: sortOptions,
pagination: {
...paginationOptions,
cursor: paginationCursor,
},
datasets: queryFilteredDatasets,
}, },
queryFilteredDatasets services.http.fetch
); );
}, },
onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
@ -290,16 +295,22 @@ export const useMetricsK8sAnomaliesResults = ({
}; };
}; };
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
metric: Metric;
sort: Sort;
pagination: Pagination;
datasets?: string[];
}
export const callGetMetricsK8sAnomaliesAPI = async ( export const callGetMetricsK8sAnomaliesAPI = async (
sourceId: string, requestArgs: RequestArgs,
startTime: number, fetch: HttpHandler
endTime: number,
metric: Metric,
sort: Sort,
pagination: Pagination,
datasets?: string[]
) => { ) => {
const response = await npStart.http.fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, { const { sourceId, startTime, endTime, metric, sort, pagination, datasets } = requestArgs;
const response = await fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, {
method: 'POST', method: 'POST',
body: JSON.stringify( body: JSON.stringify(
getMetricsK8sAnomaliesRequestPayloadRT.encode({ getMetricsK8sAnomaliesRequestPayloadRT.encode({

View file

@ -9,7 +9,6 @@ import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/public';
import { createMetricThresholdAlertType } from './alerting/metric_threshold'; import { createMetricThresholdAlertType } from './alerting/metric_threshold';
import { createInventoryMetricAlertType } from './alerting/inventory'; import { createInventoryMetricAlertType } from './alerting/inventory';
import { getAlertType as getLogsAlertType } from './alerting/log_threshold'; import { getAlertType as getLogsAlertType } from './alerting/log_threshold';
import { registerStartSingleton } from './legacy_singletons';
import { registerFeatures } from './register_feature'; import { registerFeatures } from './register_feature';
import { import {
InfraClientSetupDeps, InfraClientSetupDeps,
@ -98,9 +97,7 @@ export class Plugin implements InfraClientPluginClass {
}); });
} }
start(core: InfraClientCoreStart, _plugins: InfraClientStartDeps) { start(_core: InfraClientCoreStart, _plugins: InfraClientStartDeps) {}
registerStartSingleton(core);
}
stop() {} stop() {}
} }