[Logs UI] Remove legacy singletons (#77743)

Removes the `npStart` legacy singleton that was introduced during the migration to the new platform. The singleton was used in API calls to access the `http.fetch` service. To remove it, `fetch` is now injected as a dependency into every function that issues a request, and React hooks obtain the handler from `useKibanaContextForPlugin` via `services.http.fetch`. Along the way, positional argument lists are collapsed into single request-args objects, and the `pipe`/`fold` response decoding is simplified to `decodeOrThrow`.
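
As a minimal sketch of the resulting pattern (the endpoint, interface, and function names below are illustrative placeholders, not identifiers from the diff):

```ts
import type { HttpHandler } from 'src/core/public';

// Stand-in for the per-endpoint `RequestArgs` interfaces this PR introduces.
interface ExampleRequestArgs {
  sourceId: string;
}

// Before, modules imported the `npStart` singleton and called
// `npStart.http.fetch(...)` internally, hiding the HTTP dependency.
// Now the caller injects the handler, so the function depends only on
// its arguments and is trivial to mock in tests.
export const callExampleAPI = async (requestArgs: ExampleRequestArgs, fetch: HttpHandler) => {
  const { sourceId } = requestArgs;
  return await fetch('/api/example', {
    method: 'POST',
    body: JSON.stringify({ sourceId }),
  });
};
```

Inside React code the handler comes from the Kibana context instead of a module-level import, e.g. `const { services } = useKibanaContextForPlugin();` followed by `callExampleAPI({ sourceId }, services.http.fetch)`.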
Alejandro Fernández Gómez 2020-10-02 18:57:50 +02:00 committed by GitHub
parent bb4ad196ea
commit 2899e83df8
59 changed files with 796 additions and 626 deletions
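
The diff also repeatedly swaps the fp-ts `pipe`/`fold` decoding boilerplate for the existing `decodeOrThrow` helper. A minimal sketch, assuming an illustrative `io-ts` codec and import path (the real call sites use payload codecs such as `deleteJobsResponsePayloadRT`):

```ts
import * as rt from 'io-ts';
import { decodeOrThrow } from '../common/runtime_types';

// Illustrative response codec.
const examplePayloadRT = rt.type({ id: rt.string });

// Before:
//   return pipe(
//     examplePayloadRT.decode(response),
//     fold(throwErrors(createPlainError), identity)
//   );
// After: `decodeOrThrow` curries the codec and performs the same
// decode-or-throw in a single call.
export const decodeExamplePayload = (response: unknown) =>
  decodeOrThrow(examplePayloadRT)(response);
```

Because the handler is now an explicit argument, tests such as the `useLogSummary` suite below can mock the Kibana context and assert on the injected fetch with `expect.anything()` instead of stubbing a global singleton.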

View file

@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { HttpSetup } from 'src/core/public';
import type { HttpHandler } from 'src/core/public';
import {
INFRA_ALERT_PREVIEW_PATH,
METRIC_THRESHOLD_ALERT_TYPE_ID,
@@ -22,7 +22,7 @@ export async function getAlertPreview({
params,
alertType,
}: {
fetch: HttpSetup['fetch'];
fetch: HttpHandler;
params: AlertPreviewRequestParams;
alertType: PreviewableAlertTypes;
}): Promise<AlertPreviewSuccessResponsePayload> {

View file

@@ -5,21 +5,25 @@
*/
import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { getDatafeedId, getJobId } from '../../../../../common/log_analysis';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface DeleteJobsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callDeleteJobs = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
requestArgs: DeleteJobsRequestArgs<JobType>,
fetch: HttpHandler
) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// NOTE: Deleting the jobs via this API will delete the datafeeds at the same time
const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', {
const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', {
method: 'POST',
body: JSON.stringify(
deleteJobsRequestPayloadRT.encode({
@@ -28,28 +32,29 @@ export const callDeleteJobs = async <JobType extends string>(
),
});
return pipe(
deleteJobsResponsePayloadRT.decode(deleteJobsResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse);
};
export const callGetJobDeletionTasks = async () => {
const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks');
export const callGetJobDeletionTasks = async (fetch: HttpHandler) => {
const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks');
return pipe(
getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse);
};
interface StopDatafeedsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callStopDatafeeds = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
requestArgs: StopDatafeedsRequestArgs<JobType>,
fetch: HttpHandler
) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// Stop datafeed due to https://github.com/elastic/kibana/issues/44652
const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', {
const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', {
method: 'POST',
body: JSON.stringify(
stopDatafeedsRequestPayloadRT.encode({
@@ -58,10 +63,7 @@ export const callStopDatafeeds = async <JobType extends string>(
),
});
return pipe(
stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse);
};
export const deleteJobsRequestPayloadRT = rt.type({

View file

@@ -4,21 +4,24 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { getJobId, jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callJobsSummaryAPI = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
requestArgs: RequestArgs<JobType>,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', {
const { spaceId, sourceId, jobTypes } = requestArgs;
const response = await fetch('/api/ml/jobs/jobs_summary', {
method: 'POST',
body: JSON.stringify(
fetchJobStatusRequestPayloadRT.encode({
@@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async <JobType extends string>(
})
),
});
return pipe(
fetchJobStatusResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response);
};
export const fetchJobStatusRequestPayloadRT = rt.type({

View file

@@ -4,24 +4,18 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callGetMlModuleAPI = async (moduleId: string) => {
const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, {
export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => {
const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, {
method: 'GET',
});
return pipe(
getMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getMlModuleResponsePayloadRT)(response);
};
const jobDefinitionRT = rt.type({

View file

@@ -4,27 +4,38 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callSetupMlModuleAPI = async (
moduleId: string,
start: number | undefined,
end: number | undefined,
spaceId: string,
sourceId: string,
indexPattern: string,
jobOverrides: SetupMlModuleJobOverrides[] = [],
datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [],
query?: object
) => {
const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, {
interface RequestArgs {
moduleId: string;
start?: number;
end?: number;
spaceId: string;
sourceId: string;
indexPattern: string;
jobOverrides?: SetupMlModuleJobOverrides[];
datafeedOverrides?: SetupMlModuleDatafeedOverrides[];
query?: object;
}
export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const {
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern,
jobOverrides = [],
datafeedOverrides = [],
query,
} = requestArgs;
const response = await fetch(`/api/ml/modules/setup/${moduleId}`, {
method: 'POST',
body: JSON.stringify(
setupMlModuleRequestPayloadRT.encode({
@@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async (
),
});
return pipe(
setupMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(setupMlModuleResponsePayloadRT)(response);
};
const setupMlModuleTimeParamsRT = rt.partial({

View file

@@ -4,21 +4,24 @@
* you may not use this file except in compliance with the Elastic License.
*/
import type { HttpHandler } from 'src/core/public';
import {
LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
validateLogEntryDatasetsRequestPayloadRT,
validateLogEntryDatasetsResponsePayloadRT,
} from '../../../../../common/http_api';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import { npStart } from '../../../../legacy_singletons';
export const callValidateDatasetsAPI = async (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, {
interface RequestArgs {
indices: string[];
timestampField: string;
startTime: number;
endTime: number;
}
export const callValidateDatasetsAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { indices, timestampField, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, {
method: 'POST',
body: JSON.stringify(
validateLogEntryDatasetsRequestPayloadRT.encode({

View file

@@ -4,10 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
LOG_ANALYSIS_VALIDATE_INDICES_PATH,
ValidationIndicesFieldSpecification,
@@ -15,19 +13,19 @@ import {
validationIndicesResponsePayloadRT,
} from '../../../../../common/http_api';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callValidateIndicesAPI = async (
indices: string[],
fields: ValidationIndicesFieldSpecification[]
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, {
interface RequestArgs {
indices: string[];
fields: ValidationIndicesFieldSpecification[];
}
export const callValidateIndicesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { indices, fields } = requestArgs;
const response = await fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, {
method: 'POST',
body: JSON.stringify(validationIndicesRequestPayloadRT.encode({ data: { indices, fields } })),
});
return pipe(
validationIndicesResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(validationIndicesResponsePayloadRT)(response);
};

View file

@@ -6,18 +6,16 @@
import createContainer from 'constate';
import { useMemo, useState, useEffect } from 'react';
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { npStart } from '../../../legacy_singletons';
import {
getMlCapabilitiesResponsePayloadRT,
GetMlCapabilitiesResponsePayload,
} from './api/ml_api_types';
import { throwErrors, createPlainError } from '../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export const useLogAnalysisCapabilities = () => {
const { services } = useKibanaContextForPlugin();
const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>(
initialMlCapabilities
);
@@ -26,12 +24,9 @@ export const useLogAnalysisCapabilities = () => {
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities');
const rawResponse = await services.http.fetch('/api/ml/ml_capabilities');
return pipe(
getMlCapabilitiesResponsePayloadRT.decode(rawResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getMlCapabilitiesResponsePayloadRT)(rawResponse);
},
onResolve: (response) => {
setMlCapabilities(response);

View file

@@ -3,17 +3,18 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import type { HttpHandler } from 'src/core/public';
import { getJobId } from '../../../../common/log_analysis';
import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup';
export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
jobTypes: JobType[],
fetch: HttpHandler
) => {
try {
await callStopDatafeeds(spaceId, sourceId, jobTypes);
await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch);
} catch (err) {
// Proceed only if datafeed has been deleted or didn't exist in the first place
if (err?.res?.status !== 404) {
@@ -21,27 +22,29 @@ export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
}
}
return await deleteJobs(spaceId, sourceId, jobTypes);
return await deleteJobs(spaceId, sourceId, jobTypes, fetch);
};
const deleteJobs = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
jobTypes: JobType[],
fetch: HttpHandler
) => {
const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes);
await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes);
const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch);
await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch);
return deleteJobsResponse;
};
const waitUntilJobsAreDeleted = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
jobTypes: JobType[],
fetch: HttpHandler
) => {
const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType));
while (true) {
const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks();
const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch);
const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId));
if (needToWait) {

View file

@@ -6,6 +6,7 @@
import { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { useModuleStatus } from './log_analysis_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
@@ -17,6 +18,7 @@ export const useLogAnalysisModule = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
const { services } = useKibanaContextForPlugin();
const { spaceId, sourceId, timestampField } = sourceConfiguration;
const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
@@ -25,7 +27,7 @@ export const useLogAnalysisModule = <JobType extends string>({
cancelPreviousOn: 'resolution',
createPromise: async () => {
dispatchModuleStatus({ type: 'fetchingJobStatuses' });
return await moduleDescriptor.getJobSummary(spaceId, sourceId);
return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch);
},
onResolve: (jobResponse) => {
dispatchModuleStatus({
@@ -52,13 +54,23 @@ export const useLogAnalysisModule = <JobType extends string>({
datasetFilter: DatasetFilter
) => {
dispatchModuleStatus({ type: 'startedSetup' });
const setupResult = await moduleDescriptor.setUpModule(start, end, datasetFilter, {
indices: selectedIndices,
sourceId,
const setupResult = await moduleDescriptor.setUpModule(
start,
end,
datasetFilter,
{
indices: selectedIndices,
sourceId,
spaceId,
timestampField,
},
services.http.fetch
);
const jobSummaries = await moduleDescriptor.getJobSummary(
spaceId,
timestampField,
});
const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId);
sourceId,
services.http.fetch
);
return { setupResult, jobSummaries };
},
onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => {
@@ -82,7 +94,7 @@ export const useLogAnalysisModule = <JobType extends string>({
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await moduleDescriptor.cleanUpModule(spaceId, sourceId);
return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch);
},
},
[spaceId, sourceId]

View file

@@ -6,6 +6,7 @@
import { useCallback, useMemo, useState } from 'react';
import { getJobId } from '../../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { JobSummary } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module';
@@ -18,6 +19,7 @@ export const useLogAnalysisModuleDefinition = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
const { services } = useKibanaContextForPlugin();
const [moduleDefinition, setModuleDefinition] = useState<
GetMlModuleResponsePayload | undefined
>();
@@ -40,7 +42,7 @@ export const useLogAnalysisModuleDefinition = <JobType extends string>({
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await moduleDescriptor.getModuleDefinition();
return await moduleDescriptor.getModuleDefinition(services.http.fetch);
},
onResolve: (response) => {
setModuleDefinition(response);

View file

@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import type { HttpHandler } from 'src/core/public';
import {
ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload,
@@ -23,24 +24,35 @@ export interface ModuleDescriptor<JobType extends string> {
jobTypes: JobType[];
bucketSpan: number;
getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>;
getJobSummary: (spaceId: string, sourceId: string) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: () => Promise<GetMlModuleResponsePayload>;
getJobSummary: (
spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: (fetch: HttpHandler) => Promise<GetMlModuleResponsePayload>;
setUpModule: (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
sourceConfiguration: ModuleSourceConfiguration
sourceConfiguration: ModuleSourceConfiguration,
fetch: HttpHandler
) => Promise<SetupMlModuleResponsePayload>;
cleanUpModule: (spaceId: string, sourceId: string) => Promise<DeleteJobsResponsePayload>;
cleanUpModule: (
spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<DeleteJobsResponsePayload>;
validateSetupIndices: (
indices: string[],
timestampField: string
timestampField: string,
fetch: HttpHandler
) => Promise<ValidationIndicesResponsePayload>;
validateSetupDatasets: (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
endTime: number,
fetch: HttpHandler
) => Promise<ValidateLogEntryDatasetsResponsePayload>;
}

View file

@@ -18,6 +18,7 @@ import {
ValidationIndicesError,
ValidationUIError,
} from '../../../components/logging/log_analysis_setup/initial_configuration_step';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types';
@@ -43,6 +44,7 @@ export const useAnalysisSetupState = <JobType extends string>({
setUpModule,
sourceConfiguration,
}: AnalysisSetupStateArguments<JobType>) => {
const { services } = useKibanaContextForPlugin();
const [startTime, setStartTime] = useState<number | undefined>(Date.now() - fourWeeksInMs);
const [endTime, setEndTime] = useState<number | undefined>(undefined);
@@ -158,7 +160,8 @@ export const useAnalysisSetupState = <JobType extends string>({
createPromise: async () => {
return await validateSetupIndices(
sourceConfiguration.indices,
sourceConfiguration.timestampField
sourceConfiguration.timestampField,
services.http.fetch
);
},
onResolve: ({ data: { errors } }) => {
@@ -183,7 +186,8 @@ export const useAnalysisSetupState = <JobType extends string>({
validIndexNames,
sourceConfiguration.timestampField,
startTime ?? 0,
endTime ?? Date.now()
endTime ?? Date.now(),
services.http.fetch
);
},
onResolve: ({ data: { datasets } }) => {

View file

@@ -5,6 +5,7 @@
*/
import { i18n } from '@kbn/i18n';
import type { HttpHandler } from 'src/core/public';
import {
bucketSpan,
categoriesMessageField,
@@ -42,22 +43,26 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<LogEntryCategoriesJobType, string>
);
const getJobSummary = async (spaceId: string, sourceId: string) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryCategoriesJobTypes);
const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: logEntryCategoriesJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
const getModuleDefinition = async () => {
return await callGetMlModuleAPI(moduleId);
const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId, fetch);
};
const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
fetch: HttpHandler
) => {
const indexNamePattern = indices.join(',');
const jobOverrides = [
@@ -101,46 +106,59 @@ const setUpModule = async (
};
return callSetupMlModuleAPI(
moduleId,
start,
end,
spaceId,
sourceId,
indexNamePattern,
jobOverrides,
[],
query
{
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern: indexNamePattern,
jobOverrides,
query,
},
fetch
);
};
const cleanUpModule = async (spaceId: string, sourceId: string) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes);
const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes, fetch);
};
const validateSetupIndices = async (indices: string[], timestampField: string) => {
return await callValidateIndicesAPI(indices, [
const validateSetupIndices = async (
indices: string[],
timestampField: string,
fetch: HttpHandler
) => {
return await callValidateIndicesAPI(
{
name: timestampField,
validTypes: ['date'],
indices,
fields: [
{
name: timestampField,
validTypes: ['date'],
},
{
name: partitionField,
validTypes: ['keyword'],
},
{
name: categoriesMessageField,
validTypes: ['text'],
},
],
},
{
name: partitionField,
validTypes: ['keyword'],
},
{
name: categoriesMessageField,
validTypes: ['text'],
},
]);
fetch
);
};
const validateSetupDatasets = async (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
endTime: number,
fetch: HttpHandler
) => {
return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch);
};
export const logEntryCategoriesModule: ModuleDescriptor<LogEntryCategoriesJobType> = {

View file

@@ -5,6 +5,7 @@
*/
import { i18n } from '@kbn/i18n';
import type { HttpHandler } from 'src/core/public';
import {
bucketSpan,
DatasetFilter,
@@ -41,22 +42,26 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<LogEntryRateJobType, string>
);
const getJobSummary = async (spaceId: string, sourceId: string) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryRateJobTypes);
const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: logEntryRateJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
const getModuleDefinition = async () => {
return await callGetMlModuleAPI(moduleId);
const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId, fetch);
};
const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
fetch: HttpHandler
) => {
const indexNamePattern = indices.join(',');
const jobOverrides = [
@@ -93,42 +98,55 @@ const setUpModule = async (
: undefined;
return callSetupMlModuleAPI(
moduleId,
start,
end,
spaceId,
sourceId,
indexNamePattern,
jobOverrides,
[],
query
{
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern: indexNamePattern,
jobOverrides,
query,
},
fetch
);
};
const cleanUpModule = async (spaceId: string, sourceId: string) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes);
const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes, fetch);
};
const validateSetupIndices = async (indices: string[], timestampField: string) => {
return await callValidateIndicesAPI(indices, [
const validateSetupIndices = async (
indices: string[],
timestampField: string,
fetch: HttpHandler
) => {
return await callValidateIndicesAPI(
{
name: timestampField,
validTypes: ['date'],
indices,
fields: [
{
name: timestampField,
validTypes: ['date'],
},
{
name: partitionField,
validTypes: ['keyword'],
},
],
},
{
name: partitionField,
validTypes: ['keyword'],
},
]);
fetch
);
};
const validateSetupDatasets = async (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
endTime: number,
fetch: HttpHandler
) => {
return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch);
};
export const logEntryRateModule: ModuleDescriptor<LogEntryRateJobType> = {

View file

@@ -4,12 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
LOG_ENTRIES_PATH,
@@ -18,11 +15,11 @@ import {
logEntriesResponseRT,
} from '../../../../../common/http_api';
export const fetchLogEntries = async (requestArgs: LogEntriesRequest) => {
const response = await npStart.http.fetch(LOG_ENTRIES_PATH, {
export const fetchLogEntries = async (requestArgs: LogEntriesRequest, fetch: HttpHandler) => {
const response = await fetch(LOG_ENTRIES_PATH, {
method: 'POST',
body: JSON.stringify(logEntriesRequestRT.encode(requestArgs)),
});
return pipe(logEntriesResponseRT.decode(response), fold(throwErrors(createPlainError), identity));
return decodeOrThrow(logEntriesResponseRT)(response);
};

View file

@@ -4,12 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
LOG_ENTRIES_ITEM_PATH,
@@ -18,14 +15,14 @@ import {
logEntriesItemResponseRT,
} from '../../../../../common/http_api';
export const fetchLogEntriesItem = async (requestArgs: LogEntriesItemRequest) => {
const response = await npStart.http.fetch(LOG_ENTRIES_ITEM_PATH, {
export const fetchLogEntriesItem = async (
requestArgs: LogEntriesItemRequest,
fetch: HttpHandler
) => {
const response = await fetch(LOG_ENTRIES_ITEM_PATH, {
method: 'POST',
body: JSON.stringify(logEntriesItemRequestRT.encode(requestArgs)),
});
return pipe(
logEntriesItemResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(logEntriesItemResponseRT)(response);
};

View file

@@ -14,6 +14,7 @@ import {
LogEntriesBaseRequest,
} from '../../../../common/http_api';
import { fetchLogEntries } from './api/fetch_log_entries';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
const DESIRED_BUFFER_PAGES = 2;
const LIVE_STREAM_INTERVAL = 5000;
@@ -144,6 +145,7 @@ const useFetchEntriesEffect = (
dispatch: Dispatch,
props: LogEntriesProps
) => {
const { services } = useKibanaContextForPlugin();
const [prevParams, cachePrevParams] = useState<LogEntriesProps | undefined>();
const [startedStreaming, setStartedStreaming] = useState(false);
@@ -172,7 +174,7 @@ const useFetchEntriesEffect = (
before: 'last',
};
const { data: payload } = await fetchLogEntries(fetchArgs);
const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch);
dispatch({ type: Action.ReceiveNewEntries, payload });
// Move position to the bottom if it's the first load.
@@ -228,7 +230,7 @@ const useFetchEntriesEffect = (
after: state.bottomCursor,
};
const { data: payload } = await fetchLogEntries(fetchArgs);
const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch);
dispatch({
type: getEntriesBefore ? Action.ReceiveEntriesBefore : Action.ReceiveEntriesAfter,

View file

@@ -9,6 +9,7 @@ import { isString } from 'lodash';
import React, { useContext, useEffect, useMemo, useState } from 'react';
import { LogEntriesItem } from '../../../common/http_api';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { UrlStateContainer } from '../../utils/url_state';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { fetchLogEntriesItem } from './log_entries/api/fetch_log_entries_item';
@@ -26,6 +27,7 @@ export interface FlyoutOptionsUrlState {
}
export const useLogFlyout = () => {
const { services } = useKibanaContextForPlugin();
const { sourceId } = useLogSourceContext();
const [flyoutVisible, setFlyoutVisibility] = useState<boolean>(false);
const [flyoutId, setFlyoutId] = useState<string | null>(null);
@@ -39,7 +41,7 @@ export const useLogFlyout = () => {
if (!flyoutId) {
return;
}
return await fetchLogEntriesItem({ sourceId, id: flyoutId });
return await fetchLogEntriesItem({ sourceId, id: flyoutId }, services.http.fetch);
},
onResolve: (response) => {
if (response) {

View file

@@ -4,12 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
LOG_ENTRIES_HIGHLIGHTS_PATH,
@@ -18,14 +15,14 @@ import {
logEntriesHighlightsResponseRT,
} from '../../../../../common/http_api';
export const fetchLogEntriesHighlights = async (requestArgs: LogEntriesHighlightsRequest) => {
const response = await npStart.http.fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, {
export const fetchLogEntriesHighlights = async (
requestArgs: LogEntriesHighlightsRequest,
fetch: HttpHandler
) => {
const response = await fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, {
method: 'POST',
body: JSON.stringify(logEntriesHighlightsRequestRT.encode(requestArgs)),
});
return pipe(
logEntriesHighlightsResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(logEntriesHighlightsResponseRT)(response);
};

View file

@@ -3,11 +3,9 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import type { HttpHandler } from 'src/core/public';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH,
@@ -17,15 +15,13 @@ import {
} from '../../../../../common/http_api';
export const fetchLogSummaryHighlights = async (
requestArgs: LogEntriesSummaryHighlightsRequest
requestArgs: LogEntriesSummaryHighlightsRequest,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, {
const response = await fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, {
method: 'POST',
body: JSON.stringify(logEntriesSummaryHighlightsRequestRT.encode(requestArgs)),
});
return pipe(
logEntriesSummaryHighlightsResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(logEntriesSummaryHighlightsResponseRT)(response);
};

View file

@@ -10,6 +10,7 @@ import { TimeKey } from '../../../../common/time';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { fetchLogEntriesHighlights } from './api/fetch_log_entries_highlights';
import { LogEntry, LogEntriesHighlightsResponse } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export const useLogEntryHighlights = (
sourceId: string,
@@ -21,6 +22,7 @@ export const useLogEntryHighlights = (
filterQuery: string | null,
highlightTerms: string[]
) => {
const { services } = useKibanaContextForPlugin();
const [logEntryHighlights, setLogEntryHighlights] = useState<
LogEntriesHighlightsResponse['data']
>([]);
@@ -32,15 +34,18 @@ export const useLogEntryHighlights = (
throw new Error('Skipping request: Insufficient parameters');
}
return await fetchLogEntriesHighlights({
sourceId,
startTimestamp,
endTimestamp,
center: centerPoint,
size,
query: filterQuery || undefined,
highlightTerms,
});
return await fetchLogEntriesHighlights(
{
sourceId,
startTimestamp,
endTimestamp,
center: centerPoint,
size,
query: filterQuery || undefined,
highlightTerms,
},
services.http.fetch
);
},
onResolve: (response) => {
setLogEntryHighlights(response.data);

View file

@@ -11,6 +11,7 @@ import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights';
import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api';
import { useBucketSize } from '../log_summary/bucket_size';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export const useLogSummaryHighlights = (
sourceId: string,
@@ -20,6 +21,7 @@ export const useLogSummaryHighlights = (
filterQuery: string | null,
highlightTerms: string[]
) => {
const { services } = useKibanaContextForPlugin();
const [logSummaryHighlights, setLogSummaryHighlights] = useState<
LogEntriesSummaryHighlightsResponse['data']
>([]);
@@ -34,14 +36,17 @@ export const useLogSummaryHighlights = (
throw new Error('Skipping request: Insufficient parameters');
}
return await fetchLogSummaryHighlights({
sourceId,
startTimestamp,
endTimestamp,
bucketSize,
query: filterQuery,
highlightTerms,
});
return await fetchLogSummaryHighlights(
{
sourceId,
startTimestamp,
endTimestamp,
bucketSize,
query: filterQuery,
highlightTerms,
},
services.http.fetch
);
},
onResolve: (response) => {
setLogSummaryHighlights(response.data);

View file

@@ -4,17 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { HttpSetup } from 'src/core/public';
import type { HttpHandler } from 'src/core/public';
import {
getLogSourceConfigurationPath,
getLogSourceConfigurationSuccessResponsePayloadRT,
} from '../../../../../common/http_api/log_sources';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callFetchLogSourceConfigurationAPI = async (
sourceId: string,
fetch: HttpSetup['fetch']
) => {
export const callFetchLogSourceConfigurationAPI = async (sourceId: string, fetch: HttpHandler) => {
const response = await fetch(getLogSourceConfigurationPath(sourceId), {
method: 'GET',
});

View file

@@ -4,14 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { HttpSetup } from 'src/core/public';
import type { HttpHandler } from 'src/core/public';
import {
getLogSourceStatusPath,
getLogSourceStatusSuccessResponsePayloadRT,
} from '../../../../../common/http_api/log_sources';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpSetup['fetch']) => {
export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpHandler) => {
const response = await fetch(getLogSourceStatusPath(sourceId), {
method: 'GET',
});

View file

@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { HttpSetup } from 'src/core/public';
import type { HttpHandler } from 'src/core/public';
import {
getLogSourceConfigurationPath,
patchLogSourceConfigurationSuccessResponsePayloadRT,
@@ -16,7 +16,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callPatchLogSourceConfigurationAPI = async (
sourceId: string,
patchedProperties: LogSourceConfigurationPropertiesPatch,
fetch: HttpSetup['fetch']
fetch: HttpHandler
) => {
const response = await fetch(getLogSourceConfigurationPath(sourceId), {
method: 'PATCH',

View file

@@ -7,7 +7,7 @@
import createContainer from 'constate';
import { useCallback, useMemo, useState } from 'react';
import { useMountedState } from 'react-use';
import { HttpSetup } from 'src/core/public';
import type { HttpHandler } from 'src/core/public';
import {
LogSourceConfiguration,
LogSourceConfigurationProperties,
@@ -26,13 +26,7 @@ export {
LogSourceStatus,
};
export const useLogSource = ({
sourceId,
fetch,
}: {
sourceId: string;
fetch: HttpSetup['fetch'];
}) => {
export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: HttpHandler }) => {
const getIsMounted = useMountedState();
const [sourceConfiguration, setSourceConfiguration] = useState<
LogSourceConfiguration | undefined

View file

@@ -9,6 +9,7 @@ import { esKuery } from '../../../../../../../src/plugins/data/public';
import { fetchLogEntries } from '../log_entries/api/fetch_log_entries';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { LogEntry, LogEntriesCursor } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
interface LogStreamProps {
sourceId: string;
@@ -31,6 +32,7 @@ export function useLogStream({
query,
center,
}: LogStreamProps): LogStreamState {
const { services } = useKibanaContextForPlugin();
const [entries, setEntries] = useState<LogStreamState['entries']>([]);
const parsedQuery = useMemo(() => {
@@ -47,13 +49,16 @@ export function useLogStream({
setEntries([]);
const fetchPosition = center ? { center } : { before: 'last' };
return fetchLogEntries({
sourceId,
startTimestamp,
endTimestamp,
query: parsedQuery,
...fetchPosition,
});
return fetchLogEntries(
{
sourceId,
startTimestamp,
endTimestamp,
query: parsedQuery,
...fetchPosition,
},
services.http.fetch
);
},
onResolve: ({ data }) => {
setEntries(data.entries);

View file

@@ -4,11 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import { throwErrors, createPlainError } from '../../../../../common/runtime_types';
import type { HttpHandler } from 'src/core/public';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
LOG_ENTRIES_SUMMARY_PATH,
@@ -17,14 +14,14 @@ import {
logEntriesSummaryResponseRT,
} from '../../../../../common/http_api';
export const fetchLogSummary = async (requestArgs: LogEntriesSummaryRequest) => {
const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_PATH, {
export const fetchLogSummary = async (
requestArgs: LogEntriesSummaryRequest,
fetch: HttpHandler
) => {
const response = await fetch(LOG_ENTRIES_SUMMARY_PATH, {
method: 'POST',
body: JSON.stringify(logEntriesSummaryRequestRT.encode(requestArgs)),
});
return pipe(
logEntriesSummaryResponseRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(logEntriesSummaryResponseRT)(response);
};

View file

@@ -5,6 +5,8 @@
*/
import { renderHook } from '@testing-library/react-hooks';
// We are using this inside a `jest.mock` call. Jest requires dynamic dependencies to be prefixed with `mock`
import { coreMock as mockCoreMock } from 'src/core/public/mocks';
import { useLogSummary } from './log_summary';
@@ -16,6 +18,10 @@ import { datemathToEpochMillis } from '../../../utils/datemath';
jest.mock('./api/fetch_log_summary', () => ({ fetchLogSummary: jest.fn() }));
const fetchLogSummaryMock = fetchLogSummary as jest.MockedFunction<typeof fetchLogSummary>;
jest.mock('../../../hooks/use_kibana', () => ({
useKibanaContextForPlugin: () => ({ services: mockCoreMock.createStart() }),
}));
describe('useLogSummary hook', () => {
beforeEach(() => {
fetchLogSummaryMock.mockClear();
@@ -53,7 +59,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({
sourceId: 'INITIAL_SOURCE_ID',
})
}),
expect.anything()
);
expect(result.current.buckets).toEqual(firstMockResponse.data.buckets);
@@ -64,7 +71,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({
sourceId: 'CHANGED_SOURCE_ID',
})
}),
expect.anything()
);
expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
});
@@ -96,7 +104,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({
query: 'INITIAL_FILTER_QUERY',
})
}),
expect.anything()
);
expect(result.current.buckets).toEqual(firstMockResponse.data.buckets);
@@ -107,7 +116,8 @@ describe('useLogSummary hook', () => {
expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
expect.objectContaining({
query: 'CHANGED_FILTER_QUERY',
})
}),
expect.anything()
);
expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
});
@@ -132,7 +142,8 @@ describe('useLogSummary hook', () => {
expect.objectContaining({
startTimestamp: firstRange.startTimestamp,
endTimestamp: firstRange.endTimestamp,
})
}),
expect.anything()
);
const secondRange = createMockDateRange('now-20s', 'now');
@@ -145,7 +156,8 @@ describe('useLogSummary hook', () => {
expect.objectContaining({
startTimestamp: secondRange.startTimestamp,
endTimestamp: secondRange.endTimestamp,
})
}),
expect.anything()
);
});
});

View file

@@ -10,6 +10,7 @@ import { useCancellableEffect } from '../../../utils/cancellable_effect';
import { fetchLogSummary } from './api/fetch_log_summary';
import { LogEntriesSummaryResponse } from '../../../../common/http_api';
import { useBucketSize } from './bucket_size';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets'];
@@ -19,6 +20,7 @@ export const useLogSummary = (
endTimestamp: number | null,
filterQuery: string | null
) => {
const { services } = useKibanaContextForPlugin();
const [logSummaryBuckets, setLogSummaryBuckets] = useState<LogSummaryBuckets>([]);
const bucketSize = useBucketSize(startTimestamp, endTimestamp);
@@ -28,13 +30,16 @@ export const useLogSummary = (
return;
}
fetchLogSummary({
sourceId,
startTimestamp,
endTimestamp,
bucketSize,
query: filterQuery,
}).then((response) => {
fetchLogSummary(
{
sourceId,
startTimestamp,
endTimestamp,
bucketSize,
query: filterQuery,
},
services.http.fetch
).then((response) => {
if (!getIsCancelled()) {
setLogSummaryBuckets(response.data.buckets);
}

View file

@@ -5,21 +5,24 @@
*/
import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { getDatafeedId, getJobId } from '../../../../common/infra_ml';
import { throwErrors, createPlainError } from '../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../common/runtime_types';
interface DeleteJobsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callDeleteJobs = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
requestArgs: DeleteJobsRequestArgs<JobType>,
fetch: HttpHandler
) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// NOTE: Deleting the jobs via this API will delete the datafeeds at the same time
const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', {
const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', {
method: 'POST',
body: JSON.stringify(
deleteJobsRequestPayloadRT.encode({
@@ -28,28 +31,29 @@ export const callDeleteJobs = async <JobType extends string>(
),
});
return pipe(
deleteJobsResponsePayloadRT.decode(deleteJobsResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse);
};
export const callGetJobDeletionTasks = async () => {
const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks');
export const callGetJobDeletionTasks = async (fetch: HttpHandler) => {
const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks');
return pipe(
getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse);
};
interface StopDatafeedsRequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callStopDatafeeds = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
requestArgs: StopDatafeedsRequestArgs<JobType>,
fetch: HttpHandler
) => {
const { spaceId, sourceId, jobTypes } = requestArgs;
// Stop datafeed due to https://github.com/elastic/kibana/issues/44652
const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', {
const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', {
method: 'POST',
body: JSON.stringify(
stopDatafeedsRequestPayloadRT.encode({
@@ -58,10 +62,7 @@ export const callStopDatafeeds = async <JobType extends string>(
),
});
return pipe(
stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse);
};
export const deleteJobsRequestPayloadRT = rt.type({

View file

@@ -4,21 +4,24 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../common/runtime_types';
interface RequestArgs<JobType extends string> {
spaceId: string;
sourceId: string;
jobTypes: JobType[];
}
export const callJobsSummaryAPI = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
requestArgs: RequestArgs<JobType>,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', {
const { spaceId, sourceId, jobTypes } = requestArgs;
const response = await fetch('/api/ml/jobs/jobs_summary', {
method: 'POST',
body: JSON.stringify(
fetchJobStatusRequestPayloadRT.encode({
@@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async <JobType extends string>(
})
),
});
return pipe(
fetchJobStatusResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response);
};
export const fetchJobStatusRequestPayloadRT = rt.type({

View file

@@ -4,24 +4,18 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { jobCustomSettingsRT } from '../../../../common/log_analysis';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../common/runtime_types';
export const callGetMlModuleAPI = async (moduleId: string) => {
const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, {
export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => {
const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, {
method: 'GET',
});
return pipe(
getMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getMlModuleResponsePayloadRT)(response);
};
const jobDefinitionRT = rt.type({

View file

@@ -4,27 +4,38 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { pipe } from 'fp-ts/lib/pipeable';
import * as rt from 'io-ts';
import { npStart } from '../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml';
import { createPlainError, throwErrors } from '../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../common/runtime_types';
export const callSetupMlModuleAPI = async (
moduleId: string,
start: number | undefined,
end: number | undefined,
spaceId: string,
sourceId: string,
indexPattern: string,
jobOverrides: SetupMlModuleJobOverrides[] = [],
datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [],
query?: object
) => {
const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, {
interface RequestArgs {
moduleId: string;
start?: number;
end?: number;
spaceId: string;
sourceId: string;
indexPattern: string;
jobOverrides?: SetupMlModuleJobOverrides[];
datafeedOverrides?: SetupMlModuleDatafeedOverrides[];
query?: object;
}
export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const {
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern,
jobOverrides = [],
datafeedOverrides = [],
query,
} = requestArgs;
const response = await fetch(`/api/ml/modules/setup/${moduleId}`, {
method: 'POST',
body: JSON.stringify(
setupMlModuleRequestPayloadRT.encode({
@@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async (
),
});
return pipe(
setupMlModuleResponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(setupMlModuleResponsePayloadRT)(response);
};
const setupMlModuleTimeParamsRT = rt.partial({

View file

@@ -10,14 +10,15 @@ import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { npStart } from '../../legacy_singletons';
import {
getMlCapabilitiesResponsePayloadRT,
GetMlCapabilitiesResponsePayload,
} from './api/ml_api_types';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
export const useInfraMLCapabilities = () => {
const { services } = useKibanaContextForPlugin();
const [mlCapabilities, setMlCapabilities] = useState<GetMlCapabilitiesResponsePayload>(
initialMlCapabilities
);
@@ -26,7 +27,7 @@ export const useInfraMLCapabilities = () => {
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities');
const rawResponse = await services.http.fetch('/api/ml/ml_capabilities');
return pipe(
getMlCapabilitiesResponsePayloadRT.decode(rawResponse),

View file

@@ -4,16 +4,18 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { HttpHandler } from 'src/core/public';
import { getJobId } from '../../../common/infra_ml';
import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup';
export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
jobTypes: JobType[],
fetch: HttpHandler
) => {
try {
await callStopDatafeeds(spaceId, sourceId, jobTypes);
await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch);
} catch (err) {
// Proceed only if datafeed has been deleted or didn't exist in the first place
if (err?.res?.status !== 404) {
@@ -21,27 +23,29 @@ export const cleanUpJobsAndDatafeeds = async <JobType extends string>(
}
}
return await deleteJobs(spaceId, sourceId, jobTypes);
return await deleteJobs(spaceId, sourceId, jobTypes, fetch);
};
const deleteJobs = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
jobTypes: JobType[],
fetch: HttpHandler
) => {
const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes);
await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes);
const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch);
await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch);
return deleteJobsResponse;
};
const waitUntilJobsAreDeleted = async <JobType extends string>(
spaceId: string,
sourceId: string,
jobTypes: JobType[]
jobTypes: JobType[],
fetch: HttpHandler
) => {
const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType));
while (true) {
const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks();
const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch);
const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId));
if (needToWait) {

View file

@@ -6,6 +6,7 @@
import { useCallback, useMemo } from 'react';
import { DatasetFilter } from '../../../common/infra_ml';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { useModuleStatus } from './infra_ml_module_status';
import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types';
@@ -17,6 +18,7 @@ export const useInfraMLModule = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
const { services } = useKibanaContextForPlugin();
const { spaceId, sourceId, timestampField } = sourceConfiguration;
const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes);
@@ -25,7 +27,7 @@ export const useInfraMLModule = <JobType extends string>({
cancelPreviousOn: 'resolution',
createPromise: async () => {
dispatchModuleStatus({ type: 'fetchingJobStatuses' });
return await moduleDescriptor.getJobSummary(spaceId, sourceId);
return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch);
},
onResolve: (jobResponse) => {
dispatchModuleStatus({
@@ -54,18 +56,25 @@ export const useInfraMLModule = <JobType extends string>({
) => {
dispatchModuleStatus({ type: 'startedSetup' });
const setupResult = await moduleDescriptor.setUpModule(
start,
end,
datasetFilter,
{
indices: selectedIndices,
sourceId,
spaceId,
timestampField,
start,
end,
datasetFilter,
moduleSourceConfiguration: {
indices: selectedIndices,
sourceId,
spaceId,
timestampField,
},
partitionField,
},
partitionField
services.http.fetch
);
const jobSummaries = await moduleDescriptor.getJobSummary(
spaceId,
sourceId,
services.http.fetch
);
const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId);
return { setupResult, jobSummaries };
},
onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => {
@ -89,7 +98,7 @@ export const useInfraMLModule = <JobType extends string>({
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await moduleDescriptor.cleanUpModule(spaceId, sourceId);
return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch);
},
},
[spaceId, sourceId]

View file

@ -6,6 +6,7 @@
import { useCallback, useMemo, useState } from 'react';
import { getJobId } from '../../../common/log_analysis';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { useTrackedPromise } from '../../utils/use_tracked_promise';
import { JobSummary } from './api/ml_get_jobs_summary_api';
import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module';
@ -18,6 +19,7 @@ export const useInfraMLModuleDefinition = <JobType extends string>({
sourceConfiguration: ModuleSourceConfiguration;
moduleDescriptor: ModuleDescriptor<JobType>;
}) => {
const { services } = useKibanaContextForPlugin();
const [moduleDefinition, setModuleDefinition] = useState<
GetMlModuleResponsePayload | undefined
>();
@ -40,7 +42,7 @@ export const useInfraMLModuleDefinition = <JobType extends string>({
{
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await moduleDescriptor.getModuleDefinition();
return await moduleDescriptor.getModuleDefinition(services.http.fetch);
},
onResolve: (response) => {
setModuleDefinition(response);

View file

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { HttpHandler } from 'src/core/public';
import {
ValidateLogEntryDatasetsResponsePayload,
ValidationIndicesResponsePayload,
@ -16,6 +16,14 @@ import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api';
export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api';
export interface SetUpModuleArgs {
start?: number | undefined;
end?: number | undefined;
datasetFilter?: DatasetFilter;
moduleSourceConfiguration: ModuleSourceConfiguration;
partitionField?: string;
}
export interface ModuleDescriptor<JobType extends string> {
moduleId: string;
moduleName: string;
@ -23,25 +31,32 @@ export interface ModuleDescriptor<JobType extends string> {
jobTypes: JobType[];
bucketSpan: number;
getJobIds: (spaceId: string, sourceId: string) => Record<JobType, string>;
getJobSummary: (spaceId: string, sourceId: string) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: () => Promise<GetMlModuleResponsePayload>;
getJobSummary: (
spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<FetchJobStatusResponsePayload>;
getModuleDefinition: (fetch: HttpHandler) => Promise<GetMlModuleResponsePayload>;
setUpModule: (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
sourceConfiguration: ModuleSourceConfiguration,
partitionField?: string
setUpModuleArgs: SetUpModuleArgs,
fetch: HttpHandler
) => Promise<SetupMlModuleResponsePayload>;
cleanUpModule: (spaceId: string, sourceId: string) => Promise<DeleteJobsResponsePayload>;
cleanUpModule: (
spaceId: string,
sourceId: string,
fetch: HttpHandler
) => Promise<DeleteJobsResponsePayload>;
validateSetupIndices?: (
indices: string[],
timestampField: string
timestampField: string,
fetch: HttpHandler
) => Promise<ValidationIndicesResponsePayload>;
validateSetupDatasets?: (
indices: string[],
timestampField: string,
startTime: number,
endTime: number
endTime: number,
fetch: HttpHandler
) => Promise<ValidateLogEntryDatasetsResponsePayload>;
}

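Note: collapsing the five positional setup parameters into a single SetUpModuleArgs makes call sites self-describing and lets the optional fields be omitted cleanly. A sketch of the new calling convention (all field values below are hypothetical):

const args: SetUpModuleArgs = {
  start: Date.now() - 24 * 60 * 60 * 1000,
  end: Date.now(),
  moduleSourceConfiguration: {
    indices: ['metricbeat-*'], // hypothetical index list
    sourceId: 'default',
    spaceId: 'default',
    timestampField: '@timestamp',
  },
  partitionField: 'host.name', // optional, as is datasetFilter
};

const result = await moduleDescriptor.setUpModule(args, services.http.fetch);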
View file

@ -5,7 +5,8 @@
*/
import { i18n } from '@kbn/i18n';
import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types';
import { HttpHandler } from 'src/core/public';
import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types';
import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../api/ml_get_module';
@ -14,7 +15,6 @@ import {
metricsHostsJobTypes,
getJobId,
MetricsHostsJobType,
DatasetFilter,
bucketSpan,
} from '../../../../../common/infra_ml';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
@ -48,24 +48,28 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<MetricsHostsJobType, string>
);
const getJobSummary = async (spaceId: string, sourceId: string) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, metricsHostsJobTypes);
const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: metricsHostsJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
const getModuleDefinition = async () => {
return await callGetMlModuleAPI(moduleId);
const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId, fetch);
};
const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
partitionField?: string
) => {
const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => {
const {
start,
end,
moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField },
partitionField,
} = setUpModuleArgs;
const indexNamePattern = indices.join(',');
const jobIds: JobType[] = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out'];
@ -128,14 +132,17 @@ const setUpModule = async (
});
return callSetupMlModuleAPI(
moduleId,
start,
end,
spaceId,
sourceId,
indexNamePattern,
jobOverrides,
datafeedOverrides
{
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern: indexNamePattern,
jobOverrides,
datafeedOverrides,
},
fetch
);
};
@ -159,8 +166,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => {
}
};
const cleanUpModule = async (spaceId: string, sourceId: string) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes);
const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes, fetch);
};
export const metricHostsModule: ModuleDescriptor<MetricsHostsJobType> = {

View file

@ -5,7 +5,8 @@
*/
import { i18n } from '@kbn/i18n';
import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types';
import { HttpHandler } from 'src/core/public';
import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types';
import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
import { callGetMlModuleAPI } from '../../api/ml_get_module';
@ -14,7 +15,6 @@ import {
metricsK8SJobTypes,
getJobId,
MetricK8sJobType,
DatasetFilter,
bucketSpan,
} from '../../../../../common/infra_ml';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
@ -49,24 +49,28 @@ const getJobIds = (spaceId: string, sourceId: string) =>
{} as Record<MetricK8sJobType, string>
);
const getJobSummary = async (spaceId: string, sourceId: string) => {
const response = await callJobsSummaryAPI(spaceId, sourceId, metricsK8SJobTypes);
const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
const response = await callJobsSummaryAPI(
{ spaceId, sourceId, jobTypes: metricsK8SJobTypes },
fetch
);
const jobIds = Object.values(getJobIds(spaceId, sourceId));
return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
};
const getModuleDefinition = async () => {
return await callGetMlModuleAPI(moduleId);
const getModuleDefinition = async (fetch: HttpHandler) => {
return await callGetMlModuleAPI(moduleId, fetch);
};
const setUpModule = async (
start: number | undefined,
end: number | undefined,
datasetFilter: DatasetFilter,
{ spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
partitionField?: string
) => {
const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => {
const {
start,
end,
moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField },
partitionField,
} = setUpModuleArgs;
const indexNamePattern = indices.join(',');
const jobIds: JobType[] = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out'];
const jobOverrides = jobIds.map((id) => {
@ -133,14 +137,17 @@ const setUpModule = async (
});
return callSetupMlModuleAPI(
moduleId,
start,
end,
spaceId,
sourceId,
indexNamePattern,
jobOverrides,
datafeedOverrides
{
moduleId,
start,
end,
spaceId,
sourceId,
indexPattern: indexNamePattern,
jobOverrides,
datafeedOverrides,
},
fetch
);
};
@ -164,8 +171,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => {
}
};
const cleanUpModule = async (spaceId: string, sourceId: string) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes);
const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => {
return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes, fetch);
};
export const metricHostsModule: ModuleDescriptor<MetricK8sJobType> = {

View file

@ -1,14 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { CoreStart } from 'kibana/public';
let npStart: CoreStart;
export function registerStartSingleton(start: CoreStart) {
npStart = start;
}
export { npStart };

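Note: with this file deleted, HTTP access no longer flows through module-level mutable state, which is what forced tests to bootstrap the whole plugin before exercising a single service call. A sketch of what injection buys (callPing and /api/ping are made up for illustration):

import type { HttpHandler } from 'src/core/public';

export const callPing = async (fetch: HttpHandler) => await fetch('/api/ping');

// In a test, no registerStartSingleton bootstrapping is required:
//   const fetch = jest.fn().mockResolvedValue({ ok: true });
//   await callPing(fetch);
//   expect(fetch).toHaveBeenCalledWith('/api/ping');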
View file

@ -14,7 +14,6 @@ import { createMemoryHistory } from 'history';
import React from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { httpServiceMock } from 'src/core/public/mocks';
// import { HttpSetup } from 'src/core/public';
import { KibanaContextProvider } from 'src/plugins/kibana_react/public';
import { useLogSource } from '../../containers/logs/log_source';
import {

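Note: because the hooks now resolve fetch through the Kibana context, this test file can feed them the stock core mocks it already imports. A plausible wrapper, assuming the hooks under test only reach for services.http:

const http = httpServiceMock.createStartContract();

const TestWrapper: React.FC = ({ children }) => (
  <KibanaContextProvider services={{ http }}>{children}</KibanaContextProvider>
);

// http.fetch is a jest mock, so responses can be queued per test,
// e.g. http.fetch.mockResolvedValueOnce(payloadTheRouteCodecAccepts);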
View file

@ -4,24 +4,28 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
getLogEntryCategoryDatasetsRequestPayloadRT,
getLogEntryCategoryDatasetsSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
}
export const callGetLogEntryCategoryDatasetsAPI = async (
sourceId: string,
startTime: number,
endTime: number
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, {
const { sourceId, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryCategoryDatasetsRequestPayloadRT.encode({
@ -36,8 +40,5 @@ export const callGetLogEntryCategoryDatasetsAPI = async (
),
});
return pipe(
getLogEntryCategoryDatasetsSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getLogEntryCategoryDatasetsSuccessReponsePayloadRT)(response);
};

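Note: decodeOrThrow(codec)(value) packages the pipe/fold/throwErrors boilerplate deleted above behind a single call. It presumably looks something like this sketch (not the actual common/runtime_types source):

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import * as rt from 'io-ts';
import { failure } from 'io-ts/lib/PathReporter';

export const decodeOrThrowSketch = <C extends rt.Mixed>(codec: C) => (
  value: unknown
): rt.TypeOf<C> =>
  pipe(
    codec.decode(value),
    fold((errors) => {
      // Collapse io-ts validation errors into a single thrown Error.
      throw new Error(failure(errors).join('\n'));
    }, identity)
  );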
View file

@ -4,26 +4,30 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
getLogEntryCategoryExamplesRequestPayloadRT,
getLogEntryCategoryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
categoryId: number;
exampleCount: number;
}
export const callGetLogEntryCategoryExamplesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
categoryId: number,
exampleCount: number
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, {
const { sourceId, startTime, endTime, categoryId, exampleCount } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryCategoryExamplesRequestPayloadRT.encode({
@ -40,8 +44,5 @@ export const callGetLogEntryCategoryExamplesAPI = async (
),
});
return pipe(
getLogEntryCategoryExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getLogEntryCategoryExamplesSuccessReponsePayloadRT)(response);
};

View file

@ -4,28 +4,31 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
getLogEntryCategoriesRequestPayloadRT,
getLogEntryCategoriesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
categoryCount: number;
datasets?: string[];
}
export const callGetTopLogEntryCategoriesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
categoryCount: number,
datasets?: string[]
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const { sourceId, startTime, endTime, categoryCount, datasets } = requestArgs;
const intervalDuration = endTime - startTime;
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, {
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryCategoriesRequestPayloadRT.encode({
@ -60,8 +63,5 @@ export const callGetTopLogEntryCategoriesAPI = async (
),
});
return pipe(
getLogEntryCategoriesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getLogEntryCategoriesSuccessReponsePayloadRT)(response);
};

View file

@ -13,6 +13,7 @@ import {
import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise';
import { callGetTopLogEntryCategoriesAPI } from './service_calls/get_top_log_entry_categories';
import { callGetLogEntryCategoryDatasetsAPI } from './service_calls/get_log_entry_category_datasets';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
type TopLogEntryCategories = GetLogEntryCategoriesSuccessResponsePayload['data']['categories'];
type LogEntryCategoryDatasets = GetLogEntryCategoryDatasetsSuccessResponsePayload['data']['datasets'];
@ -34,6 +35,7 @@ export const useLogEntryCategoriesResults = ({
sourceId: string;
startTime: number;
}) => {
const { services } = useKibanaContextForPlugin();
const [topLogEntryCategories, setTopLogEntryCategories] = useState<TopLogEntryCategories>([]);
const [logEntryCategoryDatasets, setLogEntryCategoryDatasets] = useState<
LogEntryCategoryDatasets
@ -44,11 +46,14 @@ export const useLogEntryCategoriesResults = ({
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetTopLogEntryCategoriesAPI(
sourceId,
startTime,
endTime,
categoriesCount,
filteredDatasets
{
sourceId,
startTime,
endTime,
categoryCount: categoriesCount,
datasets: filteredDatasets,
},
services.http.fetch
);
},
onResolve: ({ data: { categories } }) => {
@ -71,7 +76,10 @@ export const useLogEntryCategoriesResults = ({
{
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetLogEntryCategoryDatasetsAPI(sourceId, startTime, endTime);
return await callGetLogEntryCategoryDatasetsAPI(
{ sourceId, startTime, endTime },
services.http.fetch
);
},
onResolve: ({ data: { datasets } }) => {
setLogEntryCategoryDatasets(datasets);

View file

@ -7,6 +7,7 @@
import { useMemo, useState } from 'react';
import { LogEntryCategoryExample } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryCategoryExamplesAPI } from './service_calls/get_log_entry_category_examples';
@ -23,6 +24,8 @@ export const useLogEntryCategoryExamples = ({
sourceId: string;
startTime: number;
}) => {
const { services } = useKibanaContextForPlugin();
const [logEntryCategoryExamples, setLogEntryCategoryExamples] = useState<
LogEntryCategoryExample[]
>([]);
@ -32,11 +35,14 @@ export const useLogEntryCategoryExamples = ({
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetLogEntryCategoryExamplesAPI(
sourceId,
startTime,
endTime,
categoryId,
exampleCount
{
sourceId,
startTime,
endTime,
categoryId,
exampleCount,
},
services.http.fetch
);
},
onResolve: ({ data: { examples } }) => {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
getLogEntryAnomaliesRequestPayloadRT,
getLogEntryAnomaliesSuccessReponsePayloadRT,
@ -13,15 +13,18 @@ import {
import { decodeOrThrow } from '../../../../../common/runtime_types';
import { Sort, Pagination } from '../../../../../common/http_api/log_analysis';
export const callGetLogEntryAnomaliesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
sort: Sort,
pagination: Pagination,
datasets?: string[]
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
sort: Sort;
pagination: Pagination;
datasets?: string[];
}
export const callGetLogEntryAnomaliesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { sourceId, startTime, endTime, sort, pagination, datasets } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryAnomaliesRequestPayloadRT.encode({

View file
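Note: the same object-plus-fetch convention applies at the call sites; a condensed sketch (the Sort value, the dataset name, and the optionality of the pagination cursor are assumptions about those types):

await callGetLogEntryAnomaliesAPI(
  {
    sourceId: 'default',
    startTime: Date.now() - 60 * 60 * 1000,
    endTime: Date.now(),
    sort: { field: 'anomalyScore', direction: 'desc' } as Sort, // assumed shape
    pagination: { pageSize: 25 },
    datasets: ['nginx.access'], // hypothetical dataset name
  },
  services.http.fetch
);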

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import {
getLogEntryAnomaliesDatasetsRequestPayloadRT,
@ -12,12 +12,18 @@ import {
LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH,
} from '../../../../../common/http_api/log_analysis';
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
}
export const callGetLogEntryAnomaliesDatasetsAPI = async (
sourceId: string,
startTime: number,
endTime: number
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, {
const { sourceId, startTime, endTime } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({

View file

@ -4,27 +4,27 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
getLogEntryExamplesRequestPayloadRT,
getLogEntryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callGetLogEntryExamplesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
dataset: string,
exampleCount: number,
categoryId?: string
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
dataset: string;
exampleCount: number;
categoryId?: string;
}
export const callGetLogEntryExamplesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { sourceId, startTime, endTime, dataset, exampleCount, categoryId } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryExamplesRequestPayloadRT.encode({
@ -42,8 +42,5 @@ export const callGetLogEntryExamplesAPI = async (
),
});
return pipe(
getLogEntryExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getLogEntryExamplesSuccessReponsePayloadRT)(response);
};

View file

@ -4,25 +4,25 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import type { HttpHandler } from 'src/core/public';
import {
getLogEntryRateRequestPayloadRT,
getLogEntryRateSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
import { decodeOrThrow } from '../../../../../common/runtime_types';
export const callGetLogEntryRateAPI = async (
sourceId: string,
startTime: number,
endTime: number,
bucketDuration: number,
datasets?: string[]
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, {
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
bucketDuration: number;
datasets?: string[];
}
export const callGetLogEntryRateAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => {
const { sourceId, startTime, endTime, bucketDuration, datasets } = requestArgs;
const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, {
method: 'POST',
body: JSON.stringify(
getLogEntryRateRequestPayloadRT.encode({
@ -38,8 +38,5 @@ export const callGetLogEntryRateAPI = async (
})
),
});
return pipe(
getLogEntryRateSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
return decodeOrThrow(getLogEntryRateSuccessReponsePayloadRT)(response);
};

View file

@ -16,6 +16,7 @@ import {
GetLogEntryAnomaliesDatasetsSuccessResponsePayload,
LogEntryAnomaly,
} from '../../../../common/http_api/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>;
@ -161,6 +162,8 @@ export const useLogEntryAnomaliesResults = ({
};
};
const { services } = useKibanaContextForPlugin();
const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer);
const [logEntryAnomalies, setLogEntryAnomalies] = useState<LogEntryAnomalies>([]);
@ -177,15 +180,18 @@ export const useLogEntryAnomaliesResults = ({
filteredDatasets: queryFilteredDatasets,
} = reducerState;
return await callGetLogEntryAnomaliesAPI(
sourceId,
queryStartTime,
queryEndTime,
sortOptions,
{
...paginationOptions,
cursor: paginationCursor,
sourceId,
startTime: queryStartTime,
endTime: queryEndTime,
sort: sortOptions,
pagination: {
...paginationOptions,
cursor: paginationCursor,
},
datasets: queryFilteredDatasets,
},
queryFilteredDatasets
services.http.fetch
);
},
onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
@ -286,7 +292,10 @@ export const useLogEntryAnomaliesResults = ({
{
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetLogEntryAnomaliesDatasetsAPI(sourceId, startTime, endTime);
return await callGetLogEntryAnomaliesDatasetsAPI(
{ sourceId, startTime, endTime },
services.http.fetch
);
},
onResolve: ({ data: { datasets } }) => {
setLogEntryAnomaliesDatasets(datasets);

View file

@ -7,6 +7,7 @@
import { useMemo, useState } from 'react';
import { LogEntryExample } from '../../../../common/http_api';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples';
@ -25,6 +26,7 @@ export const useLogEntryExamples = ({
startTime: number;
categoryId?: string;
}) => {
const { services } = useKibanaContextForPlugin();
const [logEntryExamples, setLogEntryExamples] = useState<LogEntryExample[]>([]);
const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise(
@ -32,12 +34,15 @@ export const useLogEntryExamples = ({
cancelPreviousOn: 'creation',
createPromise: async () => {
return await callGetLogEntryExamplesAPI(
sourceId,
startTime,
endTime,
dataset,
exampleCount,
categoryId
{
sourceId,
startTime,
endTime,
dataset,
exampleCount,
categoryId,
},
services.http.fetch
);
},
onResolve: ({ data: { examples } }) => {

View file

@ -12,6 +12,7 @@ import {
LogEntryRatePartition,
LogEntryRateAnomaly,
} from '../../../../common/http_api/log_analysis';
import { useKibanaContextForPlugin } from '../../../hooks/use_kibana';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { callGetLogEntryRateAPI } from './service_calls/get_log_entry_rate';
@ -49,6 +50,7 @@ export const useLogEntryRateResults = ({
bucketDuration: number;
filteredDatasets?: string[];
}) => {
const { services } = useKibanaContextForPlugin();
const [logEntryRate, setLogEntryRate] = useState<LogEntryRateResults | null>(null);
const [getLogEntryRateRequest, getLogEntryRate] = useTrackedPromise(
@ -56,11 +58,14 @@ export const useLogEntryRateResults = ({
cancelPreviousOn: 'resolution',
createPromise: async () => {
return await callGetLogEntryRateAPI(
sourceId,
startTime,
endTime,
bucketDuration,
filteredDatasets
{
sourceId,
startTime,
endTime,
bucketDuration,
datasets: filteredDatasets,
},
services.http.fetch
);
},
onResolve: ({ data }) => {

View file

@ -5,6 +5,7 @@
*/
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import { HttpHandler } from 'src/core/public';
import {
INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH,
Metric,
@ -16,8 +17,8 @@ import {
getMetricsHostsAnomaliesSuccessReponsePayloadRT,
} from '../../../../../common/http_api/infra_ml';
import { useTrackedPromise } from '../../../../utils/use_tracked_promise';
import { npStart } from '../../../../legacy_singletons';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana';
export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>;
@ -149,6 +150,7 @@ export const useMetricsHostsAnomaliesResults = ({
onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void;
filteredDatasets?: string[];
}) => {
const { services } = useKibanaContextForPlugin();
const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
return {
...stateDefaults,
@ -177,15 +179,18 @@ export const useMetricsHostsAnomaliesResults = ({
paginationCursor,
} = reducerState;
return await callGetMetricHostsAnomaliesAPI(
sourceId,
queryStartTime,
queryEndTime,
metric,
sortOptions,
{
...paginationOptions,
cursor: paginationCursor,
}
sourceId,
startTime: queryStartTime,
endTime: queryEndTime,
metric,
sort: sortOptions,
pagination: {
...paginationOptions,
cursor: paginationCursor,
},
},
services.http.fetch
);
},
onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
@ -288,15 +293,21 @@ export const useMetricsHostsAnomaliesResults = ({
};
};
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
metric: Metric;
sort: Sort;
pagination: Pagination;
}
export const callGetMetricHostsAnomaliesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
metric: Metric,
sort: Sort,
pagination: Pagination
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, {
const { sourceId, startTime, endTime, metric, sort, pagination } = requestArgs;
const response = await fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, {
method: 'POST',
body: JSON.stringify(
getMetricsHostsAnomaliesRequestPayloadRT.encode({

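Note: worth spelling out why the object form matters most in these anomaly calls: the old signature took sourceId, startTime, endTime, metric, sort and pagination positionally, and the two epoch-millisecond numbers could be transposed without any type error. The labelled form makes such a swap obvious in review:

// Before (positional; compiled even with start and end swapped):
//   callGetMetricHostsAnomaliesAPI(sourceId, endTime, startTime, metric, sort, pagination);
// After (labelled; assuming the same variables are in scope):
await callGetMetricHostsAnomaliesAPI(
  { sourceId, startTime, endTime, metric, sort, pagination },
  services.http.fetch
);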
View file

@ -5,6 +5,7 @@
*/
import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
import { HttpHandler } from 'src/core/public';
import {
Sort,
Pagination,
@ -16,8 +17,8 @@ import {
Metric,
} from '../../../../../common/http_api/infra_ml';
import { useTrackedPromise } from '../../../../utils/use_tracked_promise';
import { npStart } from '../../../../legacy_singletons';
import { decodeOrThrow } from '../../../../../common/runtime_types';
import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana';
export type SortOptions = Sort;
export type PaginationOptions = Pick<Pagination, 'pageSize'>;
@ -149,6 +150,7 @@ export const useMetricsK8sAnomaliesResults = ({
onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void;
filteredDatasets?: string[];
}) => {
const { services } = useKibanaContextForPlugin();
const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
return {
...stateDefaults,
@ -178,16 +180,19 @@ export const useMetricsK8sAnomaliesResults = ({
filteredDatasets: queryFilteredDatasets,
} = reducerState;
return await callGetMetricsK8sAnomaliesAPI(
sourceId,
queryStartTime,
queryEndTime,
metric,
sortOptions,
{
...paginationOptions,
cursor: paginationCursor,
sourceId,
startTime: queryStartTime,
endTime: queryEndTime,
metric,
sort: sortOptions,
pagination: {
...paginationOptions,
cursor: paginationCursor,
},
datasets: queryFilteredDatasets,
},
queryFilteredDatasets
services.http.fetch
);
},
onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
@ -290,16 +295,22 @@ export const useMetricsK8sAnomaliesResults = ({
};
};
interface RequestArgs {
sourceId: string;
startTime: number;
endTime: number;
metric: Metric;
sort: Sort;
pagination: Pagination;
datasets?: string[];
}
export const callGetMetricsK8sAnomaliesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
metric: Metric,
sort: Sort,
pagination: Pagination,
datasets?: string[]
requestArgs: RequestArgs,
fetch: HttpHandler
) => {
const response = await npStart.http.fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, {
const { sourceId, startTime, endTime, metric, sort, pagination, datasets } = requestArgs;
const response = await fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, {
method: 'POST',
body: JSON.stringify(
getMetricsK8sAnomaliesRequestPayloadRT.encode({

View file

@ -9,7 +9,6 @@ import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/public';
import { createMetricThresholdAlertType } from './alerting/metric_threshold';
import { createInventoryMetricAlertType } from './alerting/inventory';
import { getAlertType as getLogsAlertType } from './alerting/log_threshold';
import { registerStartSingleton } from './legacy_singletons';
import { registerFeatures } from './register_feature';
import {
InfraClientSetupDeps,
@ -98,9 +97,7 @@ export class Plugin implements InfraClientPluginClass {
});
}
start(core: InfraClientCoreStart, _plugins: InfraClientStartDeps) {
registerStartSingleton(core);
}
start(_core: InfraClientCoreStart, _plugins: InfraClientStartDeps) {}
stop() {}
}
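
Note: with registerStartSingleton gone, start() has nothing left to register; the hooks obtain http at mount time instead. Presumably the application mount wires core's start services into the context those hooks read, roughly:

// Sketch of the wiring this commit relies on (the actual mount code lives
// elsewhere in the plugin and is not part of this diff):
//   <KibanaContextProvider services={{ ...coreStart, ...pluginsStart }}>
//     <App />
//   </KibanaContextProvider>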