diff --git a/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts b/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts index 207d8a722a8c..ea50ea6f11f3 100644 --- a/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts +++ b/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { INFRA_ALERT_PREVIEW_PATH, METRIC_THRESHOLD_ALERT_TYPE_ID, @@ -22,7 +22,7 @@ export async function getAlertPreview({ params, alertType, }: { - fetch: HttpSetup['fetch']; + fetch: HttpHandler; params: AlertPreviewRequestParams; alertType: PreviewableAlertTypes; }): Promise { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts index 6fa2ac175ace..4fdd6bdd282b 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts @@ -5,21 +5,25 @@ */ import * as rt from 'io-ts'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getDatafeedId, getJobId } from '../../../../../common/log_analysis'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface DeleteJobsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callDeleteJobs = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: DeleteJobsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time - const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { + const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', { method: 'POST', body: JSON.stringify( deleteJobsRequestPayloadRT.encode({ @@ -28,28 +32,29 @@ export const callDeleteJobs = async ( ), }); - return pipe( - deleteJobsResponsePayloadRT.decode(deleteJobsResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse); }; -export const callGetJobDeletionTasks = async () => { - const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); +export const callGetJobDeletionTasks = async (fetch: HttpHandler) => { + const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks'); - return pipe( - getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse); }; +interface StopDatafeedsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} + export const callStopDatafeeds = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: StopDatafeedsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // Stop datafeed due to 
https://github.com/elastic/kibana/issues/44652 - const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { + const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', { method: 'POST', body: JSON.stringify( stopDatafeedsRequestPayloadRT.encode({ @@ -58,10 +63,7 @@ export const callStopDatafeeds = async ( ), }); - return pipe( - stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse); }; export const deleteJobsRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts index 7441c0ab7d34..7cb477dbe5b3 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts @@ -4,21 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobId, jobCustomSettingsRT } from '../../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callJobsSummaryAPI = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { + const { spaceId, sourceId, jobTypes } = requestArgs; + const response = await fetch('/api/ml/jobs/jobs_summary', { method: 'POST', body: JSON.stringify( fetchJobStatusRequestPayloadRT.encode({ @@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async ( }) ), }); - return pipe( - fetchJobStatusResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response); }; export const fetchJobStatusRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts index b6b40d6dc651..2bf18d4e52c7 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts @@ -4,24 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { jobCustomSettingsRT } from '../../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callGetMlModuleAPI = async (moduleId: string) => { - const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { +export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => { + const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, { method: 'GET', }); - return pipe( - getMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getMlModuleResponsePayloadRT)(response); }; const jobDefinitionRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts index 7c8d63374924..1f203ef9618b 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts @@ -4,27 +4,38 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callSetupMlModuleAPI = async ( - moduleId: string, - start: number | undefined, - end: number | undefined, - spaceId: string, - sourceId: string, - indexPattern: string, - jobOverrides: SetupMlModuleJobOverrides[] = [], - datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], - query?: object -) => { - const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, { +interface RequestArgs { + moduleId: string; + start?: number; + end?: number; + spaceId: string; + sourceId: string; + indexPattern: string; + jobOverrides?: SetupMlModuleJobOverrides[]; + datafeedOverrides?: SetupMlModuleDatafeedOverrides[]; + query?: object; +} + +export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern, + jobOverrides = [], + datafeedOverrides = [], + query, + } = requestArgs; + + const response = await fetch(`/api/ml/modules/setup/${moduleId}`, { method: 'POST', body: JSON.stringify( setupMlModuleRequestPayloadRT.encode({ @@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async ( ), }); - return pipe( - setupMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(setupMlModuleResponsePayloadRT)(response); }; const setupMlModuleTimeParamsRT = rt.partial({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts 
b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts index 6c9d5e439d35..ec08d3ac107e 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts @@ -4,21 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ +import type { HttpHandler } from 'src/core/public'; import { LOG_ANALYSIS_VALIDATE_DATASETS_PATH, validateLogEntryDatasetsRequestPayloadRT, validateLogEntryDatasetsResponsePayloadRT, } from '../../../../../common/http_api'; import { decodeOrThrow } from '../../../../../common/runtime_types'; -import { npStart } from '../../../../legacy_singletons'; -export const callValidateDatasetsAPI = async ( - indices: string[], - timestampField: string, - startTime: number, - endTime: number -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, { +interface RequestArgs { + indices: string[]; + timestampField: string; + startTime: number; + endTime: number; +} + +export const callValidateDatasetsAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { indices, timestampField, startTime, endTime } = requestArgs; + const response = await fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, { method: 'POST', body: JSON.stringify( validateLogEntryDatasetsRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts index bbef7d201045..465d09a744b1 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts @@ -4,10 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; + import { LOG_ANALYSIS_VALIDATE_INDICES_PATH, ValidationIndicesFieldSpecification, @@ -15,19 +13,19 @@ import { validationIndicesResponsePayloadRT, } from '../../../../../common/http_api'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callValidateIndicesAPI = async ( - indices: string[], - fields: ValidationIndicesFieldSpecification[] -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, { +interface RequestArgs { + indices: string[]; + fields: ValidationIndicesFieldSpecification[]; +} + +export const callValidateIndicesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { indices, fields } = requestArgs; + const response = await fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, { method: 'POST', body: JSON.stringify(validationIndicesRequestPayloadRT.encode({ data: { indices, fields } })), }); - return pipe( - validationIndicesResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(validationIndicesResponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx index 9116900ec219..74b316f78259 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx @@ -6,18 +6,16 @@ import createContainer from 'constate'; import { useMemo, useState, useEffect } from 'react'; -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; -import { npStart } from '../../../legacy_singletons'; import { getMlCapabilitiesResponsePayloadRT, GetMlCapabilitiesResponsePayload, } from './api/ml_api_types'; -import { throwErrors, createPlainError } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export const useLogAnalysisCapabilities = () => { + const { services } = useKibanaContextForPlugin(); const [mlCapabilities, setMlCapabilities] = useState( initialMlCapabilities ); @@ -26,12 +24,9 @@ export const useLogAnalysisCapabilities = () => { { cancelPreviousOn: 'resolution', createPromise: async () => { - const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); + const rawResponse = await services.http.fetch('/api/ml/ml_capabilities'); - return pipe( - getMlCapabilitiesResponsePayloadRT.decode(rawResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getMlCapabilitiesResponsePayloadRT)(rawResponse); }, onResolve: (response) => { setMlCapabilities(response); diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx index 522616f83d0c..ec5e879131aa 100644 --- 
a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx @@ -3,17 +3,18 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - +import type { HttpHandler } from 'src/core/public'; import { getJobId } from '../../../../common/log_analysis'; import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; export const cleanUpJobsAndDatafeeds = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { try { - await callStopDatafeeds(spaceId, sourceId, jobTypes); + await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch); } catch (err) { // Proceed only if datafeed has been deleted or didn't exist in the first place if (err?.res?.status !== 404) { @@ -21,27 +22,29 @@ export const cleanUpJobsAndDatafeeds = async ( } } - return await deleteJobs(spaceId, sourceId, jobTypes); + return await deleteJobs(spaceId, sourceId, jobTypes, fetch); }; const deleteJobs = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { - const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); - await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); + const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch); + await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch); return deleteJobsResponse; }; const waitUntilJobsAreDeleted = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); while (true) { - const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); + const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch); const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); if (needToWait) { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx index 79768302a731..27ef0039ae49 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo } from 'react'; import { DatasetFilter } from '../../../../common/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useModuleStatus } from './log_analysis_module_status'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; @@ -17,6 +18,7 @@ export const useLogAnalysisModule = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const { spaceId, sourceId, timestampField } = sourceConfiguration; const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); @@ -25,7 +27,7 @@ export const useLogAnalysisModule = ({ cancelPreviousOn: 'resolution', createPromise: async () => { dispatchModuleStatus({ type: 'fetchingJobStatuses' }); - return await moduleDescriptor.getJobSummary(spaceId, sourceId); + return 
await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch); }, onResolve: (jobResponse) => { dispatchModuleStatus({ @@ -52,13 +54,23 @@ export const useLogAnalysisModule = ({ datasetFilter: DatasetFilter ) => { dispatchModuleStatus({ type: 'startedSetup' }); - const setupResult = await moduleDescriptor.setUpModule(start, end, datasetFilter, { - indices: selectedIndices, - sourceId, + const setupResult = await moduleDescriptor.setUpModule( + start, + end, + datasetFilter, + { + indices: selectedIndices, + sourceId, + spaceId, + timestampField, + }, + services.http.fetch + ); + const jobSummaries = await moduleDescriptor.getJobSummary( spaceId, - timestampField, - }); - const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId); + sourceId, + services.http.fetch + ); return { setupResult, jobSummaries }; }, onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { @@ -82,7 +94,7 @@ export const useLogAnalysisModule = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.cleanUpModule(spaceId, sourceId); + return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch); }, }, [spaceId, sourceId] diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx index 1f643d0e5eb3..7a5c1d354dc3 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo, useState } from 'react'; import { getJobId } from '../../../../common/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { JobSummary } from './api/ml_get_jobs_summary_api'; import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; @@ -18,6 +19,7 @@ export const useLogAnalysisModuleDefinition = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const [moduleDefinition, setModuleDefinition] = useState< GetMlModuleResponsePayload | undefined >(); @@ -40,7 +42,7 @@ export const useLogAnalysisModuleDefinition = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.getModuleDefinition(); + return await moduleDescriptor.getModuleDefinition(services.http.fetch); }, onResolve: (response) => { setModuleDefinition(response); diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts index ba355ad195b1..c42704860b03 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import type { HttpHandler } from 'src/core/public'; import { ValidateLogEntryDatasetsResponsePayload, ValidationIndicesResponsePayload, @@ -23,24 +24,35 @@ export interface ModuleDescriptor { jobTypes: JobType[]; bucketSpan: number; getJobIds: (spaceId: string, sourceId: string) => Record; - getJobSummary: (spaceId: string, sourceId: string) => Promise; - getModuleDefinition: () => Promise; + getJobSummary: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; + getModuleDefinition: (fetch: HttpHandler) => Promise; setUpModule: ( start: number | undefined, end: number | undefined, datasetFilter: DatasetFilter, - sourceConfiguration: ModuleSourceConfiguration + sourceConfiguration: ModuleSourceConfiguration, + fetch: HttpHandler ) => Promise; - cleanUpModule: (spaceId: string, sourceId: string) => Promise; + cleanUpModule: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; validateSetupIndices: ( indices: string[], - timestampField: string + timestampField: string, + fetch: HttpHandler ) => Promise; validateSetupDatasets: ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => Promise; } diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts index e6fe8f4e92cc..750a7104a3a9 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts @@ -18,6 +18,7 @@ import { ValidationIndicesError, ValidationUIError, } from '../../../components/logging/log_analysis_setup/initial_configuration_step'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; @@ -43,6 +44,7 @@ export const useAnalysisSetupState = ({ setUpModule, sourceConfiguration, }: AnalysisSetupStateArguments) => { + const { services } = useKibanaContextForPlugin(); const [startTime, setStartTime] = useState(Date.now() - fourWeeksInMs); const [endTime, setEndTime] = useState(undefined); @@ -158,7 +160,8 @@ export const useAnalysisSetupState = ({ createPromise: async () => { return await validateSetupIndices( sourceConfiguration.indices, - sourceConfiguration.timestampField + sourceConfiguration.timestampField, + services.http.fetch ); }, onResolve: ({ data: { errors } }) => { @@ -183,7 +186,8 @@ export const useAnalysisSetupState = ({ validIndexNames, sourceConfiguration.timestampField, startTime ?? 0, - endTime ?? Date.now() + endTime ?? 
Date.now(), + services.http.fetch ); }, onResolve: ({ data: { datasets } }) => { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts index 9682b3e74db3..46b28e091cc5 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; +import type { HttpHandler } from 'src/core/public'; import { bucketSpan, categoriesMessageField, @@ -42,22 +43,26 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryCategoriesJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: logEntryCategoriesJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await callGetMlModuleAPI(moduleId, fetch); }; const setUpModule = async ( start: number | undefined, end: number | undefined, datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration + { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, + fetch: HttpHandler ) => { const indexNamePattern = indices.join(','); const jobOverrides = [ @@ -101,46 +106,59 @@ const setUpModule = async ( }; return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - [], - query + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + query, + }, + fetch ); }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes, fetch); }; -const validateSetupIndices = async (indices: string[], timestampField: string) => { - return await callValidateIndicesAPI(indices, [ +const validateSetupIndices = async ( + indices: string[], + timestampField: string, + fetch: HttpHandler +) => { + return await callValidateIndicesAPI( { - name: timestampField, - validTypes: ['date'], + indices, + fields: [ + { + name: timestampField, + validTypes: ['date'], + }, + { + name: partitionField, + validTypes: ['keyword'], + }, + { + name: categoriesMessageField, + validTypes: ['text'], + }, + ], }, - { - name: partitionField, - validTypes: ['keyword'], - }, - { - name: categoriesMessageField, - validTypes: ['text'], - }, - ]); + fetch + ); }; const validateSetupDatasets = async ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => { - return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); + return await callValidateDatasetsAPI({ indices, 
timestampField, startTime, endTime }, fetch); }; export const logEntryCategoriesModule: ModuleDescriptor = { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts index 001174a2b755..b97ec55105f5 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; +import type { HttpHandler } from 'src/core/public'; import { bucketSpan, DatasetFilter, @@ -41,22 +42,26 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryRateJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: logEntryRateJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await callGetMlModuleAPI(moduleId, fetch); }; const setUpModule = async ( start: number | undefined, end: number | undefined, datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration + { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, + fetch: HttpHandler ) => { const indexNamePattern = indices.join(','); const jobOverrides = [ @@ -93,42 +98,55 @@ const setUpModule = async ( : undefined; return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - [], - query + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + query, + }, + fetch ); }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes, fetch); }; -const validateSetupIndices = async (indices: string[], timestampField: string) => { - return await callValidateIndicesAPI(indices, [ +const validateSetupIndices = async ( + indices: string[], + timestampField: string, + fetch: HttpHandler +) => { + return await callValidateIndicesAPI( { - name: timestampField, - validTypes: ['date'], + indices, + fields: [ + { + name: timestampField, + validTypes: ['date'], + }, + { + name: partitionField, + validTypes: ['keyword'], + }, + ], }, - { - name: partitionField, - validTypes: ['keyword'], - }, - ]); + fetch + ); }; const validateSetupDatasets = async ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => { - return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); + return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch); }; export const logEntryRateModule: ModuleDescriptor = { diff --git 
a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts index 2a19a8289242..3bbd86cb0ef7 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts @@ -4,12 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_PATH, @@ -18,11 +15,11 @@ import { logEntriesResponseRT, } from '../../../../../common/http_api'; -export const fetchLogEntries = async (requestArgs: LogEntriesRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_PATH, { +export const fetchLogEntries = async (requestArgs: LogEntriesRequest, fetch: HttpHandler) => { + const response = await fetch(LOG_ENTRIES_PATH, { method: 'POST', body: JSON.stringify(logEntriesRequestRT.encode(requestArgs)), }); - return pipe(logEntriesResponseRT.decode(response), fold(throwErrors(createPlainError), identity)); + return decodeOrThrow(logEntriesResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts index 5fde01e458e3..d459fba6cf95 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts @@ -4,12 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_ITEM_PATH, @@ -18,14 +15,14 @@ import { logEntriesItemResponseRT, } from '../../../../../common/http_api'; -export const fetchLogEntriesItem = async (requestArgs: LogEntriesItemRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_ITEM_PATH, { +export const fetchLogEntriesItem = async ( + requestArgs: LogEntriesItemRequest, + fetch: HttpHandler +) => { + const response = await fetch(LOG_ENTRIES_ITEM_PATH, { method: 'POST', body: JSON.stringify(logEntriesItemRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesItemResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesItemResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts b/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts index d5b2a0aaa61c..4c8c610794b2 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts @@ -14,6 +14,7 @@ import { LogEntriesBaseRequest, } from '../../../../common/http_api'; import { fetchLogEntries } from './api/fetch_log_entries'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; const DESIRED_BUFFER_PAGES = 2; const LIVE_STREAM_INTERVAL = 5000; @@ -144,6 +145,7 @@ const useFetchEntriesEffect = ( dispatch: Dispatch, props: LogEntriesProps ) => { + const { services } = useKibanaContextForPlugin(); const [prevParams, cachePrevParams] = useState(); const [startedStreaming, setStartedStreaming] = useState(false); @@ -172,7 +174,7 @@ const useFetchEntriesEffect = ( before: 'last', }; - const { data: payload } = await fetchLogEntries(fetchArgs); + const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch); dispatch({ type: Action.ReceiveNewEntries, payload }); // Move position to the bottom if it's the first load. @@ -228,7 +230,7 @@ const useFetchEntriesEffect = ( after: state.bottomCursor, }; - const { data: payload } = await fetchLogEntries(fetchArgs); + const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch); dispatch({ type: getEntriesBefore ? 
Action.ReceiveEntriesBefore : Action.ReceiveEntriesAfter, diff --git a/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx b/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx index 0489892e58f2..9ed2f5ad175c 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx @@ -9,6 +9,7 @@ import { isString } from 'lodash'; import React, { useContext, useEffect, useMemo, useState } from 'react'; import { LogEntriesItem } from '../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; import { UrlStateContainer } from '../../utils/url_state'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { fetchLogEntriesItem } from './log_entries/api/fetch_log_entries_item'; @@ -26,6 +27,7 @@ export interface FlyoutOptionsUrlState { } export const useLogFlyout = () => { + const { services } = useKibanaContextForPlugin(); const { sourceId } = useLogSourceContext(); const [flyoutVisible, setFlyoutVisibility] = useState(false); const [flyoutId, setFlyoutId] = useState(null); @@ -39,7 +41,7 @@ export const useLogFlyout = () => { if (!flyoutId) { return; } - return await fetchLogEntriesItem({ sourceId, id: flyoutId }); + return await fetchLogEntriesItem({ sourceId, id: flyoutId }, services.http.fetch); }, onResolve: (response) => { if (response) { diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts index 030a9d180c7b..25865a30467f 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts @@ -4,12 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_HIGHLIGHTS_PATH, @@ -18,14 +15,14 @@ import { logEntriesHighlightsResponseRT, } from '../../../../../common/http_api'; -export const fetchLogEntriesHighlights = async (requestArgs: LogEntriesHighlightsRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, { +export const fetchLogEntriesHighlights = async ( + requestArgs: LogEntriesHighlightsRequest, + fetch: HttpHandler +) => { + const response = await fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, { method: 'POST', body: JSON.stringify(logEntriesHighlightsRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesHighlightsResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesHighlightsResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts index bda8f535549c..1cf95bc08a52 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts @@ -3,11 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; + +import type { HttpHandler } from 'src/core/public'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, @@ -17,15 +15,13 @@ import { } from '../../../../../common/http_api'; export const fetchLogSummaryHighlights = async ( - requestArgs: LogEntriesSummaryHighlightsRequest + requestArgs: LogEntriesSummaryHighlightsRequest, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, { + const response = await fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, { method: 'POST', body: JSON.stringify(logEntriesSummaryHighlightsRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesSummaryHighlightsResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesSummaryHighlightsResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx index dbeb8c71c11e..b4edebe8f820 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx @@ -10,6 +10,7 @@ import { TimeKey } from '../../../../common/time'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { fetchLogEntriesHighlights } from './api/fetch_log_entries_highlights'; import { LogEntry, LogEntriesHighlightsResponse } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export const useLogEntryHighlights = ( sourceId: string, @@ -21,6 +22,7 @@ export const useLogEntryHighlights = ( filterQuery: string | null, highlightTerms: string[] ) => { + const { services } = useKibanaContextForPlugin(); const [logEntryHighlights, setLogEntryHighlights] = useState< LogEntriesHighlightsResponse['data'] >([]); @@ -32,15 +34,18 @@ export const useLogEntryHighlights = ( throw new Error('Skipping request: Insufficient parameters'); } - return await fetchLogEntriesHighlights({ - sourceId, - startTimestamp, - endTimestamp, - center: centerPoint, - size, - query: filterQuery || undefined, - highlightTerms, - }); + return await fetchLogEntriesHighlights( + { + sourceId, + startTimestamp, + endTimestamp, + center: centerPoint, + size, + query: filterQuery || undefined, + highlightTerms, + }, + services.http.fetch + ); }, onResolve: (response) => { setLogEntryHighlights(response.data); diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts index 6d982ee004cc..14366891dbf5 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts @@ -11,6 +11,7 @@ import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights'; import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api'; import { useBucketSize } from 
'../log_summary/bucket_size'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export const useLogSummaryHighlights = ( sourceId: string, @@ -20,6 +21,7 @@ export const useLogSummaryHighlights = ( filterQuery: string | null, highlightTerms: string[] ) => { + const { services } = useKibanaContextForPlugin(); const [logSummaryHighlights, setLogSummaryHighlights] = useState< LogEntriesSummaryHighlightsResponse['data'] >([]); @@ -34,14 +36,17 @@ export const useLogSummaryHighlights = ( throw new Error('Skipping request: Insufficient parameters'); } - return await fetchLogSummaryHighlights({ - sourceId, - startTimestamp, - endTimestamp, - bucketSize, - query: filterQuery, - highlightTerms, - }); + return await fetchLogSummaryHighlights( + { + sourceId, + startTimestamp, + endTimestamp, + bucketSize, + query: filterQuery, + highlightTerms, + }, + services.http.fetch + ); }, onResolve: (response) => { setLogSummaryHighlights(response.data); diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts index e847302a6d36..c9ced069473a 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts @@ -4,17 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { getLogSourceConfigurationPath, getLogSourceConfigurationSuccessResponsePayloadRT, } from '../../../../../common/http_api/log_sources'; import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callFetchLogSourceConfigurationAPI = async ( - sourceId: string, - fetch: HttpSetup['fetch'] -) => { +export const callFetchLogSourceConfigurationAPI = async (sourceId: string, fetch: HttpHandler) => { const response = await fetch(getLogSourceConfigurationPath(sourceId), { method: 'GET', }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts index 20e67a0a59c9..5bc409115e59 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { getLogSourceStatusPath, getLogSourceStatusSuccessResponsePayloadRT, } from '../../../../../common/http_api/log_sources'; import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpSetup['fetch']) => { +export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpHandler) => { const response = await fetch(getLogSourceStatusPath(sourceId), { method: 'GET', }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts b/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts index 4361e4bef827..33212c5d3b0f 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { getLogSourceConfigurationPath, patchLogSourceConfigurationSuccessResponsePayloadRT, @@ -16,7 +16,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types'; export const callPatchLogSourceConfigurationAPI = async ( sourceId: string, patchedProperties: LogSourceConfigurationPropertiesPatch, - fetch: HttpSetup['fetch'] + fetch: HttpHandler ) => { const response = await fetch(getLogSourceConfigurationPath(sourceId), { method: 'PATCH', diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts b/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts index 51b32a4c4eac..e2dd4c523c03 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts @@ -7,7 +7,7 @@ import createContainer from 'constate'; import { useCallback, useMemo, useState } from 'react'; import { useMountedState } from 'react-use'; -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { LogSourceConfiguration, LogSourceConfigurationProperties, @@ -26,13 +26,7 @@ export { LogSourceStatus, }; -export const useLogSource = ({ - sourceId, - fetch, -}: { - sourceId: string; - fetch: HttpSetup['fetch']; -}) => { +export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: HttpHandler }) => { const getIsMounted = useMountedState(); const [sourceConfiguration, setSourceConfiguration] = useState< LogSourceConfiguration | undefined diff --git a/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts b/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts index b414408512db..4a6da6063e96 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts @@ -9,6 +9,7 @@ import { esKuery } from '../../../../../../../src/plugins/data/public'; import { fetchLogEntries } from '../log_entries/api/fetch_log_entries'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { LogEntry, LogEntriesCursor } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; interface LogStreamProps { sourceId: string; @@ -31,6 +32,7 @@ export function useLogStream({ query, center, }: 
LogStreamProps): LogStreamState { + const { services } = useKibanaContextForPlugin(); const [entries, setEntries] = useState([]); const parsedQuery = useMemo(() => { @@ -47,13 +49,16 @@ export function useLogStream({ setEntries([]); const fetchPosition = center ? { center } : { before: 'last' }; - return fetchLogEntries({ - sourceId, - startTimestamp, - endTimestamp, - query: parsedQuery, - ...fetchPosition, - }); + return fetchLogEntries( + { + sourceId, + startTimestamp, + endTimestamp, + query: parsedQuery, + ...fetchPosition, + }, + services.http.fetch + ); }, onResolve: ({ data }) => { setEntries(data.entries); diff --git a/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts b/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts index f74f0dc0e311..2be6538e21eb 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts @@ -4,11 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import type { HttpHandler } from 'src/core/public'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_SUMMARY_PATH, @@ -17,14 +14,14 @@ import { logEntriesSummaryResponseRT, } from '../../../../../common/http_api'; -export const fetchLogSummary = async (requestArgs: LogEntriesSummaryRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_PATH, { +export const fetchLogSummary = async ( + requestArgs: LogEntriesSummaryRequest, + fetch: HttpHandler +) => { + const response = await fetch(LOG_ENTRIES_SUMMARY_PATH, { method: 'POST', body: JSON.stringify(logEntriesSummaryRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesSummaryResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesSummaryResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx index 73d0e5efdf06..652ea8c71dc4 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx @@ -5,6 +5,8 @@ */ import { renderHook } from '@testing-library/react-hooks'; +// We are using this inside a `jest.mock` call. 
Jest requires dynamic dependencies to be prefixed with `mock` +import { coreMock as mockCoreMock } from 'src/core/public/mocks'; import { useLogSummary } from './log_summary'; @@ -16,6 +18,10 @@ import { datemathToEpochMillis } from '../../../utils/datemath'; jest.mock('./api/fetch_log_summary', () => ({ fetchLogSummary: jest.fn() })); const fetchLogSummaryMock = fetchLogSummary as jest.MockedFunction; +jest.mock('../../../hooks/use_kibana', () => ({ + useKibanaContextForPlugin: () => ({ services: mockCoreMock.createStart() }), +})); + describe('useLogSummary hook', () => { beforeEach(() => { fetchLogSummaryMock.mockClear(); @@ -53,7 +59,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ sourceId: 'INITIAL_SOURCE_ID', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(firstMockResponse.data.buckets); @@ -64,7 +71,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ sourceId: 'CHANGED_SOURCE_ID', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(secondMockResponse.data.buckets); }); @@ -96,7 +104,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ query: 'INITIAL_FILTER_QUERY', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(firstMockResponse.data.buckets); @@ -107,7 +116,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ query: 'CHANGED_FILTER_QUERY', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(secondMockResponse.data.buckets); }); @@ -132,7 +142,8 @@ describe('useLogSummary hook', () => { expect.objectContaining({ startTimestamp: firstRange.startTimestamp, endTimestamp: firstRange.endTimestamp, - }) + }), + expect.anything() ); const secondRange = createMockDateRange('now-20s', 'now'); @@ -145,7 +156,8 @@ describe('useLogSummary hook', () => { expect.objectContaining({ startTimestamp: secondRange.startTimestamp, endTimestamp: secondRange.endTimestamp, - }) + }), + expect.anything() ); }); }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx index b83be7765686..be0d87f5d267 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx @@ -10,6 +10,7 @@ import { useCancellableEffect } from '../../../utils/cancellable_effect'; import { fetchLogSummary } from './api/fetch_log_summary'; import { LogEntriesSummaryResponse } from '../../../../common/http_api'; import { useBucketSize } from './bucket_size'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets']; @@ -19,6 +20,7 @@ export const useLogSummary = ( endTimestamp: number | null, filterQuery: string | null ) => { + const { services } = useKibanaContextForPlugin(); const [logSummaryBuckets, setLogSummaryBuckets] = useState([]); const bucketSize = useBucketSize(startTimestamp, endTimestamp); @@ -28,13 +30,16 @@ export const useLogSummary = ( return; } - fetchLogSummary({ - sourceId, - startTimestamp, - endTimestamp, - bucketSize, - query: filterQuery, - }).then((response) => { + fetchLogSummary( + { + sourceId, + startTimestamp, + endTimestamp, + bucketSize, + 
query: filterQuery, + }, + services.http.fetch + ).then((response) => { if (!getIsCancelled()) { setLogSummaryBuckets(response.data.buckets); } diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts index 23fa338e74f1..fa7d8f14c6a9 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts @@ -5,21 +5,24 @@ */ import * as rt from 'io-ts'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../legacy_singletons'; - +import type { HttpHandler } from 'src/core/public'; import { getDatafeedId, getJobId } from '../../../../common/infra_ml'; -import { throwErrors, createPlainError } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; + +interface DeleteJobsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callDeleteJobs = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: DeleteJobsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time - const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { + const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', { method: 'POST', body: JSON.stringify( deleteJobsRequestPayloadRT.encode({ @@ -28,28 +31,29 @@ export const callDeleteJobs = async ( ), }); - return pipe( - deleteJobsResponsePayloadRT.decode(deleteJobsResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse); }; -export const callGetJobDeletionTasks = async () => { - const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); +export const callGetJobDeletionTasks = async (fetch: HttpHandler) => { + const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks'); - return pipe( - getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse); }; +interface StopDatafeedsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} + export const callStopDatafeeds = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: StopDatafeedsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // Stop datafeed due to https://github.com/elastic/kibana/issues/44652 - const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { + const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', { method: 'POST', body: JSON.stringify( stopDatafeedsRequestPayloadRT.encode({ @@ -58,10 +62,7 @@ export const callStopDatafeeds = async ( ), }); - return pipe( - stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse); }; export const deleteJobsRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts index 
3fddb63f6979..84b5df3d172c 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts @@ -4,21 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; + +interface RequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callJobsSummaryAPI = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { + const { spaceId, sourceId, jobTypes } = requestArgs; + const response = await fetch('/api/ml/jobs/jobs_summary', { method: 'POST', body: JSON.stringify( fetchJobStatusRequestPayloadRT.encode({ @@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async ( }) ), }); - return pipe( - fetchJobStatusResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response); }; export const fetchJobStatusRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts index d492522c120a..75ce335fbe49 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts @@ -4,24 +4,18 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { jobCustomSettingsRT } from '../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; -export const callGetMlModuleAPI = async (moduleId: string) => { - const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { +export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => { + const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, { method: 'GET', }); - return pipe( - getMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getMlModuleResponsePayloadRT)(response); }; const jobDefinitionRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts index 06b0e075387b..36dced1bd268 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts @@ -4,27 +4,38 @@ * you may not use this file except in compliance with the Elastic License. 
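Every helper in this area now funnels its response through decodeOrThrow rather than composing pipe, fold, throwErrors and createPlainError at each call site. The real helper lives in the plugin's common/runtime_types and is not reproduced in this diff; a minimal equivalent, assuming io-ts with fp-ts, might look like this sketch:

import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { failure } from 'io-ts/lib/PathReporter';

// Sketch only: curried so one codec can validate many responses, throwing a
// plain Error with the io-ts path report when validation fails.
const decodeOrThrowSketch = <C extends rt.Mixed>(codec: C) => (value: unknown): rt.TypeOf<C> =>
  pipe(
    codec.decode(value),
    fold((errors: rt.Errors) => {
      throw new Error(failure(errors).join('\n'));
    }, identity)
  );

// Usage mirrors the call sites in this diff, e.g.:
// return decodeOrThrowSketch(fetchJobStatusResponsePayloadRT)(response);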
*/ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; -export const callSetupMlModuleAPI = async ( - moduleId: string, - start: number | undefined, - end: number | undefined, - spaceId: string, - sourceId: string, - indexPattern: string, - jobOverrides: SetupMlModuleJobOverrides[] = [], - datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], - query?: object -) => { - const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, { +interface RequestArgs { + moduleId: string; + start?: number; + end?: number; + spaceId: string; + sourceId: string; + indexPattern: string; + jobOverrides?: SetupMlModuleJobOverrides[]; + datafeedOverrides?: SetupMlModuleDatafeedOverrides[]; + query?: object; +} + +export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern, + jobOverrides = [], + datafeedOverrides = [], + query, + } = requestArgs; + + const response = await fetch(`/api/ml/modules/setup/${moduleId}`, { method: 'POST', body: JSON.stringify( setupMlModuleRequestPayloadRT.encode({ @@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async ( ), }); - return pipe( - setupMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(setupMlModuleResponsePayloadRT)(response); }; const setupMlModuleTimeParamsRT = rt.partial({ diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx index f4c90a459af6..bc488a51e2af 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx @@ -10,14 +10,15 @@ import { fold } from 'fp-ts/lib/Either'; import { pipe } from 'fp-ts/lib/pipeable'; import { identity } from 'fp-ts/lib/function'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; -import { npStart } from '../../legacy_singletons'; import { getMlCapabilitiesResponsePayloadRT, GetMlCapabilitiesResponsePayload, } from './api/ml_api_types'; import { throwErrors, createPlainError } from '../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; export const useInfraMLCapabilities = () => { + const { services } = useKibanaContextForPlugin(); const [mlCapabilities, setMlCapabilities] = useState( initialMlCapabilities ); @@ -26,7 +27,7 @@ export const useInfraMLCapabilities = () => { { cancelPreviousOn: 'resolution', createPromise: async () => { - const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); + const rawResponse = await services.http.fetch('/api/ml/ml_capabilities'); return pipe( getMlCapabilitiesResponsePayloadRT.decode(rawResponse), diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx index 736982c8043b..871e61ecfe50 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx +++ 
b/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx @@ -4,16 +4,18 @@ * you may not use this file except in compliance with the Elastic License. */ +import { HttpHandler } from 'src/core/public'; import { getJobId } from '../../../common/infra_ml'; import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; export const cleanUpJobsAndDatafeeds = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { try { - await callStopDatafeeds(spaceId, sourceId, jobTypes); + await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch); } catch (err) { // Proceed only if datafeed has been deleted or didn't exist in the first place if (err?.res?.status !== 404) { @@ -21,27 +23,29 @@ export const cleanUpJobsAndDatafeeds = async ( } } - return await deleteJobs(spaceId, sourceId, jobTypes); + return await deleteJobs(spaceId, sourceId, jobTypes, fetch); }; const deleteJobs = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { - const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); - await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); + const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch); + await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch); return deleteJobsResponse; }; const waitUntilJobsAreDeleted = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); while (true) { - const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); + const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch); const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); if (needToWait) { diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx index 349541d108f5..5408084a5246 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo } from 'react'; import { DatasetFilter } from '../../../common/infra_ml'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { useModuleStatus } from './infra_ml_module_status'; import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; @@ -17,6 +18,7 @@ export const useInfraMLModule = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const { spaceId, sourceId, timestampField } = sourceConfiguration; const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); @@ -25,7 +27,7 @@ export const useInfraMLModule = ({ cancelPreviousOn: 'resolution', createPromise: async () => { dispatchModuleStatus({ type: 'fetchingJobStatuses' }); - return await moduleDescriptor.getJobSummary(spaceId, sourceId); + return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch); }, onResolve: (jobResponse) => { dispatchModuleStatus({ @@ -54,18 +56,25 @@ export const useInfraMLModule = ({ ) => { dispatchModuleStatus({ type: 'startedSetup' }); const setupResult = await moduleDescriptor.setUpModule( - 
start, - end, - datasetFilter, { - indices: selectedIndices, - sourceId, - spaceId, - timestampField, + start, + end, + datasetFilter, + moduleSourceConfiguration: { + indices: selectedIndices, + sourceId, + spaceId, + timestampField, + }, + partitionField, }, - partitionField + services.http.fetch + ); + const jobSummaries = await moduleDescriptor.getJobSummary( + spaceId, + sourceId, + services.http.fetch ); - const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId); return { setupResult, jobSummaries }; }, onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { @@ -89,7 +98,7 @@ export const useInfraMLModule = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.cleanUpModule(spaceId, sourceId); + return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch); }, }, [spaceId, sourceId] diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx index 3c7ffcfd4a4e..a747a2853d1f 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo, useState } from 'react'; import { getJobId } from '../../../common/log_analysis'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { JobSummary } from './api/ml_get_jobs_summary_api'; import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; @@ -18,6 +19,7 @@ export const useInfraMLModuleDefinition = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const [moduleDefinition, setModuleDefinition] = useState< GetMlModuleResponsePayload | undefined >(); @@ -40,7 +42,7 @@ export const useInfraMLModuleDefinition = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.getModuleDefinition(); + return await moduleDescriptor.getModuleDefinition(services.http.fetch); }, onResolve: (response) => { setModuleDefinition(response); diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts index e36f38add641..976a64e8034b 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
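The two hooks above illustrate the calling convention used throughout the refactor: request parameters travel as a single object, and the HttpHandler is resolved from the Kibana context and passed explicitly instead of being read from a singleton. A condensed sketch of a caller built the same way (the hook name, the job type value and the relative import paths are illustrative):

import { useCallback } from 'react';
import { useKibanaContextForPlugin } from '../../hooks/use_kibana';
import { callJobsSummaryAPI } from './api/ml_get_jobs_summary_api';

// Sketch only: resolve the concrete fetch handler once from context and thread
// it into the refactored helper on every invocation.
export const useFetchJobSummaries = (spaceId: string, sourceId: string) => {
  const { services } = useKibanaContextForPlugin();

  return useCallback(
    () =>
      callJobsSummaryAPI(
        { spaceId, sourceId, jobTypes: ['hosts_memory_usage'] }, // illustrative job type
        services.http.fetch
      ),
    [spaceId, sourceId, services.http.fetch]
  );
};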
*/ - +import { HttpHandler } from 'src/core/public'; import { ValidateLogEntryDatasetsResponsePayload, ValidationIndicesResponsePayload, @@ -16,6 +16,14 @@ import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api'; export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api'; +export interface SetUpModuleArgs { + start?: number | undefined; + end?: number | undefined; + datasetFilter?: DatasetFilter; + moduleSourceConfiguration: ModuleSourceConfiguration; + partitionField?: string; +} + export interface ModuleDescriptor { moduleId: string; moduleName: string; @@ -23,25 +31,32 @@ export interface ModuleDescriptor { jobTypes: JobType[]; bucketSpan: number; getJobIds: (spaceId: string, sourceId: string) => Record; - getJobSummary: (spaceId: string, sourceId: string) => Promise; - getModuleDefinition: () => Promise; + getJobSummary: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; + getModuleDefinition: (fetch: HttpHandler) => Promise; setUpModule: ( - start: number | undefined, - end: number | undefined, - datasetFilter: DatasetFilter, - sourceConfiguration: ModuleSourceConfiguration, - partitionField?: string + setUpModuleArgs: SetUpModuleArgs, + fetch: HttpHandler ) => Promise; - cleanUpModule: (spaceId: string, sourceId: string) => Promise; + cleanUpModule: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; validateSetupIndices?: ( indices: string[], - timestampField: string + timestampField: string, + fetch: HttpHandler ) => Promise; validateSetupDatasets?: ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => Promise; } diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts index 7ea87c3d2132..47230cbed977 100644 --- a/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts @@ -5,7 +5,8 @@ */ import { i18n } from '@kbn/i18n'; -import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types'; +import { HttpHandler } from 'src/core/public'; +import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types'; import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup'; import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api'; import { callGetMlModuleAPI } from '../../api/ml_get_module'; @@ -14,7 +15,6 @@ import { metricsHostsJobTypes, getJobId, MetricsHostsJobType, - DatasetFilter, bucketSpan, } from '../../../../../common/infra_ml'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths @@ -48,24 +48,28 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, metricsHostsJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: metricsHostsJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await 
callGetMlModuleAPI(moduleId, fetch); }; -const setUpModule = async ( - start: number | undefined, - end: number | undefined, - datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, - partitionField?: string -) => { +const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => { + const { + start, + end, + moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField }, + partitionField, + } = setUpModuleArgs; + const indexNamePattern = indices.join(','); const jobIds: JobType[] = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out']; @@ -128,14 +132,17 @@ const setUpModule = async ( }); return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - datafeedOverrides + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + datafeedOverrides, + }, + fetch ); }; @@ -159,8 +166,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => { } }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes, fetch); }; export const metricHostsModule: ModuleDescriptor = { diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts index eaf7489c84eb..488803dc113b 100644 --- a/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts @@ -5,7 +5,8 @@ */ import { i18n } from '@kbn/i18n'; -import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types'; +import { HttpHandler } from 'src/core/public'; +import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types'; import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup'; import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api'; import { callGetMlModuleAPI } from '../../api/ml_get_module'; @@ -14,7 +15,6 @@ import { metricsK8SJobTypes, getJobId, MetricK8sJobType, - DatasetFilter, bucketSpan, } from '../../../../../common/infra_ml'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths @@ -49,24 +49,28 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, metricsK8SJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: metricsK8SJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await callGetMlModuleAPI(moduleId, fetch); }; -const setUpModule = async ( - start: number | undefined, - end: number | undefined, - datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, - partitionField?: string -) => 
{ +const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => { + const { + start, + end, + moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField }, + partitionField, + } = setUpModuleArgs; + const indexNamePattern = indices.join(','); const jobIds: JobType[] = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out']; const jobOverrides = jobIds.map((id) => { @@ -133,14 +137,17 @@ const setUpModule = async ( }); return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - datafeedOverrides + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + datafeedOverrides, + }, + fetch ); }; @@ -164,8 +171,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => { } }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes, fetch); }; export const metricHostsModule: ModuleDescriptor = { diff --git a/x-pack/plugins/infra/public/legacy_singletons.ts b/x-pack/plugins/infra/public/legacy_singletons.ts deleted file mode 100644 index f57047f21c28..000000000000 --- a/x-pack/plugins/infra/public/legacy_singletons.ts +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -import { CoreStart } from 'kibana/public'; - -let npStart: CoreStart; - -export function registerStartSingleton(start: CoreStart) { - npStart = start; -} - -export { npStart }; diff --git a/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx b/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx index 945b299674aa..4f83e37d7e02 100644 --- a/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx +++ b/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx @@ -14,7 +14,6 @@ import { createMemoryHistory } from 'history'; import React from 'react'; import { Route, Router, Switch } from 'react-router-dom'; import { httpServiceMock } from 'src/core/public/mocks'; -// import { HttpSetup } from 'src/core/public'; import { KibanaContextProvider } from 'src/plugins/kibana_react/public'; import { useLogSource } from '../../containers/logs/log_source'; import { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts index a8cd7854efb6..5f34d45635b6 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts @@ -4,24 +4,28 @@ * you may not use this file except in compliance with the Elastic License. 
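Both metrics module descriptors now take a single SetUpModuleArgs object plus the fetch handler, which keeps optional fields such as datasetFilter and partitionField out of a long positional list. A sketch of a direct call, assuming placeholder ids, index pattern and time range (the import paths are likewise illustrative):

import type { HttpHandler } from 'src/core/public';
import { metricHostsModule } from './modules/metrics_hosts/module_descriptor';

// In application code this would be useKibanaContextForPlugin().services.http.fetch.
declare const httpFetch: HttpHandler;

const setUpHostsModule = () =>
  metricHostsModule.setUpModule(
    {
      start: Date.now() - 14 * 24 * 60 * 60 * 1000, // placeholder: the last two weeks
      moduleSourceConfiguration: {
        indices: ['metricbeat-*'], // placeholder index pattern
        sourceId: 'default',
        spaceId: 'default',
        timestampField: '@timestamp',
      },
      partitionField: 'host.name', // optional, as are end and datasetFilter
    },
    httpFetch
  );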
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryCategoryDatasetsRequestPayloadRT, getLogEntryCategoryDatasetsSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; +} export const callGetLogEntryCategoryDatasetsAPI = async ( - sourceId: string, - startTime: number, - endTime: number + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, { + const { sourceId, startTime, endTime } = requestArgs; + + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, { method: 'POST', body: JSON.stringify( getLogEntryCategoryDatasetsRequestPayloadRT.encode({ @@ -36,8 +40,5 @@ export const callGetLogEntryCategoryDatasetsAPI = async ( ), }); - return pipe( - getLogEntryCategoryDatasetsSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryCategoryDatasetsSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts index a10d077a2dd4..c4b756ebf5d5 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts @@ -4,26 +4,30 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryCategoryExamplesRequestPayloadRT, getLogEntryCategoryExamplesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + categoryId: number; + exampleCount: number; +} export const callGetLogEntryCategoryExamplesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - categoryId: number, - exampleCount: number + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, { + const { sourceId, startTime, endTime, categoryId, exampleCount } = requestArgs; + + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryCategoryExamplesRequestPayloadRT.encode({ @@ -40,8 +44,5 @@ export const callGetLogEntryCategoryExamplesAPI = async ( ), }); - return pipe( - getLogEntryCategoryExamplesSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryCategoryExamplesSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts index 2ebcff4fd3ca..fd5380379633 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts @@ -4,28 +4,31 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryCategoriesRequestPayloadRT, getLogEntryCategoriesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + categoryCount: number; + datasets?: string[]; +} export const callGetTopLogEntryCategoriesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - categoryCount: number, - datasets?: string[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { + const { sourceId, startTime, endTime, categoryCount, datasets } = requestArgs; const intervalDuration = endTime - startTime; - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, { + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryCategoriesRequestPayloadRT.encode({ @@ -60,8 +63,5 @@ export const callGetTopLogEntryCategoriesAPI = async ( ), }); - return pipe( - getLogEntryCategoriesSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryCategoriesSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts index 123b188046b8..0a12c433db60 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts @@ -13,6 +13,7 @@ import { import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise'; import { callGetTopLogEntryCategoriesAPI } from './service_calls/get_top_log_entry_categories'; import { callGetLogEntryCategoryDatasetsAPI } from './service_calls/get_log_entry_category_datasets'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; type TopLogEntryCategories = GetLogEntryCategoriesSuccessResponsePayload['data']['categories']; type LogEntryCategoryDatasets = GetLogEntryCategoryDatasetsSuccessResponsePayload['data']['datasets']; @@ -34,6 +35,7 @@ export const useLogEntryCategoriesResults = ({ sourceId: string; startTime: number; }) => { + const { services } = useKibanaContextForPlugin(); const [topLogEntryCategories, setTopLogEntryCategories] = useState([]); const [logEntryCategoryDatasets, setLogEntryCategoryDatasets] = useState< LogEntryCategoryDatasets @@ -44,11 +46,14 @@ export const useLogEntryCategoriesResults = ({ cancelPreviousOn: 'creation', createPromise: async () => { return await callGetTopLogEntryCategoriesAPI( - sourceId, - startTime, - endTime, - categoriesCount, - filteredDatasets + { + sourceId, + startTime, + endTime, + categoryCount: categoriesCount, + datasets: filteredDatasets, + }, + services.http.fetch ); }, onResolve: ({ data: { categories } }) => { @@ -71,7 +76,10 @@ export const useLogEntryCategoriesResults = ({ { cancelPreviousOn: 'creation', createPromise: async () => { 
- return await callGetLogEntryCategoryDatasetsAPI(sourceId, startTime, endTime); + return await callGetLogEntryCategoryDatasetsAPI( + { sourceId, startTime, endTime }, + services.http.fetch + ); }, onResolve: ({ data: { datasets } }) => { setLogEntryCategoryDatasets(datasets); diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx index cdf3b642a801..84b9f045288c 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx @@ -7,6 +7,7 @@ import { useMemo, useState } from 'react'; import { LogEntryCategoryExample } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { callGetLogEntryCategoryExamplesAPI } from './service_calls/get_log_entry_category_examples'; @@ -23,6 +24,8 @@ export const useLogEntryCategoryExamples = ({ sourceId: string; startTime: number; }) => { + const { services } = useKibanaContextForPlugin(); + const [logEntryCategoryExamples, setLogEntryCategoryExamples] = useState< LogEntryCategoryExample[] >([]); @@ -32,11 +35,14 @@ export const useLogEntryCategoryExamples = ({ cancelPreviousOn: 'creation', createPromise: async () => { return await callGetLogEntryCategoryExamplesAPI( - sourceId, - startTime, - endTime, - categoryId, - exampleCount + { + sourceId, + startTime, + endTime, + categoryId, + exampleCount, + }, + services.http.fetch ); }, onResolve: ({ data: { examples } }) => { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts index 21696df566ed..7f90604bfefd 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryAnomaliesRequestPayloadRT, getLogEntryAnomaliesSuccessReponsePayloadRT, @@ -13,15 +13,18 @@ import { import { decodeOrThrow } from '../../../../../common/runtime_types'; import { Sort, Pagination } from '../../../../../common/http_api/log_analysis'; -export const callGetLogEntryAnomaliesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - sort: Sort, - pagination: Pagination, - datasets?: string[] -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, { +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + sort: Sort; + pagination: Pagination; + datasets?: string[]; +} + +export const callGetLogEntryAnomaliesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { sourceId, startTime, endTime, sort, pagination, datasets } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryAnomaliesRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts index 24be5a646d10..c62bec691590 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { decodeOrThrow } from '../../../../../common/runtime_types'; import { getLogEntryAnomaliesDatasetsRequestPayloadRT, @@ -12,12 +12,18 @@ import { LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, } from '../../../../../common/http_api/log_analysis'; +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; +} + export const callGetLogEntryAnomaliesDatasetsAPI = async ( - sourceId: string, - startTime: number, - endTime: number + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, { + const { sourceId, startTime, endTime } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, { method: 'POST', body: JSON.stringify( getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts index a125b53f9e63..ab724a2f435b 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts @@ -4,27 +4,27 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryExamplesRequestPayloadRT, getLogEntryExamplesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callGetLogEntryExamplesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - dataset: string, - exampleCount: number, - categoryId?: string -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, { +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + dataset: string; + exampleCount: number; + categoryId?: string; +} + +export const callGetLogEntryExamplesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { sourceId, startTime, endTime, dataset, exampleCount, categoryId } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryExamplesRequestPayloadRT.encode({ @@ -42,8 +42,5 @@ export const callGetLogEntryExamplesAPI = async ( ), }); - return pipe( - getLogEntryExamplesSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryExamplesSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts index 77111d279309..c9189bd80395 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts @@ -4,25 +4,25 @@ * you may not use this file except in compliance with the Elastic License. 
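As with the other service calls, the examples helper now names its parameters, so the trailing optional categoryId no longer has to be positioned by counting arguments. A short comparison, using placeholder values and an assumed relative import path:

import type { HttpHandler } from 'src/core/public';
import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples';

declare const httpFetch: HttpHandler; // e.g. services.http.fetch from the Kibana context

const endTime = Date.now();
const startTime = endTime - 15 * 60 * 1000; // placeholder: the last fifteen minutes

// Previously: callGetLogEntryExamplesAPI('default', startTime, endTime, 'nginx.access', 5),
// with categoryId tacked on as a sixth positional argument when needed.
const fetchExamples = () =>
  callGetLogEntryExamplesAPI(
    {
      sourceId: 'default',
      startTime,
      endTime,
      dataset: 'nginx.access', // placeholder dataset name
      exampleCount: 5,
      // categoryId is optional and simply omitted here
    },
    httpFetch
  );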
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryRateRequestPayloadRT, getLogEntryRateSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callGetLogEntryRateAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - bucketDuration: number, - datasets?: string[] -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, { +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + bucketDuration: number; + datasets?: string[]; +} + +export const callGetLogEntryRateAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { sourceId, startTime, endTime, bucketDuration, datasets } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, { method: 'POST', body: JSON.stringify( getLogEntryRateRequestPayloadRT.encode({ @@ -38,8 +38,5 @@ export const callGetLogEntryRateAPI = async ( }) ), }); - return pipe( - getLogEntryRateSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryRateSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts index 52632e54390a..37c99272f087 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts @@ -16,6 +16,7 @@ import { GetLogEntryAnomaliesDatasetsSuccessResponsePayload, LogEntryAnomaly, } from '../../../../common/http_api/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -161,6 +162,8 @@ export const useLogEntryAnomaliesResults = ({ }; }; + const { services } = useKibanaContextForPlugin(); + const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer); const [logEntryAnomalies, setLogEntryAnomalies] = useState([]); @@ -177,15 +180,18 @@ export const useLogEntryAnomaliesResults = ({ filteredDatasets: queryFilteredDatasets, } = reducerState; return await callGetLogEntryAnomaliesAPI( - sourceId, - queryStartTime, - queryEndTime, - sortOptions, { - ...paginationOptions, - cursor: paginationCursor, + sourceId, + startTime: queryStartTime, + endTime: queryEndTime, + sort: sortOptions, + pagination: { + ...paginationOptions, + cursor: paginationCursor, + }, + datasets: queryFilteredDatasets, }, - queryFilteredDatasets + services.http.fetch ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -286,7 +292,10 @@ export const useLogEntryAnomaliesResults = ({ { cancelPreviousOn: 'creation', createPromise: async () => { - return await callGetLogEntryAnomaliesDatasetsAPI(sourceId, startTime, endTime); + return await callGetLogEntryAnomaliesDatasetsAPI( + { sourceId, startTime, endTime }, + services.http.fetch + ); }, onResolve: ({ data: { datasets } 
}) => { setLogEntryAnomaliesDatasets(datasets); diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts index fae5bd200a41..e809ab9cd5a6 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts @@ -7,6 +7,7 @@ import { useMemo, useState } from 'react'; import { LogEntryExample } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples'; @@ -25,6 +26,7 @@ export const useLogEntryExamples = ({ startTime: number; categoryId?: string; }) => { + const { services } = useKibanaContextForPlugin(); const [logEntryExamples, setLogEntryExamples] = useState([]); const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise( @@ -32,12 +34,15 @@ export const useLogEntryExamples = ({ cancelPreviousOn: 'creation', createPromise: async () => { return await callGetLogEntryExamplesAPI( - sourceId, - startTime, - endTime, - dataset, - exampleCount, - categoryId + { + sourceId, + startTime, + endTime, + dataset, + exampleCount, + categoryId, + }, + services.http.fetch ); }, onResolve: ({ data: { examples } }) => { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts index a52dab58cb01..aef94afa505f 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts @@ -12,6 +12,7 @@ import { LogEntryRatePartition, LogEntryRateAnomaly, } from '../../../../common/http_api/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { callGetLogEntryRateAPI } from './service_calls/get_log_entry_rate'; @@ -49,6 +50,7 @@ export const useLogEntryRateResults = ({ bucketDuration: number; filteredDatasets?: string[]; }) => { + const { services } = useKibanaContextForPlugin(); const [logEntryRate, setLogEntryRate] = useState(null); const [getLogEntryRateRequest, getLogEntryRate] = useTrackedPromise( @@ -56,11 +58,14 @@ export const useLogEntryRateResults = ({ cancelPreviousOn: 'resolution', createPromise: async () => { return await callGetLogEntryRateAPI( - sourceId, - startTime, - endTime, - bucketDuration, - filteredDatasets + { + sourceId, + startTime, + endTime, + bucketDuration, + datasets: filteredDatasets, + }, + services.http.fetch ); }, onResolve: ({ data }) => { diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts index f33e3ea16b38..02170f41a32c 100644 --- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts @@ -5,6 +5,7 @@ */ import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; +import { HttpHandler } from 'src/core/public'; import { INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, Metric, @@ -16,8 
+17,8 @@ import { getMetricsHostsAnomaliesSuccessReponsePayloadRT, } from '../../../../../common/http_api/infra_ml'; import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; -import { npStart } from '../../../../legacy_singletons'; import { decodeOrThrow } from '../../../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -149,6 +150,7 @@ export const useMetricsHostsAnomaliesResults = ({ onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; filteredDatasets?: string[]; }) => { + const { services } = useKibanaContextForPlugin(); const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { return { ...stateDefaults, @@ -177,15 +179,18 @@ export const useMetricsHostsAnomaliesResults = ({ paginationCursor, } = reducerState; return await callGetMetricHostsAnomaliesAPI( - sourceId, - queryStartTime, - queryEndTime, - metric, - sortOptions, { - ...paginationOptions, - cursor: paginationCursor, - } + sourceId, + startTime: queryStartTime, + endTime: queryEndTime, + metric, + sort: sortOptions, + pagination: { + ...paginationOptions, + cursor: paginationCursor, + }, + }, + services.http.fetch ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -288,15 +293,21 @@ export const useMetricsHostsAnomaliesResults = ({ }; }; +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + metric: Metric; + sort: Sort; + pagination: Pagination; +} + export const callGetMetricHostsAnomaliesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - metric: Metric, - sort: Sort, - pagination: Pagination + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, { + const { sourceId, startTime, endTime, metric, sort, pagination } = requestArgs; + const response = await fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, { method: 'POST', body: JSON.stringify( getMetricsHostsAnomaliesRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts index 89e70c4c5c4c..951951b9b610 100644 --- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts @@ -5,6 +5,7 @@ */ import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; +import { HttpHandler } from 'src/core/public'; import { Sort, Pagination, @@ -16,8 +17,8 @@ import { Metric, } from '../../../../../common/http_api/infra_ml'; import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; -import { npStart } from '../../../../legacy_singletons'; import { decodeOrThrow } from '../../../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -149,6 +150,7 @@ export const useMetricsK8sAnomaliesResults = ({ onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; filteredDatasets?: string[]; }) => { + const { services } = useKibanaContextForPlugin(); const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { return { ...stateDefaults, @@ -178,16 +180,19 
@@ export const useMetricsK8sAnomaliesResults = ({ filteredDatasets: queryFilteredDatasets, } = reducerState; return await callGetMetricsK8sAnomaliesAPI( - sourceId, - queryStartTime, - queryEndTime, - metric, - sortOptions, { - ...paginationOptions, - cursor: paginationCursor, + sourceId, + startTime: queryStartTime, + endTime: queryEndTime, + metric, + sort: sortOptions, + pagination: { + ...paginationOptions, + cursor: paginationCursor, + }, + datasets: queryFilteredDatasets, }, - queryFilteredDatasets + services.http.fetch ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -290,16 +295,22 @@ export const useMetricsK8sAnomaliesResults = ({ }; }; +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + metric: Metric; + sort: Sort; + pagination: Pagination; + datasets?: string[]; +} + export const callGetMetricsK8sAnomaliesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - metric: Metric, - sort: Sort, - pagination: Pagination, - datasets?: string[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, { + const { sourceId, startTime, endTime, metric, sort, pagination, datasets } = requestArgs; + const response = await fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, { method: 'POST', body: JSON.stringify( getMetricsK8sAnomaliesRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/plugin.ts b/x-pack/plugins/infra/public/plugin.ts index 3c6b1a14cfd4..0e49ca93010f 100644 --- a/x-pack/plugins/infra/public/plugin.ts +++ b/x-pack/plugins/infra/public/plugin.ts @@ -9,7 +9,6 @@ import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/public'; import { createMetricThresholdAlertType } from './alerting/metric_threshold'; import { createInventoryMetricAlertType } from './alerting/inventory'; import { getAlertType as getLogsAlertType } from './alerting/log_threshold'; -import { registerStartSingleton } from './legacy_singletons'; import { registerFeatures } from './register_feature'; import { InfraClientSetupDeps, @@ -98,9 +97,7 @@ export class Plugin implements InfraClientPluginClass { }); } - start(core: InfraClientCoreStart, _plugins: InfraClientStartDeps) { - registerStartSingleton(core); - } + start(_core: InfraClientCoreStart, _plugins: InfraClientStartDeps) {} stop() {} }
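With registerStartSingleton removed, start() no longer needs to capture core; the HTTP service reaches the hooks through the Kibana context instead. A sketch of that wiring at mount time, assuming the standard kibana_react provider (the wrapper name is illustrative):

import React from 'react';
import type { CoreStart } from 'kibana/public';
import { KibanaContextProvider } from 'src/plugins/kibana_react/public';

// Sketch only: anything rendered below the provider can call
// useKibanaContextForPlugin() and read services.http.fetch from context.
export const WithKibanaServices: React.FC<{ core: CoreStart }> = ({ core, children }) => (
  <KibanaContextProvider services={{ ...core }}>{children}</KibanaContextProvider>
);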