[ML] rename inference to trained_models (#79676)

This commit is contained in:
Dima Arnautov 2020-10-06 19:31:59 +02:00 committed by GitHub
parent d845922a1a
commit 3002108c40
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 80 additions and 80 deletions

View file

@ -44,7 +44,7 @@ export interface TrainedModelStat {
};
}
export interface ModelConfigResponse {
export interface TrainedModelConfigResponse {
created_by: string;
create_time: string;
default_field_map: Record<string, string>;
@ -79,5 +79,5 @@ export interface ModelPipelines {
* Get inference response from the ES endpoint
*/
export interface InferenceConfigResponse {
trained_model_configs: ModelConfigResponse[];
trained_model_configs: TrainedModelConfigResponse[];
}

View file

@ -19,7 +19,7 @@ import { DataFrameAnalyticsConfig } from '../common';
import { isGetDataFrameAnalyticsStatsResponseOk } from '../pages/analytics_management/services/analytics_service/get_analytics';
import { DATA_FRAME_TASK_STATE } from '../pages/analytics_management/components/analytics_list/common';
import { useInferenceApiService } from '../../services/ml_api_service/inference';
import { useTrainedModelsApiService } from '../../services/ml_api_service/trained_models';
import { TotalFeatureImportance } from '../../../../common/types/feature_importance';
import { getToastNotificationService } from '../../services/toast_notification_service';
import {
@ -29,7 +29,7 @@ import {
export const useResultsViewConfig = (jobId: string) => {
const mlContext = useMlContext();
const inferenceApiService = useInferenceApiService();
const trainedModelsApiService = useTrainedModelsApiService();
const [indexPattern, setIndexPattern] = useState<IndexPattern | undefined>(undefined);
const [isInitialized, setIsInitialized] = useState<boolean>(false);
@ -74,7 +74,7 @@ export const useResultsViewConfig = (jobId: string) => {
isRegressionAnalysis(jobConfigUpdate.analysis)
) {
try {
const inferenceModels = await inferenceApiService.getInferenceModel(`${jobId}*`, {
const inferenceModels = await trainedModelsApiService.getTrainedModels(`${jobId}*`, {
include: 'total_feature_importance',
});
const inferenceModel = inferenceModels.find(

View file

@ -35,7 +35,7 @@ export const DeleteModelsModal: FC<DeleteModelsModalProps> = ({ models, onClose
<EuiModalHeader>
<EuiModalHeaderTitle>
<FormattedMessage
id="xpack.ml.inference.modelsList.deleteModal.header"
id="xpack.ml.trainedModels.modelsList.deleteModal.header"
defaultMessage="Delete {modelsCount, plural, one {{modelId}} other {# models}}?"
values={{
modelId: models[0].model_id,
@ -54,7 +54,7 @@ export const DeleteModelsModal: FC<DeleteModelsModalProps> = ({ models, onClose
size="s"
>
<FormattedMessage
id="xpack.ml.inference.modelsList.deleteModal.modelsWithPipelinesWarningMessage"
id="xpack.ml.trainedModels.modelsList.deleteModal.modelsWithPipelinesWarningMessage"
defaultMessage="{modelsWithPipelinesCount, plural, one{Model} other {Models}} {modelsWithPipelines} {modelsWithPipelinesCount, plural, one{has} other {have}} associated pipelines!"
values={{
modelsWithPipelinesCount: modelsWithPipelines.length,
@ -68,14 +68,14 @@ export const DeleteModelsModal: FC<DeleteModelsModalProps> = ({ models, onClose
<EuiModalFooter>
<EuiButtonEmpty onClick={onClose.bind(null, false)} name="cancelModelDeletion">
<FormattedMessage
id="xpack.ml.inference.modelsList.deleteModal.cancelButtonLabel"
id="xpack.ml.trainedModels.modelsList.deleteModal.cancelButtonLabel"
defaultMessage="Cancel"
/>
</EuiButtonEmpty>
<EuiButton onClick={onClose.bind(null, true)} fill color="danger">
<FormattedMessage
id="xpack.ml.inference.modelsList.deleteModal.deleteButtonLabel"
id="xpack.ml.trainedModels.modelsList.deleteModal.deleteButtonLabel"
defaultMessage="Delete"
/>
</EuiButton>

View file

@ -120,7 +120,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
id: 'details',
name: (
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.detailsTabLabel"
id="xpack.ml.trainedModels.modelsList.expandedRow.detailsTabLabel"
defaultMessage="Details"
/>
),
@ -133,7 +133,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xs'}>
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.detailsTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.detailsTitle"
defaultMessage="Details"
/>
</h5>
@ -156,7 +156,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
id: 'config',
name: (
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.configTabLabel"
id="xpack.ml.trainedModels.modelsList.expandedRow.configTabLabel"
defaultMessage="Config"
/>
),
@ -169,7 +169,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xs'}>
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.inferenceConfigTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.inferenceConfigTitle"
defaultMessage="Inference configuration"
/>
</h5>
@ -190,7 +190,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xs'}>
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.analyticsConfigTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.analyticsConfigTitle"
defaultMessage="Analytics configuration"
/>
</h5>
@ -214,7 +214,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
id: 'stats',
name: (
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.statsTabLabel"
id="xpack.ml.trainedModels.modelsList.expandedRow.statsTabLabel"
defaultMessage="Stats"
/>
),
@ -228,7 +228,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xs'}>
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.inferenceStatsTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.inferenceStatsTitle"
defaultMessage="Inference stats"
/>
</h5>
@ -248,7 +248,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xs'}>
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.ingestStatsTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.ingestStatsTitle"
defaultMessage="Ingest stats"
/>
</h5>
@ -266,7 +266,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xs'}>
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.byPipelineTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.byPipelineTitle"
defaultMessage="By pipeline"
/>
</h5>
@ -300,7 +300,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xxs'}>
<h6>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.byProcessorTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.byProcessorTitle"
defaultMessage="By processor"
/>
</h6>
@ -354,7 +354,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
name: (
<>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.pipelinesTabLabel"
id="xpack.ml.trainedModels.modelsList.expandedRow.pipelinesTabLabel"
defaultMessage="Pipelines"
/>{' '}
<EuiNotificationBadge>{stats.pipeline_count}</EuiNotificationBadge>
@ -390,7 +390,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
}}
>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.editPipelineLabel"
id="xpack.ml.trainedModels.modelsList.expandedRow.editPipelineLabel"
defaultMessage="Edit"
/>
</EuiButtonEmpty>
@ -402,7 +402,7 @@ export const ExpandedRow: FC<ExpandedRowProps> = ({ item }) => {
<EuiTitle size={'xxs'}>
<h6>
<FormattedMessage
id="xpack.ml.inference.modelsList.expandedRow.processorsTitle"
id="xpack.ml.trainedModels.modelsList.expandedRow.processorsTitle"
defaultMessage="Processors"
/>
</h6>

View file

@ -24,16 +24,16 @@ import { EuiBasicTableColumn } from '@elastic/eui/src/components/basic_table/bas
import { EuiTableSelectionType } from '@elastic/eui/src/components/basic_table/table_types';
import { Action } from '@elastic/eui/src/components/basic_table/action_types';
import { StatsBar, ModelsBarStats } from '../../../../../components/stats_bar';
import { useInferenceApiService } from '../../../../../services/ml_api_service/inference';
import { useTrainedModelsApiService } from '../../../../../services/ml_api_service/trained_models';
import { ModelsTableToConfigMapping } from './index';
import { DeleteModelsModal } from './delete_models_modal';
import { useMlKibana, useMlUrlGenerator, useNotifications } from '../../../../../contexts/kibana';
import { ExpandedRow } from './expanded_row';
import {
ModelConfigResponse,
TrainedModelConfigResponse,
ModelPipelines,
TrainedModelStat,
} from '../../../../../../../common/types/inference';
} from '../../../../../../../common/types/trained_models';
import {
getAnalysisType,
REFRESH_ANALYTICS_LIST_STATE,
@ -48,7 +48,7 @@ import { timeFormatter } from '../../../../../../../common/util/date_utils';
type Stats = Omit<TrainedModelStat, 'model_id'>;
export type ModelItem = ModelConfigResponse & {
export type ModelItem = TrainedModelConfigResponse & {
type?: string;
stats?: Stats;
pipelines?: ModelPipelines['pipelines'] | null;
@ -66,7 +66,7 @@ export const ModelsList: FC = () => {
const canDeleteDataFrameAnalytics = capabilities.ml.canDeleteDataFrameAnalytics as boolean;
const inferenceApiService = useInferenceApiService();
const trainedModelsApiService = useTrainedModelsApiService();
const { toasts } = useNotifications();
const [searchQueryText, setSearchQueryText] = useState('');
@ -110,7 +110,7 @@ export const ModelsList: FC = () => {
*/
const fetchData = useCallback(async () => {
try {
const response = await inferenceApiService.getInferenceModel(undefined, {
const response = await trainedModelsApiService.getTrainedModels(undefined, {
with_pipelines: true,
size: 1000,
});
@ -146,7 +146,7 @@ export const ModelsList: FC = () => {
}
} catch (error) {
toasts.addError(new Error(error.body?.message), {
title: i18n.translate('xpack.ml.inference.modelsList.fetchFailedErrorMessage', {
title: i18n.translate('xpack.ml.trainedModels.modelsList.fetchFailedErrorMessage', {
defaultMessage: 'Models fetch failed',
}),
});
@ -166,8 +166,8 @@ export const ModelsList: FC = () => {
total: {
show: true,
value: items.length,
label: i18n.translate('xpack.ml.inference.modelsList.totalAmountLabel', {
defaultMessage: 'Total inference trained models',
label: i18n.translate('xpack.ml.trainedModels.modelsList.totalAmountLabel', {
defaultMessage: 'Total trained models',
}),
},
};
@ -182,7 +182,7 @@ export const ModelsList: FC = () => {
try {
const {
trained_model_stats: modelsStatsResponse,
} = await inferenceApiService.getInferenceModelStats(modelIdsToFetch);
} = await trainedModelsApiService.getTrainedModelStats(modelIdsToFetch);
for (const { model_id: id, ...stats } of modelsStatsResponse) {
const model = models.find((m) => m.model_id === id);
@ -191,7 +191,7 @@ export const ModelsList: FC = () => {
return true;
} catch (error) {
toasts.addError(new Error(error.body.message), {
title: i18n.translate('xpack.ml.inference.modelsList.fetchModelStatsErrorMessage', {
title: i18n.translate('xpack.ml.trainedModels.modelsList.fetchModelStatsErrorMessage', {
defaultMessage: 'Fetch model stats failed',
}),
});
@ -221,7 +221,7 @@ export const ModelsList: FC = () => {
setModelsToDelete(models as ModelItemFull[]);
} else {
toasts.addDanger(
i18n.translate('xpack.ml.inference.modelsList.unableToDeleteModelsErrorMessage', {
i18n.translate('xpack.ml.trainedModels.modelsList.unableToDeleteModelsErrorMessage', {
defaultMessage: 'Unable to delete models',
})
);
@ -236,7 +236,7 @@ export const ModelsList: FC = () => {
try {
await Promise.all(
modelsToDeleteIds.map((modelId) => inferenceApiService.deleteInferenceModel(modelId))
modelsToDeleteIds.map((modelId) => trainedModelsApiService.deleteTrainedModel(modelId))
);
setItems(
items.filter(
@ -244,7 +244,7 @@ export const ModelsList: FC = () => {
)
);
toasts.addSuccess(
i18n.translate('xpack.ml.inference.modelsList.successfullyDeletedMessage', {
i18n.translate('xpack.ml.trainedModels.modelsList.successfullyDeletedMessage', {
defaultMessage:
'{modelsCount, plural, one {Model {modelsToDeleteIds}} other {# models}} {modelsCount, plural, one {has} other {have}} been successfully deleted',
values: {
@ -255,7 +255,7 @@ export const ModelsList: FC = () => {
);
} catch (error) {
toasts.addError(new Error(error?.body?.message), {
title: i18n.translate('xpack.ml.inference.modelsList.fetchDeletionErrorMessage', {
title: i18n.translate('xpack.ml.trainedModels.modelsList.fetchDeletionErrorMessage', {
defaultMessage: '{modelsCount, plural, one {Model} other {Models}} deletion failed',
values: {
modelsCount: modelsToDeleteIds.length,
@ -270,10 +270,10 @@ export const ModelsList: FC = () => {
*/
const actions: Array<Action<ModelItem>> = [
{
name: i18n.translate('xpack.ml.inference.modelsList.viewTrainingDataActionLabel', {
name: i18n.translate('xpack.ml.trainedModels.modelsList.viewTrainingDataActionLabel', {
defaultMessage: 'View training data',
}),
description: i18n.translate('xpack.ml.inference.modelsList.viewTrainingDataActionLabel', {
description: i18n.translate('xpack.ml.trainedModels.modelsList.viewTrainingDataActionLabel', {
defaultMessage: 'View training data',
}),
icon: 'visTable',
@ -298,10 +298,10 @@ export const ModelsList: FC = () => {
isPrimary: true,
},
{
name: i18n.translate('xpack.ml.inference.modelsList.deleteModelActionLabel', {
name: i18n.translate('xpack.ml.trainedModels.modelsList.deleteModelActionLabel', {
defaultMessage: 'Delete model',
}),
description: i18n.translate('xpack.ml.inference.modelsList.deleteModelActionLabel', {
description: i18n.translate('xpack.ml.trainedModels.modelsList.deleteModelActionLabel', {
defaultMessage: 'Delete model',
}),
icon: 'trash',
@ -341,10 +341,10 @@ export const ModelsList: FC = () => {
onClick={toggleDetails.bind(null, item)}
aria-label={
itemIdToExpandedRowMap[item.model_id]
? i18n.translate('xpack.ml.inference.modelsList.collapseRow', {
? i18n.translate('xpack.ml.trainedModels.modelsList.collapseRow', {
defaultMessage: 'Collapse',
})
: i18n.translate('xpack.ml.inference.modelsList.expandRow', {
: i18n.translate('xpack.ml.trainedModels.modelsList.expandRow', {
defaultMessage: 'Expand',
})
}
@ -354,7 +354,7 @@ export const ModelsList: FC = () => {
},
{
field: ModelsTableToConfigMapping.id,
name: i18n.translate('xpack.ml.inference.modelsList.modelIdHeader', {
name: i18n.translate('xpack.ml.trainedModels.modelsList.modelIdHeader', {
defaultMessage: 'ID',
}),
sortable: true,
@ -362,7 +362,7 @@ export const ModelsList: FC = () => {
},
{
field: ModelsTableToConfigMapping.type,
name: i18n.translate('xpack.ml.inference.modelsList.typeHeader', {
name: i18n.translate('xpack.ml.trainedModels.modelsList.typeHeader', {
defaultMessage: 'Type',
}),
sortable: true,
@ -371,7 +371,7 @@ export const ModelsList: FC = () => {
},
{
field: ModelsTableToConfigMapping.createdAt,
name: i18n.translate('xpack.ml.inference.modelsList.createdAtHeader', {
name: i18n.translate('xpack.ml.trainedModels.modelsList.createdAtHeader', {
defaultMessage: 'Created at',
}),
dataType: 'date',
@ -379,7 +379,7 @@ export const ModelsList: FC = () => {
sortable: true,
},
{
name: i18n.translate('xpack.ml.inference.modelsList.actionsHeader', {
name: i18n.translate('xpack.ml.trainedModels.modelsList.actionsHeader', {
defaultMessage: 'Actions',
}),
actions,
@ -413,7 +413,7 @@ export const ModelsList: FC = () => {
<EuiTitle size="s">
<h5>
<FormattedMessage
id="xpack.ml.inference.modelsList.selectedModelsMessage"
id="xpack.ml.trainedModels.modelsList.selectedModelsMessage"
defaultMessage="{modelsCount, plural, one{# model} other {# models}} selected"
values={{ modelsCount: selectedModels.length }}
/>
@ -423,7 +423,7 @@ export const ModelsList: FC = () => {
<EuiFlexItem>
<EuiButton color="danger" onClick={prepareModelsForDeletion.bind(null, selectedModels)}>
<FormattedMessage
id="xpack.ml.inference.modelsList.deleteModelsButtonLabel"
id="xpack.ml.trainedModels.modelsList.deleteModelsButtonLabel"
defaultMessage="Delete"
/>
</EuiButton>
@ -438,10 +438,10 @@ export const ModelsList: FC = () => {
? {
selectableMessage: (selectable, item) => {
return selectable
? i18n.translate('xpack.ml.inference.modelsList.selectableMessage', {
? i18n.translate('xpack.ml.trainedModels.modelsList.selectableMessage', {
defaultMessage: 'Select a model',
})
: i18n.translate('xpack.ml.inference.modelsList.disableSelectableMessage', {
: i18n.translate('xpack.ml.trainedModels.modelsList.disableSelectableMessage', {
defaultMessage: 'Model has associated pipelines',
});
},

View file

@ -10,10 +10,10 @@ import { HttpService } from '../http_service';
import { basePath } from './index';
import { useMlKibana } from '../../contexts/kibana';
import {
ModelConfigResponse,
TrainedModelConfigResponse,
ModelPipelines,
TrainedModelStat,
} from '../../../../common/types/inference';
} from '../../../../common/types/trained_models';
export interface InferenceQueryParams {
decompress_definition?: boolean;
@ -47,7 +47,7 @@ export interface InferenceStatsResponse {
 * Service with API calls to perform inference operations.
* @param httpService
*/
export function inferenceApiProvider(httpService: HttpService) {
export function trainedModelsApiProvider(httpService: HttpService) {
const apiBasePath = basePath();
return {
@ -58,14 +58,14 @@ export function inferenceApiProvider(httpService: HttpService) {
 * Fetches all models if nothing is provided.
* @param params - Optional query params
*/
getInferenceModel(modelId?: string | string[], params?: InferenceQueryParams) {
getTrainedModels(modelId?: string | string[], params?: InferenceQueryParams) {
let model = modelId ?? '';
if (Array.isArray(modelId)) {
model = modelId.join(',');
}
return httpService.http<ModelConfigResponse[]>({
path: `${apiBasePath}/inference${model && `/${model}`}`,
return httpService.http<TrainedModelConfigResponse[]>({
path: `${apiBasePath}/trained_models${model && `/${model}`}`,
method: 'GET',
...(params ? { query: params as HttpFetchQuery } : {}),
});
@ -78,14 +78,14 @@ export function inferenceApiProvider(httpService: HttpService) {
 * Fetches all models if nothing is provided.
* @param params - Optional query params
*/
getInferenceModelStats(modelId?: string | string[], params?: InferenceStatsQueryParams) {
getTrainedModelStats(modelId?: string | string[], params?: InferenceStatsQueryParams) {
let model = modelId ?? '_all';
if (Array.isArray(modelId)) {
model = modelId.join(',');
}
return httpService.http<InferenceStatsResponse>({
path: `${apiBasePath}/inference/${model}/_stats`,
path: `${apiBasePath}/trained_models/${model}/_stats`,
method: 'GET',
});
},
@ -95,14 +95,14 @@ export function inferenceApiProvider(httpService: HttpService) {
*
* @param modelId - Model ID, collection of Model IDs.
*/
getInferenceModelPipelines(modelId: string | string[]) {
getTrainedModelPipelines(modelId: string | string[]) {
let model = modelId;
if (Array.isArray(modelId)) {
model = modelId.join(',');
}
return httpService.http<ModelPipelines[]>({
path: `${apiBasePath}/inference/${model}/pipelines`,
path: `${apiBasePath}/trained_models/${model}/pipelines`,
method: 'GET',
});
},
@ -112,25 +112,25 @@ export function inferenceApiProvider(httpService: HttpService) {
*
* @param modelId - Model ID
*/
deleteInferenceModel(modelId: string) {
deleteTrainedModel(modelId: string) {
return httpService.http<any>({
path: `${apiBasePath}/inference/${modelId}`,
path: `${apiBasePath}/trained_models/${modelId}`,
method: 'DELETE',
});
},
};
}
type InferenceApiService = ReturnType<typeof inferenceApiProvider>;
type TrainedModelsApiService = ReturnType<typeof trainedModelsApiProvider>;
/**
* Hooks for accessing {@link InferenceApiService} in React components.
 * Hook for accessing {@link TrainedModelsApiService} in React components.
*/
export function useInferenceApiService(): InferenceApiService {
export function useTrainedModelsApiService(): TrainedModelsApiService {
const {
services: {
mlServices: { httpService },
},
} = useMlKibana();
return useMemo(() => inferenceApiProvider(httpService), [httpService]);
return useMemo(() => trainedModelsApiProvider(httpService), [httpService]);
}

View file

@ -5,7 +5,7 @@
*/
import { IScopedClusterClient } from 'kibana/server';
import { PipelineDefinition } from '../../../common/types/inference';
import { PipelineDefinition } from '../../../common/types/trained_models';
export function modelsProvider(client: IScopedClusterClient) {
return {

View file

@ -47,7 +47,7 @@ import { createSharedServices, SharedServices } from './shared_services';
import { getPluginPrivileges } from '../common/types/capabilities';
import { setupCapabilitiesSwitcher } from './lib/capabilities';
import { registerKibanaSettings } from './lib/register_settings';
import { inferenceRoutes } from './routes/inference';
import { trainedModelsRoutes } from './routes/trained_models';
export type MlPluginSetup = SharedServices;
export type MlPluginStart = void;
@ -153,7 +153,7 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
initMlServerLog({ log: this.log });
initMlTelemetry(coreSetup, plugins.usageCollection);
inferenceRoutes(routeInit);
trainedModelsRoutes(routeInit);
return {
...createSharedServices(

View file

@ -12,19 +12,19 @@ import {
optionalModelIdSchema,
} from './schemas/inference_schema';
import { modelsProvider } from '../models/data_frame_analytics';
import { InferenceConfigResponse } from '../../common/types/inference';
import { InferenceConfigResponse } from '../../common/types/trained_models';
export function inferenceRoutes({ router, mlLicense }: RouteInitialization) {
export function trainedModelsRoutes({ router, mlLicense }: RouteInitialization) {
/**
* @apiGroup Inference
*
* @api {get} /api/ml/inference/:modelId Get info of a trained inference model
* @api {get} /api/ml/trained_models/:modelId Get info of a trained inference model
* @apiName GetInferenceModel
* @apiDescription Retrieves configuration information for a trained inference model.
*/
router.get(
{
path: '/api/ml/inference/{modelId?}',
path: '/api/ml/trained_models/{modelId?}',
validate: {
params: optionalModelIdSchema,
query: getInferenceQuerySchema,
@ -70,13 +70,13 @@ export function inferenceRoutes({ router, mlLicense }: RouteInitialization) {
/**
* @apiGroup Inference
*
* @api {get} /api/ml/inference/:modelId/_stats Get stats of a trained inference model
* @api {get} /api/ml/trained_models/:modelId/_stats Get stats of a trained inference model
* @apiName GetInferenceModelStats
* @apiDescription Retrieves usage information for trained inference models.
*/
router.get(
{
path: '/api/ml/inference/{modelId}/_stats',
path: '/api/ml/trained_models/{modelId}/_stats',
validate: {
params: modelIdSchema,
},
@ -102,13 +102,13 @@ export function inferenceRoutes({ router, mlLicense }: RouteInitialization) {
/**
* @apiGroup Inference
*
* @api {get} /api/ml/inference/:modelId/pipelines Get model pipelines
* @api {get} /api/ml/trained_models/:modelId/pipelines Get model pipelines
* @apiName GetModelPipelines
* @apiDescription Retrieves pipelines associated with a model
*/
router.get(
{
path: '/api/ml/inference/{modelId}/pipelines',
path: '/api/ml/trained_models/{modelId}/pipelines',
validate: {
params: modelIdSchema,
},
@ -132,13 +132,13 @@ export function inferenceRoutes({ router, mlLicense }: RouteInitialization) {
/**
* @apiGroup Inference
*
* @api {delete} /api/ml/inference/:modelId Get stats of a trained inference model
 * @api {delete} /api/ml/trained_models/:modelId Delete a trained inference model
* @apiName DeleteInferenceModel
* @apiDescription Deletes an existing trained inference model that is currently not referenced by an ingest pipeline.
*/
router.delete(
{
path: '/api/ml/inference/{modelId}',
path: '/api/ml/trained_models/{modelId}',
validate: {
params: modelIdSchema,
},