# [Search] Consolidate ML model fetch calls (elastic#176257)
## Summary

With the introduction of
[fetch_ml_models.ts](https://github.com/elastic/kibana/blob/main/x-pack/plugins/enterprise_search/server/lib/ml/fetch_ml_models.ts),
the fetching and enriching of ML models for Search purposes has been
consolidated into that API. This allows us to remove the dependency on the
older methods that work with ML plugin-specific `TrainedModel` entities.

This PR makes the following changes:
- Switch code that depends on ML models over to the new function from
`fetch_ml_models.ts` (which already handles sorting and filtering).
- Move the fetch process to `ml_inference_logic.ts`, and begin polling
periodically after the logic mounts. This enables passing values down to
lower components, e.g. `model_select_logic.ts`, instead of repeating the
fetch there (a polling sketch follows this list).
- Use `MlModel` instead of `TrainedModel`/`MlTrainedModelConfig`. This
requires adding some missing properties to `MlModel`: `types`,
`inputFieldNames`, `version`.
- Remove the old fetch methods
(`x-pack/plugins/enterprise_search/server/lib/ml/ml_*_logic.ts`).
- Remove the "no models available" component and condition, since as of
8.12 at least the ELSER/E5 placeholders are always present.
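
As a rough illustration of the polling behavior described in the second bullet, here is a minimal sketch of the cadence the cached fetch logic follows. The real implementation is a Kea logic; the helper below is hypothetical and only mirrors the 5-second/30-second intervals visible in the diff further down.

```typescript
// Hypothetical helper (not the actual Kea logic): poll the models API every
// 5 seconds, backing off to 30 seconds after a failed fetch.
const FETCH_MODELS_POLLING_DURATION = 5000; // 5 seconds
const FETCH_MODELS_POLLING_DURATION_ON_FAILURE = 30000; // 30 seconds

export const startPollingModels = (
  fetchModels: () => Promise<unknown> // assumed fetch callback
): (() => void) => {
  let timeoutId: ReturnType<typeof setTimeout> | undefined;

  const poll = async () => {
    let delay = FETCH_MODELS_POLLING_DURATION;
    try {
      await fetchModels();
    } catch {
      delay = FETCH_MODELS_POLLING_DURATION_ON_FAILURE;
    }
    timeoutId = setTimeout(poll, delay);
  };

  void poll();
  // The returned stopper mirrors "stop polling when the logic unmounts".
  return () => clearTimeout(timeoutId);
};
```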

### Checklist
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios

---------

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
2 people authored and CoenWarmer committed Feb 15, 2024
1 parent 9ac11fe commit 9e29f77
Showing 29 changed files with 345 additions and 1,023 deletions.
```diff
@@ -8,6 +8,7 @@
 import { MlTrainedModelConfig, MlTrainedModelStats } from '@elastic/elasticsearch/lib/api/types';
 import { BUILT_IN_MODEL_TAG, TRAINED_MODEL_TYPE } from '@kbn/ml-trained-models-utils';
 
+import { MlModel, MlModelDeploymentState } from '../types/ml';
 import { MlInferencePipeline, TrainedModelState } from '../types/pipelines';
 
 import {
@@ -19,7 +20,7 @@ import {
   parseModelStateReasonFromStats,
 } from '.';
 
-const mockModel: MlTrainedModelConfig = {
+const mockTrainedModel: MlTrainedModelConfig = {
   inference_config: {
     ner: {},
   },
@@ -32,8 +33,27 @@ const mockModel: MlTrainedModelConfig = {
   version: '1',
 };
 
+const mockModel: MlModel = {
+  modelId: 'model_1',
+  type: 'ner',
+  title: 'Model 1',
+  description: 'Model 1 description',
+  licenseType: 'elastic',
+  modelDetailsPageUrl: 'https://my-model.ai',
+  deploymentState: MlModelDeploymentState.NotDeployed,
+  startTime: 0,
+  targetAllocationCount: 0,
+  nodeAllocationCount: 0,
+  threadsPerAllocation: 0,
+  isPlaceholder: false,
+  hasStats: false,
+  types: ['pytorch', 'ner'],
+  inputFieldNames: ['title'],
+  version: '1',
+};
+
 describe('getMlModelTypesForModelConfig lib function', () => {
-  const builtInMockModel: MlTrainedModelConfig = {
+  const builtInMockTrainedModel: MlTrainedModelConfig = {
     inference_config: {
       text_classification: {},
     },
@@ -47,13 +67,13 @@ describe('getMlModelTypesForModelConfig lib function', () => {
 
   it('should return the model type and inference config type', () => {
     const expected = ['pytorch', 'ner'];
-    const response = getMlModelTypesForModelConfig(mockModel);
+    const response = getMlModelTypesForModelConfig(mockTrainedModel);
     expect(response.sort()).toEqual(expected.sort());
   });
 
   it('should include the built in type', () => {
     const expected = ['lang_ident', 'text_classification', BUILT_IN_MODEL_TAG];
-    const response = getMlModelTypesForModelConfig(builtInMockModel);
+    const response = getMlModelTypesForModelConfig(builtInMockTrainedModel);
     expect(response.sort()).toEqual(expected.sort());
   });
 });
@@ -71,9 +91,9 @@ describe('generateMlInferencePipelineBody lib function', () => {
       {
         inference: {
           field_map: {
-            'my-source-field': 'MODEL_INPUT_FIELD',
+            'my-source-field': 'title',
           },
-          model_id: 'test_id',
+          model_id: 'model_1',
           on_failure: [
             {
               append: {
@@ -154,21 +174,21 @@
       {
         inference: expect.objectContaining({
           field_map: {
-            'my-source-field1': 'MODEL_INPUT_FIELD',
+            'my-source-field1': 'title',
           },
         }),
       },
       {
         inference: expect.objectContaining({
           field_map: {
-            'my-source-field2': 'MODEL_INPUT_FIELD',
+            'my-source-field2': 'title',
           },
         }),
       },
       {
         inference: expect.objectContaining({
           field_map: {
-            'my-source-field3': 'MODEL_INPUT_FIELD',
+            'my-source-field3': 'title',
           },
         }),
       },
```
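For orientation, the union these tests assert (the model type, the inference config keys, and the built-in tag when present) can be sketched as below. This is an illustrative approximation with a hypothetical name, not the actual `getMlModelTypesForModelConfig` implementation; elsewhere in this PR, client code switches to the precomputed `model.types` instead of calling the function.

```typescript
import { BUILT_IN_MODEL_TAG } from '@kbn/ml-trained-models-utils';

interface TrainedModelConfigSubset {
  model_type?: string;
  inference_config?: Record<string, unknown>;
  tags?: string[];
}

// Approximation of the asserted behavior, e.g.
// { model_type: 'pytorch', inference_config: { ner: {} } } => ['pytorch', 'ner'].
const getMlModelTypesSketch = (config: TrainedModelConfigSubset): string[] => [
  ...new Set([
    ...(config.model_type ? [config.model_type] : []),
    ...Object.keys(config.inference_config ?? {}),
    ...((config.tags ?? []).includes(BUILT_IN_MODEL_TAG) ? [BUILT_IN_MODEL_TAG] : []),
  ]),
];
```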
```diff
@@ -18,6 +18,8 @@ import {
   BUILT_IN_MODEL_TAG,
 } from '@kbn/ml-trained-models-utils';
 
+import { MlModel } from '../types/ml';
+
 import {
   MlInferencePipeline,
   CreateMLInferencePipeline,
@@ -33,7 +35,7 @@ export interface MlInferencePipelineParams {
   description?: string;
   fieldMappings: FieldMapping[];
   inferenceConfig?: InferencePipelineInferenceConfig;
-  model: MlTrainedModelConfig;
+  model: MlModel;
   pipelineName: string;
 }
 
@@ -90,7 +92,7 @@ export const generateMlInferencePipelineBody = ({
           model_version: model.version,
           pipeline: pipelineName,
           processed_timestamp: '{{{ _ingest.timestamp }}}',
-          types: getMlModelTypesForModelConfig(model),
+          types: model.types,
         },
       ],
     },
@@ -104,19 +106,19 @@ export const getInferenceProcessor = (
   sourceField: string,
   targetField: string,
   inferenceConfig: InferencePipelineInferenceConfig | undefined,
-  model: MlTrainedModelConfig,
+  model: MlModel,
   pipelineName: string
 ): IngestInferenceProcessor => {
   // If model returned no input field, insert a placeholder
   const modelInputField =
-    model.input?.field_names?.length > 0 ? model.input.field_names[0] : 'MODEL_INPUT_FIELD';
+    model.inputFieldNames.length > 0 ? model.inputFieldNames[0] : 'MODEL_INPUT_FIELD';
 
   return {
     field_map: {
       [sourceField]: modelInputField,
     },
     inference_config: inferenceConfig,
-    model_id: model.model_id,
+    model_id: model.modelId,
     on_failure: [
       {
         append: {
```
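To make the last hunk concrete, here is a hedged usage sketch (not code from the PR) of how the processor's `field_map` is now resolved from the flattened `inputFieldNames` array rather than `model.input?.field_names`:

```typescript
// Minimal sketch, assuming only the MlModel fields visible in this diff.
interface MlModelSubset {
  modelId: string;
  inputFieldNames: string[];
}

const model: MlModelSubset = { modelId: 'model_1', inputFieldNames: ['title'] };

// Same guard as in the diff: fall back to a placeholder when the model
// exposes no input field.
const modelInputField =
  model.inputFieldNames.length > 0 ? model.inputFieldNames[0] : 'MODEL_INPUT_FIELD';

const fieldMap = { 'my-source-field': modelInputField };
// => { 'my-source-field': 'title' }, matching the updated test expectations
```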
**x-pack/plugins/enterprise_search/common/types/ml.ts** (5 additions, 0 deletions)

```diff
@@ -27,6 +27,10 @@ export interface MlModel {
   modelId: string;
   /** Model inference type, e.g. ner, text_classification */
   type: string;
+  /** Type-related tags: model type (e.g. pytorch), inference type, built-in tag */
+  types: string[];
+  /** Field names in inference input configuration */
+  inputFieldNames: string[];
   title: string;
   description?: string;
   licenseType?: string;
@@ -44,4 +48,5 @@ export interface MlModel {
   isPlaceholder: boolean;
   /** Does this model have deployment stats? */
   hasStats: boolean;
+  version?: string;
 }
```
```diff
@@ -30,15 +30,21 @@ const DEFAULT_VALUES: CachedFetchModelsApiLogicValues = {
 const FETCH_MODELS_API_DATA_RESPONSE: MlModel[] = [
   {
     modelId: 'model_1',
-    title: 'Model 1',
     type: 'ner',
+    title: 'Model 1',
+    description: 'Model 1 description',
+    licenseType: 'elastic',
+    modelDetailsPageUrl: 'https://my-model.ai',
     deploymentState: MlModelDeploymentState.NotDeployed,
     startTime: 0,
     targetAllocationCount: 0,
     nodeAllocationCount: 0,
     threadsPerAllocation: 0,
     isPlaceholder: false,
     hasStats: false,
+    types: ['pytorch', 'ner'],
+    inputFieldNames: ['title'],
+    version: '1',
   },
 ];
 const FETCH_MODELS_API_ERROR_RESPONSE = {
```
```diff
@@ -18,6 +18,8 @@ import { FetchModelsApiLogic, FetchModelsApiResponse } from './fetch_models_api_logic';
 const FETCH_MODELS_POLLING_DURATION = 5000; // 5 seconds
 const FETCH_MODELS_POLLING_DURATION_ON_FAILURE = 30000; // 30 seconds
 
+export type { FetchModelsApiResponse } from './fetch_models_api_logic';
+
 export interface CachedFetchModlesApiLogicActions {
   apiError: Actions<{}, FetchModelsApiResponse>['apiError'];
   apiReset: Actions<{}, FetchModelsApiResponse>['apiReset'];
```
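One consequence of the re-export added above: downstream logics can import the response type from the cached module directly instead of reaching into `fetch_models_api_logic`. A hypothetical consumer (the module path is inferred from the interface names, not shown in this diff):

```typescript
// Hypothetical import; path assumed to be the cached logic module.
import type { FetchModelsApiResponse } from './cached_fetch_models_api_logic';
```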

Four files were deleted (contents not shown).
