Skip to content

Commit

Permalink
[Enterprise Search] Bind detach ml inference pipeline to the ML infer…
Browse files Browse the repository at this point in the history
…ence pipeline card (#144098)

* Bind detach ml inference pipeline to the ML inference pipeline card
  • Loading branch information
efegurkan authored Oct 28, 2022
1 parent dfb04fc commit 30fcc1d
Show file tree
Hide file tree
Showing 6 changed files with 176 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { DeleteMlInferencePipelineResponse } from '../../../../../common/types/pipelines';

import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
import { HttpLogic } from '../../../shared/http';

Expand All @@ -12,10 +14,7 @@ export interface DeleteMlInferencePipelineApiLogicArgs {
pipelineName: string;
}

export interface DeleteMlInferencePipelineResponse {
deleted?: string;
updated?: string;
}
export type { DeleteMlInferencePipelineResponse };

export const deleteMlInferencePipeline = async (
args: DeleteMlInferencePipelineApiLogicArgs
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { mockHttpValues } from '../../../__mocks__/kea_logic';

import {
detachMlInferencePipeline,
DetachMlInferencePipelineResponse,
} from './detach_ml_inference_pipeline';

describe('DetachMlInferencePipelineApiLogic', () => {
  const { http } = mockHttpValues;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  describe('detachMlInferencePipeline', () => {
    it('calls detach ml inference api', async () => {
      // Expected payload returned by the detach endpoint: name of the
      // parent pipeline that was updated.
      const mockResponse: DetachMlInferencePipelineResponse = {
        updated: 'parent-pipeline-name',
      };
      http.delete.mockReturnValue(Promise.resolve(mockResponse));

      const result = await detachMlInferencePipeline({
        indexName: 'mock-index-name',
        pipelineName: 'mock-pipeline-name',
      });

      // Route should embed both the index name and the pipeline name.
      expect(http.delete).toHaveBeenCalledWith(
        '/internal/enterprise_search/indices/mock-index-name/ml_inference/pipeline_processors/mock-pipeline-name/detach'
      );
      expect(result).toEqual(mockResponse);
    });
  });
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { DeleteMlInferencePipelineResponse } from '../../../../../common/types/pipelines';

import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
import { HttpLogic } from '../../../shared/http';

/** Arguments required to detach an ML inference pipeline from an index. */
export interface DetachMlInferencePipelineApiLogicArgs {
  // Name of the index whose ML inference pipeline is being modified.
  indexName: string;
  // Name of the inference pipeline processor to detach.
  pipelineName: string;
}

// Detach returns the same shape as delete (`deleted?` / `updated?`),
// so reuse the shared response type from common/types/pipelines.
export type DetachMlInferencePipelineResponse = DeleteMlInferencePipelineResponse;

/**
 * Issues a DELETE to the Enterprise Search internal API to detach the given
 * ML inference pipeline processor from the named index's pipeline.
 *
 * @param args index and pipeline names identifying the processor to detach
 * @returns the API response (names of updated/deleted pipelines)
 */
export const detachMlInferencePipeline = async (
  args: DetachMlInferencePipelineApiLogicArgs
): Promise<DetachMlInferencePipelineResponse> => {
  const { indexName, pipelineName } = args;
  const route = [
    '/internal/enterprise_search/indices',
    indexName,
    'ml_inference/pipeline_processors',
    pipelineName,
    'detach',
  ].join('/');
  return await HttpLogic.values.http.delete<DetachMlInferencePipelineResponse>(route);
};

// Kea API logic wrapping detachMlInferencePipeline; exposes the standard
// createApiLogic actions (makeRequest / apiSuccess / apiError) that
// PipelinesLogic connects to.
export const DetachMlInferencePipelineApiLogic = createApiLogic(
  ['detach_ml_inference_pipeline_api_logic'],
  detachMlInferencePipeline
);
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ export const InferencePipelineCard: React.FC<InferencePipeline> = (pipeline) =>
const { ingestionMethod } = useValues(IndexViewLogic);
const [isPopOverOpen, setIsPopOverOpen] = useState(false);
const [showConfirmDelete, setShowConfirmDelete] = useState(false);
const { deleteMlPipeline } = useActions(PipelinesLogic);
const { deleteMlPipeline, detachMlPipeline } = useActions(PipelinesLogic);
const showConfirmDeleteModal = () => {
setShowConfirmDelete(true);
setIsPopOverOpen(false);
Expand Down Expand Up @@ -107,6 +107,26 @@ export const InferencePipelineCard: React.FC<InferencePipeline> = (pipeline) =>
</EuiButtonEmpty>
</div>
</EuiFlexItem>
<EuiFlexItem>
<div>
<EuiButtonEmpty
data-telemetry-id={`entSearchContent-${ingestionMethod}-pipelines-inferencePipeline-detachPipeline`}
size="s"
flush="both"
iconType="unlink"
color="text"
onClick={() => {
detachMlPipeline({ indexName, pipelineName });
setIsPopOverOpen(false);
}}
>
{i18n.translate(
'xpack.enterpriseSearch.inferencePipelineCard.action.detach',
{ defaultMessage: 'Detach pipeline' }
)}
</EuiButtonEmpty>
</div>
</EuiFlexItem>
<EuiFlexItem>
<div>
<EuiButtonEmpty
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,18 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { LogicMounter, mockFlashMessageHelpers } from '../../../../__mocks__/kea_logic';
import { apiIndex, connectorIndex } from '../../../__mocks__/view_index.mock';

import type { IngestPipeline } from '@elastic/elasticsearch/lib/api/types';

import { nextTick } from '@kbn/test-jest-helpers';

import { UpdatePipelineApiLogic } from '../../../api/connector/update_pipeline_api_logic';
import { FetchCustomPipelineApiLogic } from '../../../api/index/fetch_custom_pipeline_api_logic';
import { FetchIndexApiLogic } from '../../../api/index/fetch_index_api_logic';
import { DetachMlInferencePipelineApiLogic } from '../../../api/pipelines/detach_ml_inference_pipeline';

import { PipelinesLogic } from './pipelines_logic';

Expand Down Expand Up @@ -43,6 +47,9 @@ describe('PipelinesLogic', () => {
const { mount: mountFetchIndexApiLogic } = new LogicMounter(FetchIndexApiLogic);
const { mount: mountUpdatePipelineLogic } = new LogicMounter(UpdatePipelineApiLogic);
const { mount: mountFetchCustomPipelineApiLogic } = new LogicMounter(FetchCustomPipelineApiLogic);
const { mount: mountDetachMlInferencePipelineApiLogic } = new LogicMounter(
DetachMlInferencePipelineApiLogic
);
const { clearFlashMessages, flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;

const newPipeline = {
Expand All @@ -53,6 +60,7 @@ describe('PipelinesLogic', () => {

beforeEach(() => {
jest.clearAllMocks();
mountDetachMlInferencePipelineApiLogic();
mountFetchIndexApiLogic();
mountFetchCustomPipelineApiLogic();
mountUpdatePipelineLogic();
Expand Down Expand Up @@ -235,5 +243,29 @@ describe('PipelinesLogic', () => {
});
});
});
describe('detachMlPipelineSuccess', () => {
  it('re-fetches pipeline data', async () => {
    jest.spyOn(PipelinesLogic.actions, 'fetchMlInferenceProcessors');
    jest.spyOn(PipelinesLogic.actions, 'fetchCustomPipeline');
    // Seed the logic with an index first so values.index.name is populated
    // when the detach-success listener fires.
    FetchIndexApiLogic.actions.apiSuccess(connectorIndex);
    DetachMlInferencePipelineApiLogic.actions.apiSuccess({
      updated: 'mock-pipeline-name',
    });
    await nextTick();
    // Success listener should trigger both re-fetches for the current index.
    expect(PipelinesLogic.actions.fetchMlInferenceProcessors).toHaveBeenCalledWith({
      indexName: connectorIndex.name,
    });
    expect(PipelinesLogic.actions.fetchCustomPipeline).toHaveBeenCalledWith({
      indexName: connectorIndex.name,
    });
  });
});
describe('detachMlPipelineError', () => {
  it('calls flashAPIErrors', () => {
    // NOTE(review): 'error' as any stands in for the HttpError the action
    // really takes — consider a typed HttpError fixture instead of `any`.
    DetachMlInferencePipelineApiLogic.actions.apiError('error' as any);
    expect(flashAPIErrors).toHaveBeenCalledTimes(1);
    expect(flashAPIErrors).toHaveBeenCalledWith('error');
  });
});
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,6 @@ import {
FetchIndexApiParams,
FetchIndexApiResponse,
} from '../../../api/index/fetch_index_api_logic';
import {
DeleteMlInferencePipelineApiLogic,
DeleteMlInferencePipelineApiLogicArgs,
DeleteMlInferencePipelineResponse,
} from '../../../api/ml_models/delete_ml_inference_pipeline';
import {
AttachMlInferencePipelineApiLogic,
AttachMlInferencePipelineApiLogicArgs,
Expand All @@ -62,6 +57,17 @@ import {
CreateMlInferencePipelineApiLogicArgs,
CreateMlInferencePipelineResponse,
} from '../../../api/pipelines/create_ml_inference_pipeline';
import {
DeleteMlInferencePipelineApiLogic,
DeleteMlInferencePipelineApiLogicArgs,
DeleteMlInferencePipelineResponse,
} from '../../../api/pipelines/delete_ml_inference_pipeline';
import {
DetachMlInferencePipelineApiLogic,
DetachMlInferencePipelineApiLogicArgs,
DetachMlInferencePipelineResponse,
} from '../../../api/pipelines/detach_ml_inference_pipeline';

import { FetchMlInferencePipelineProcessorsApiLogic } from '../../../api/pipelines/fetch_ml_inference_pipeline_processors';
import { isApiIndex, isConnectorIndex, isCrawlerIndex } from '../../../utils/indices';

Expand Down Expand Up @@ -103,6 +109,18 @@ type PipelinesActions = Pick<
DeleteMlInferencePipelineApiLogicArgs,
DeleteMlInferencePipelineResponse
>['apiSuccess'];
detachMlPipeline: Actions<
DetachMlInferencePipelineApiLogicArgs,
DetachMlInferencePipelineResponse
>['makeRequest'];
detachMlPipelineError: Actions<
DetachMlInferencePipelineApiLogicArgs,
DetachMlInferencePipelineResponse
>['apiError'];
detachMlPipelineSuccess: Actions<
DetachMlInferencePipelineApiLogicArgs,
DetachMlInferencePipelineResponse
>['apiSuccess'];
fetchCustomPipeline: Actions<
FetchCustomPipelineApiLogicArgs,
FetchCustomPipelineApiLogicResponse
Expand Down Expand Up @@ -180,6 +198,12 @@ export const PipelinesLogic = kea<MakeLogicType<PipelinesValues, PipelinesAction
'apiSuccess as deleteMlPipelineSuccess',
'makeRequest as deleteMlPipeline',
],
DetachMlInferencePipelineApiLogic,
[
'apiError as detachMlPipelineError',
'apiSuccess as detachMlPipelineSuccess',
'makeRequest as detachMlPipeline',
],
],
values: [
FetchCustomPipelineApiLogic,
Expand Down Expand Up @@ -272,6 +296,26 @@ export const PipelinesLogic = kea<MakeLogicType<PipelinesValues, PipelinesAction
// Needed to ensure correct JSON is available in the JSON configurations tab
actions.fetchCustomPipeline({ indexName: values.index.name });
},
// Surface any detach API failure to the user via flash messages.
detachMlPipelineError: (error) => flashAPIErrors(error),
detachMlPipelineSuccess: (response) => {
  // Only toast when the API reports which parent pipeline was updated;
  // a response without `updated` yields no user-facing confirmation.
  if (response.updated) {
    flashSuccessToast(
      i18n.translate(
        'xpack.enterpriseSearch.content.indices.pipelines.successToastDetachMlPipeline.title',
        {
          defaultMessage: 'Detached machine learning inference pipeline from "{pipelineName}"',
          values: {
            pipelineName: response.updated,
          },
        }
      )
    );
  }
  // Re-fetch processors to ensure we display newly removed ml processor
  actions.fetchMlInferenceProcessors({ indexName: values.index.name });
  // Needed to ensure correct JSON is available in the JSON configurations tab
  actions.fetchCustomPipeline({ indexName: values.index.name });
},
fetchIndexApiSuccess: (index) => {
if (!values.showModal) {
// Don't do this when the modal is open to avoid overwriting the values while editing
Expand Down

0 comments on commit 30fcc1d

Please sign in to comment.