[Observability AI Assistant] migrate to inference client #197630 #199286

Merged
Changes from all commits · 18 commits
ad9a877  replacing observabilityAIAssistantClient.chat with inferenceClient.chat (arturoliduena, Nov 13, 2024)
1d2509c  Remove unused connector types and delete obsolete adapter files (arturoliduena, Nov 26, 2024)
d2d73a1  Remove processOpenAiStream utility and update related type definitions (arturoliduena, Nov 26, 2024)
7d85c6e  updating Observability AI Assistant client test (arturoliduena, Nov 29, 2024)
469b6a8  update o11y ai assistant truncates the message test (arturoliduena, Dec 2, 2024)
245474d  Enhance chat completion handling and update tests for Observability A… (arturoliduena, Dec 2, 2024)
206baa8  Add tokenCount method to LlmResponseSimulator and update tests for to… (arturoliduena, Dec 2, 2024)
e0821e0  Refactor conversation simulator to use tool_calls structure for funct… (arturoliduena, Dec 2, 2024)
e8094ec  Fix tokenCount method call in chat.spec.ts to await asynchronous exec… (arturoliduena, Dec 2, 2024)
494aded  update test_serverless/api_integration/test_suites/observability/ai_a… (arturoliduena, Dec 2, 2024)
04670ff  Add tokenCount calls to conversation and title simulators in API tests (arturoliduena, Dec 3, 2024)
7101f7d  Update x-pack/test/observability_ai_assistant_api_integration/common/… (arturoliduena, Dec 3, 2024)
f044957  Simplify CreateChatCompletionResponseChunk type definition in OpenAI … (arturoliduena, Dec 3, 2024)
e54706a  Update x-pack/plugins/observability_solution/observability_ai_assista… (arturoliduena, Dec 3, 2024)
e30e0d9  Update x-pack/plugins/observability_solution/observability_ai_assista… (arturoliduena, Dec 3, 2024)
74d3db8  Import Observable in convert_inference_events_to_streaming_events.ts (arturoliduena, Dec 3, 2024)
85e4cfb  Refactor event conversion logic in convertInferenceEventsToStreamingE… (arturoliduena, Dec 4, 2024)
2355ca4  Add toolCalls to InferenceChatCompletionEvent in LLM simulator (arturoliduena, Dec 4, 2024)
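
Taken together, the commits replace the assistant's bespoke connector adapters (OpenAI/Bedrock/Gemini stream handling) with calls through the inference plugin's client, converting messages on the way in and mapping inference events back onto the assistant's streaming-event types on the way out. A minimal sketch of that wiring, using the helper names from the commit messages; the client interface and every signature below are assumptions for illustration, not the PR's literal code:

```ts
import type { Observable, OperatorFunction } from 'rxjs';

// Stand-in types; the real ones live in the plugin's common package.
type Message = unknown;
type InferenceEvent = unknown;
type ChatEvent = unknown;

// Helpers named in the commits; signatures assumed for the sketch.
declare function convertMessagesForInference(messages: Message[]): unknown[];
declare function convertInferenceEventsToStreamingEvents(): OperatorFunction<
  InferenceEvent,
  ChatEvent
>;

// Hypothetical shape of the inference plugin's chat client.
interface InferenceClientLike {
  chat(options: { connectorId: string; messages: unknown[] }): Observable<InferenceEvent>;
}

// Conceptually, observabilityAIAssistantClient.chat now delegates:
function chat(
  inferenceClient: InferenceClientLike,
  connectorId: string,
  messages: Message[]
): Observable<ChatEvent> {
  return inferenceClient
    .chat({ connectorId, messages: convertMessagesForInference(messages) })
    .pipe(convertInferenceEventsToStreamingEvents());
}
```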
@@ -11,12 +11,6 @@ export enum ObservabilityAIAssistantConnectorType {
   Gemini = '.gemini',
 }
 
-export const SUPPORTED_CONNECTOR_TYPES = [
-  ObservabilityAIAssistantConnectorType.OpenAI,
-  ObservabilityAIAssistantConnectorType.Bedrock,
-  ObservabilityAIAssistantConnectorType.Gemini,
-];
-
 export function isSupportedConnectorType(
   type: string
 ): type is ObservabilityAIAssistantConnectorType {
@@ -10,6 +10,7 @@ import { TokenCount as TokenCountType, type Message } from './types';
 
 export enum StreamingChatResponseEventType {
   ChatCompletionChunk = 'chatCompletionChunk',
+  ChatCompletionMessage = 'chatCompletionMessage',
   ConversationCreate = 'conversationCreate',
   ConversationUpdate = 'conversationUpdate',
   MessageAdd = 'messageAdd',
@@ -25,19 +26,26 @@ type StreamingChatResponseEventBase<
   type: TEventType;
 } & TData;
 
-export type ChatCompletionChunkEvent = StreamingChatResponseEventBase<
-  StreamingChatResponseEventType.ChatCompletionChunk,
-  {
-    id: string;
-    message: {
-      content?: string;
-      function_call?: {
-        name?: string;
-        arguments?: string;
-      };
-    };
-  }
->;
+type BaseChatCompletionEvent<TType extends StreamingChatResponseEventType> =
+  StreamingChatResponseEventBase<
+    TType,
+    {
+      id: string;
+      message: {
+        content?: string;
+        function_call?: {
+          name?: string;
+          arguments?: string;
+        };
+      };
+    }
+  >;
+
+export type ChatCompletionChunkEvent =
+  BaseChatCompletionEvent<StreamingChatResponseEventType.ChatCompletionChunk>;
+
+export type ChatCompletionMessageEvent =
+  BaseChatCompletionEvent<StreamingChatResponseEventType.ChatCompletionMessage>;
 
 export type ConversationCreateEvent = StreamingChatResponseEventBase<
   StreamingChatResponseEventType.ConversationCreate,
@@ -100,6 +108,7 @@ export type TokenCountEvent = StreamingChatResponseEventBase<
 
 export type StreamingChatResponseEvent =
   | ChatCompletionChunkEvent
+  | ChatCompletionMessageEvent
   | ConversationCreateEvent
   | ConversationUpdateEvent
   | MessageAddEvent
@@ -112,7 +121,7 @@ export type StreamingChatResponseEventWithoutError = Exclude<
   ChatCompletionErrorEvent
 >;
 
-export type ChatEvent = ChatCompletionChunkEvent | TokenCountEvent;
+export type ChatEvent = ChatCompletionChunkEvent | TokenCountEvent | ChatCompletionMessageEvent;
 export type MessageOrChatEvent = ChatEvent | MessageAddEvent;
 
 export enum ChatCompletionErrorCode {
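
With ChatEvent widened to three members, downstream code can no longer assume every chat event is a chunk. A small sketch (not from the PR) of narrowing on the `type` discriminant, using only the types defined above; the import path is assumed:

```ts
import {
  StreamingChatResponseEventType,
  type ChatCompletionChunkEvent,
  type ChatCompletionMessageEvent,
  type ChatEvent,
} from './conversation_complete';

function isChunkEvent(event: ChatEvent): event is ChatCompletionChunkEvent {
  return event.type === StreamingChatResponseEventType.ChatCompletionChunk;
}

function isMessageEvent(event: ChatEvent): event is ChatCompletionMessageEvent {
  return event.type === StreamingChatResponseEventType.ChatCompletionMessage;
}

// Prefer the single message event when present; otherwise stitch the chunks.
function contentOf(events: ChatEvent[]): string {
  const message = events.find(isMessageEvent);
  if (message) {
    return message.message.content ?? '';
  }
  return events
    .filter(isChunkEvent)
    .map((event) => event.message.content ?? '')
    .join('');
}
```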
@@ -5,13 +5,13 @@
  * 2.0.
  */
 
-import { Message, MessageRole } from '@kbn/observability-ai-assistant-plugin/common';
 import {
   AssistantMessage,
   Message as InferenceMessage,
   MessageRole as InferenceMessageRole,
 } from '@kbn/inference-common';
 import { generateFakeToolCallId } from '@kbn/inference-plugin/common';
+import { Message, MessageRole } from '.';
 
 export function convertMessagesForInference(messages: Message[]): InferenceMessage[] {
   const inferenceMessages: InferenceMessage[] = [];
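
Call sites are unchanged by the import swap; the assistant's own Message and MessageRole now simply come from the local common module rather than the plugin's public export. A usage sketch, assuming the assistant's usual timestamped message envelope (the exact Message shape is not shown in this diff):

```ts
import { MessageRole, type Message } from '.';
import { convertMessagesForInference } from './convert_messages_for_inference'; // path assumed

// Assumed shape: a timestamped envelope around role/content.
const messages: Message[] = [
  {
    '@timestamp': new Date().toISOString(),
    message: { role: MessageRole.User, content: 'What is my current error rate?' },
  },
];

// Produces @kbn/inference-common messages; per the imports above, function
// calls are given fake tool-call ids via generateFakeToolCallId on the way.
const inferenceMessages = convertMessagesForInference(messages);
```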
@@ -17,6 +17,8 @@ export {
 
 export type {
   ChatCompletionChunkEvent,
+  ChatCompletionMessageEvent,
+  TokenCountEvent,
   ConversationCreateEvent,
   ConversationUpdateEvent,
   MessageAddEvent,
@@ -6,8 +6,9 @@
  */
 
 import { cloneDeep } from 'lodash';
-import { type Observable, scan } from 'rxjs';
-import type { ChatCompletionChunkEvent } from '../conversation_complete';
+import { type Observable, scan, filter, defaultIfEmpty } from 'rxjs';
+import type { ChatCompletionChunkEvent, ChatEvent } from '../conversation_complete';
+import { StreamingChatResponseEventType } from '../conversation_complete';
 import { MessageRole } from '../types';
 
 export interface ConcatenatedMessage {
@@ -24,8 +25,12 @@ export interface ConcatenatedMessage {
 
 export const concatenateChatCompletionChunks =
   () =>
-  (source: Observable<ChatCompletionChunkEvent>): Observable<ConcatenatedMessage> =>
+  (source: Observable<ChatEvent>): Observable<ConcatenatedMessage> =>
     source.pipe(
+      filter(
+        (event): event is ChatCompletionChunkEvent =>
+          event.type === StreamingChatResponseEventType.ChatCompletionChunk
+      ),
       scan(
         (acc, { message }) => {
           acc.message.content += message.content ?? '';
@@ -45,5 +50,16 @@
           role: MessageRole.Assistant,
         },
       } as ConcatenatedMessage
-      )
+      ),
+      defaultIfEmpty({
+        message: {
+          content: '',
+          function_call: {
+            name: '',
+            arguments: '',
+            trigger: MessageRole.Assistant,
+          },
+          role: MessageRole.Assistant,
+        },
+      })
     );
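
Two behavioral changes fall out of this diff: the operator now accepts the wider ChatEvent stream and drops anything that is not a chunk before accumulating, and defaultIfEmpty guarantees a single empty ConcatenatedMessage when no chunks arrive at all. A usage sketch with illustrative event values (import paths assumed):

```ts
import { EMPTY, lastValueFrom, of, type Observable } from 'rxjs';
import { StreamingChatResponseEventType, type ChatEvent } from '../conversation_complete';
import { concatenateChatCompletionChunks } from './concatenate_chat_completion_chunks';

async function demo() {
  const chunks$: Observable<ChatEvent> = of(
    {
      type: StreamingChatResponseEventType.ChatCompletionChunk,
      id: 'chunk-1',
      message: { content: 'Hello' },
    },
    {
      type: StreamingChatResponseEventType.ChatCompletionChunk,
      id: 'chunk-1',
      message: { content: ' world' },
    }
  );

  const { message } = await lastValueFrom(chunks$.pipe(concatenateChatCompletionChunks()));
  // message.content === 'Hello world'

  // A stream with no chunk events now completes with the empty fallback
  // instead of completing without ever emitting a value:
  const fallback = await lastValueFrom(EMPTY.pipe(concatenateChatCompletionChunks()));
  // fallback.message.content === ''
}
```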
@@ -14,13 +14,15 @@ import {
   OperatorFunction,
   shareReplay,
   withLatestFrom,
+  filter,
 } from 'rxjs';
-import { withoutTokenCountEvents } from './without_token_count_events';
 import {
-  ChatCompletionChunkEvent,
+  type ChatCompletionChunkEvent,
   ChatEvent,
   MessageAddEvent,
   StreamingChatResponseEventType,
+  StreamingChatResponseEvent,
 } from '../conversation_complete';
 import {
   concatenateChatCompletionChunks,
@@ -51,13 +53,23 @@ function mergeWithEditedMessage(
   );
 }
 
+function filterChunkEvents(): OperatorFunction<
+  StreamingChatResponseEvent,
+  ChatCompletionChunkEvent
+> {
+  return filter(
+    (event): event is ChatCompletionChunkEvent =>
+      event.type === StreamingChatResponseEventType.ChatCompletionChunk
+  );
+}
+
 export function emitWithConcatenatedMessage<T extends ChatEvent>(
   callback?: ConcatenateMessageCallback
 ): OperatorFunction<T, T | MessageAddEvent> {
   return (source$) => {
     const shared = source$.pipe(shareReplay());
 
-    const withoutTokenCount$ = shared.pipe(withoutTokenCountEvents());
+    const withoutTokenCount$ = shared.pipe(filterChunkEvents());
 
     const response$ = concat(
       shared,
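
The swap from withoutTokenCountEvents to filterChunkEvents is the point of this hunk: excluding token counts is no longer sufficient, since the shared stream may now also carry ChatCompletionMessageEvents, and only chunk events should feed the concatenated message. At the call site the operator still re-emits the source stream and appends one MessageAddEvent; a sketch of that contract (not PR code, paths assumed):

```ts
import { lastValueFrom, toArray, type Observable } from 'rxjs';
import type { ChatEvent, MessageAddEvent } from '../conversation_complete';
import { emitWithConcatenatedMessage } from './emit_with_concatenated_message';

// Re-emits every source event, then appends a MessageAddEvent whose message
// concatenates the content and function_call of the chunk events only.
async function collectWithFinalMessage(
  chat$: Observable<ChatEvent>
): Promise<Array<ChatEvent | MessageAddEvent>> {
  return lastValueFrom(chat$.pipe(emitWithConcatenatedMessage(), toArray()));
}
```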

This file was deleted.

@@ -1,38 +1,26 @@
 {
   "type": "plugin",
   "id": "@kbn/observability-ai-assistant-plugin",
-  "owner": [
-    "@elastic/obs-ai-assistant"
-  ],
+  "owner": ["@elastic/obs-ai-assistant"],
   "group": "platform",
   "visibility": "shared",
   "plugin": {
     "id": "observabilityAIAssistant",
     "browser": true,
     "server": true,
-    "configPath": [
-      "xpack",
-      "observabilityAIAssistant"
-    ],
+    "configPath": ["xpack", "observabilityAIAssistant"],
     "requiredPlugins": [
       "actions",
       "features",
       "licensing",
       "security",
       "taskManager",
-      "dataViews"
-    ],
-    "optionalPlugins": [
-      "cloud",
-      "serverless"
-    ],
-    "requiredBundles": [
-      "kibanaReact",
-      "kibanaUtils"
-    ],
-    "runtimePluginDependencies": [
-      "ml"
+      "dataViews",
+      "inference"
     ],
+    "optionalPlugins": ["cloud", "serverless"],
+    "requiredBundles": ["kibanaReact", "kibanaUtils"],
+    "runtimePluginDependencies": ["ml"],
     "extraPublicDirs": []
   }
 }