Skip to content

Commit

Permalink
Replace `observabilityAIAssistantClient.chat` with `inferenceClient.chat`
Browse files — browse the repository at this point in the history
  • Loading branch information
arturoliduena committed Nov 11, 2024
1 parent 4eda49f commit c59ff8b
Show file tree
Hide file tree
Showing 16 changed files with 144 additions and 184 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
*/

import { i18n } from '@kbn/i18n';
import type { ChatCompletionChunkEvent as InferenceChatCompletionChunkEvent } from '@kbn/inference-common';
import { TokenCount as TokenCountType, type Message } from './types';

export enum StreamingChatResponseEventType {
Expand Down Expand Up @@ -105,14 +106,19 @@ export type StreamingChatResponseEvent =
| MessageAddEvent
| ChatCompletionErrorEvent
| TokenCountEvent
| BufferFlushEvent;
| BufferFlushEvent
| InferenceChatCompletionChunkEvent;

export type StreamingChatResponseEventWithoutError = Exclude<
StreamingChatResponseEvent,
ChatCompletionErrorEvent
>;

export type ChatEvent = ChatCompletionChunkEvent | TokenCountEvent;
export type ChatEvent =
| InferenceChatCompletionChunkEvent
| ChatCompletionChunkEvent
| TokenCountEvent;

export type MessageOrChatEvent = ChatEvent | MessageAddEvent;

export enum ChatCompletionErrorCode {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@
* 2.0.
*/

import { Message, MessageRole } from '@kbn/observability-ai-assistant-plugin/common';
import {
AssistantMessage,
Message as InferenceMessage,
MessageRole as InferenceMessageRole,
} from '@kbn/inference-common';
import { generateFakeToolCallId } from '@kbn/inference-plugin/common';
import { Message, MessageRole } from '.';

export function convertMessagesForInference(messages: Message[]): InferenceMessage[] {
const inferenceMessages: InferenceMessage[] = [];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,11 @@
*/

import { cloneDeep } from 'lodash';
import { type Observable, scan } from 'rxjs';
import { type Observable, scan, filter } from 'rxjs';
import type { ChatCompletionChunkEvent as InferenceChatCompletionChunkEvent } from '@kbn/inference-common';
import type { ChatCompletionChunkEvent } from '../conversation_complete';
import { StreamingChatResponseEventType } from '../conversation_complete';
import { MessageRole } from '../types';

export interface ConcatenatedMessage {
message: {
content: string;
Expand All @@ -24,14 +25,21 @@ export interface ConcatenatedMessage {

export const concatenateChatCompletionChunks =
() =>
(source: Observable<ChatCompletionChunkEvent>): Observable<ConcatenatedMessage> =>
(
source: Observable<ChatCompletionChunkEvent | InferenceChatCompletionChunkEvent>
): Observable<ConcatenatedMessage> =>
source.pipe(
filter(
(event): event is InferenceChatCompletionChunkEvent =>
event.type === StreamingChatResponseEventType.ChatCompletionChunk
),
scan(
(acc, { message }) => {
acc.message.content += message.content ?? '';
acc.message.function_call.name += message.function_call?.name ?? '';
acc.message.function_call.arguments += message.function_call?.arguments ?? '';

(acc, event) => {
acc.message.content += event.content ?? '';
if (event.tool_calls.length > 0) {
acc.message.function_call.name += event.tool_calls[0].function.name ?? '';
acc.message.function_call.arguments += event.tool_calls[0].function.arguments ?? '';
}
return cloneDeep(acc);
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import {
shareReplay,
withLatestFrom,
} from 'rxjs';
import { ChatCompletionChunkEvent as InferenceChatCompletionChunkEvent } from '@kbn/inference-common';
import { withoutTokenCountEvents } from './without_token_count_events';
import {
ChatCompletionChunkEvent,
Expand All @@ -33,14 +34,14 @@ type ConcatenateMessageCallback = (

function mergeWithEditedMessage(
originalMessage: ConcatenatedMessage,
chunkEvent: ChatCompletionChunkEvent,
chunkEvent: ChatCompletionChunkEvent | InferenceChatCompletionChunkEvent,
callback?: ConcatenateMessageCallback
): Observable<MessageAddEvent> {
return from(
(callback ? callback(originalMessage) : Promise.resolve(originalMessage)).then((message) => {
const next: MessageAddEvent = {
type: StreamingChatResponseEventType.MessageAdd as const,
id: chunkEvent.id,
id: 'id' in chunkEvent ? chunkEvent.id : '',
message: {
'@timestamp': new Date().toISOString(),
...message,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,38 +1,26 @@
{
"type": "plugin",
"id": "@kbn/observability-ai-assistant-plugin",
"owner": [
"@elastic/obs-ai-assistant"
],
"owner": ["@elastic/obs-ai-assistant"],
"group": "platform",
"visibility": "shared",
"plugin": {
"id": "observabilityAIAssistant",
"browser": true,
"server": true,
"configPath": [
"xpack",
"observabilityAIAssistant"
],
"configPath": ["xpack", "observabilityAIAssistant"],
"requiredPlugins": [
"actions",
"features",
"licensing",
"security",
"taskManager",
"dataViews"
],
"optionalPlugins": [
"cloud",
"serverless"
],
"requiredBundles": [
"kibanaReact",
"kibanaUtils"
],
"runtimePluginDependencies": [
"ml"
"dataViews",
"inference"
],
"optionalPlugins": ["cloud", "serverless"],
"requiredBundles": ["kibanaReact", "kibanaUtils"],
"runtimePluginDependencies": ["ml"],
"extraPublicDirs": []
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { AbortError } from '@kbn/kibana-utils-plugin/common';
import type { NotificationsStart } from '@kbn/core/public';
import type { AssistantScope } from '@kbn/ai-assistant-common';
import { ChatCompletionEventType } from '@kbn/inference-common';
import {
MessageRole,
type Message,
Expand Down Expand Up @@ -199,24 +200,22 @@ function useChatWithoutContext({
const subscription = next$.subscribe({
next: (event) => {
switch (event.type) {
case StreamingChatResponseEventType.ChatCompletionChunk:
case ChatCompletionEventType.ChatCompletionChunk:
if (!pendingMessage) {
pendingMessage = {
'@timestamp': new Date().toISOString(),
message: {
content: event.message.content || '',
content: event.content || '',
function_call: {
name: event.message.function_call?.name || '',
arguments: event.message.function_call?.arguments || '',
name: '',
arguments: '',
},
},
};
} else {
pendingMessage.message.content += event.message.content || '';
pendingMessage.message.function_call.name +=
event.message.function_call?.name || '';
pendingMessage.message.function_call.arguments +=
event.message.function_call?.arguments || '';
pendingMessage.message.content += event.content || '';
pendingMessage.message.function_call.name += '';
pendingMessage.message.function_call.arguments += '';
}
break;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import type OpenAI from 'openai';
import { Subject } from 'rxjs';
import { EventEmitter, PassThrough, type Readable } from 'stream';
import { finished } from 'stream/promises';
import type { InferenceClient } from '@kbn/inference-plugin/server';
import { ObservabilityAIAssistantClient } from '.';
import { MessageRole, type Message } from '../../../common';
import { ObservabilityAIAssistantConnectorType } from '../../../common/connectors';
Expand Down Expand Up @@ -94,6 +95,10 @@ describe('Observability AI Assistant client', () => {
get: jest.fn(),
} as any;

const inferenceClientMock: DeeplyMockedKeys<InferenceClient> = {
chatComplete: jest.fn(),
} as any;

const uiSettingsClientMock: DeeplyMockedKeys<IUiSettingsClient> = {
get: jest.fn(),
} as any;
Expand Down Expand Up @@ -183,6 +188,7 @@ describe('Observability AI Assistant client', () => {
asInternalUser: internalUserEsClientMock,
asCurrentUser: currentUserEsClientMock,
},
inferenceClient: inferenceClientMock,
knowledgeBaseService: knowledgeBaseServiceMock,
logger: loggerMock,
namespace: 'default',
Expand Down
Loading

0 comments on commit c59ff8b

Please sign in to comment.