Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use StreamText with forced formatting #693

Merged
merged 29 commits into from
Oct 31, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,8 @@ jobs:
echo "VITE_SUPABASE_API_URL=${{ secrets.SUPABASE_API_URL }}" >> $GITHUB_ENV
echo "VITE_SUPABASE_ANON_KEY=${{ secrets.SUPABASE_ANON_KEY }}" >> $GITHUB_ENV
echo "VITE_MIXPANEL_TOKEN=${{ secrets.MIXPANEL_TOKEN }}" >> $GITHUB_ENV
echo "VITE_ANTHROPIC_API_KEY=${{ secrets.ANTHROPIC_API_KEY }}" >> $GITHUB_ENV
echo "VITE_OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> $GITHUB_ENV

- name: Build/release Electron app
uses: samuelmeuli/action-electron-builder@v1
Expand Down
34 changes: 34 additions & 0 deletions apps/studio/electron/main/chat/helpers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import { StreamReponseObject } from '@onlook/models/chat';
import { type DeepPartial } from 'ai';
import { Allow, parse } from 'partial-json';
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';

export function parseObjectFromText(
text: string,
): DeepPartial<z.infer<typeof StreamReponseObject>> {
const cleanedText = stripFullText(text);
return parse(cleanedText, Allow.ALL) as DeepPartial<z.infer<typeof StreamReponseObject>>;
}

export function getFormatString() {
    // Convert the zod response schema to JSON Schema and embed it in the
    // prompt so the model returns output matching StreamReponseObject.
    const schema = zodToJsonSchema(StreamReponseObject);
    return `\nReturn your response only in this JSON format: <format>${JSON.stringify(schema)}</format>`;
}

/**
 * Removes surrounding markdown code fences (``` or ```json) from model
 * output so the remaining text can be parsed as JSON.
 *
 * @param fullText - Raw model output, possibly wrapped in a code fence.
 * @returns The text with any leading/trailing fence markers removed.
 */
export function stripFullText(fullText: string) {
    let text = fullText;

    // Check the longer '```json\n' fence FIRST. The original checked the
    // bare '```' prefix before '```json\n'; stripping 3 chars made the
    // second check unreachable and left a stray 'json\n' at the start.
    if (text.startsWith('```json\n')) {
        text = text.slice(8);
    } else if (text.startsWith('```')) {
        text = text.slice(3);
    }

    if (text.endsWith('```')) {
        text = text.slice(0, -3);
    }
    return text;
}
113 changes: 73 additions & 40 deletions apps/studio/electron/main/chat/index.ts
Original file line number Diff line number Diff line change
@@ -1,22 +1,57 @@
import Anthropic from '@anthropic-ai/sdk';
import type { MessageParam } from '@anthropic-ai/sdk/resources/messages';
import { mainWindow } from '..';
import { GENERATE_CODE_TOOL } from './tool';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { StreamReponseObject } from '@onlook/models/chat';
import { MainChannels } from '@onlook/models/constants';
import { type CoreMessage, type DeepPartial, type LanguageModelV1, streamText } from 'ai';
import { z } from 'zod';
import { mainWindow } from '..';
import { getFormatString, parseObjectFromText } from './helpers';

enum LLMProvider {
ANTHROPIC = 'anthropic',
OPENAI = 'openai',
}

enum CLAUDE_MODELS {
SONNET = 'claude-3-5-sonnet-latest',
HAIKU = 'claude-3-haiku-20240307',
}

enum OPEN_AI_MODELS {
GPT_4O = 'gpt-4o',
GPT_4O_MINI = 'gpt-4o-mini',
GPT_4_TURBO = 'gpt-4-turbo',
}

class LLMService {
private static instance: LLMService;
private anthropic: Anthropic;
private provider = LLMProvider.ANTHROPIC;
private model: LanguageModelV1;

private constructor() {
this.anthropic = new Anthropic({
apiKey: import.meta.env.VITE_ANTHROPIC_API_KEY,
});
this.model = this.initModel();
}

initModel() {
switch (this.provider) {
case LLMProvider.ANTHROPIC: {
const anthropic = createAnthropic({
apiKey: import.meta.env.VITE_ANTHROPIC_API_KEY,
});

return anthropic(CLAUDE_MODELS.SONNET, {
cacheControl: true,
});
}
case LLMProvider.OPENAI: {
const openai = createOpenAI({
apiKey: import.meta.env.VITE_OPENAI_API_KEY,
});
return openai(OPEN_AI_MODELS.GPT_4O, {
structuredOutputs: true,
});
}
}
}

public static getInstance(): LLMService {
Expand All @@ -26,56 +61,54 @@ class LLMService {
return LLMService.instance;
}

public async send(messages: MessageParam[]): Promise<Anthropic.Messages.Message> {
return this.anthropic.messages.create({
model: CLAUDE_MODELS.SONNET,
max_tokens: 4096,
system: 'You are a seasoned React and Tailwind expert.',
messages,
tools: [GENERATE_CODE_TOOL],
});
}

public async stream(
messages: MessageParam[],
requestId: string,
): Promise<Anthropic.Messages.Message | null> {
messages: CoreMessage[],
): Promise<z.infer<typeof StreamReponseObject> | null> {
try {
const stream = this.anthropic.messages.stream({
model: CLAUDE_MODELS.SONNET,
max_tokens: 4096,
system: 'You are a seasoned React and Tailwind expert.',
const { textStream, text } = await streamText({
model: this.model,
system: 'You are a seasoned React and Tailwind expert.' + getFormatString(),
messages,
tools: [GENERATE_CODE_TOOL],
stream: true,
});

for await (const event of stream) {
this.emitEvent(requestId, event);
}

const finalMessage = await stream.finalMessage();
this.emitFinalMessage(requestId, finalMessage);
return finalMessage;
this.emitStreamEvents(textStream);
const fullObject = parseObjectFromText(await text) as z.infer<
typeof StreamReponseObject
>;
this.emitFinalMessage('id', fullObject);
return fullObject;
} catch (error) {
console.error('Error receiving stream', error);
const errorMessage = this.getErrorMessage(error);
this.emitErrorMessage(requestId, errorMessage);
this.emitErrorMessage('requestId', errorMessage);
return null;
}
}

private emitEvent(requestId: string, message: Anthropic.Messages.RawMessageStreamEvent) {
mainWindow?.webContents.send(MainChannels.CHAT_STREAM_EVENT, {
async emitStreamEvents(textStream: AsyncIterable<string>) {
try {
let fullText = '';
for await (const partialText of textStream) {
fullText += partialText;
const partialObject = parseObjectFromText(fullText);
this.emitEvent('id', partialObject);
}
} catch (error) {
console.error('Error parsing stream', error);
}
}

private emitEvent(requestId: string, object: DeepPartial<z.infer<typeof StreamReponseObject>>) {
mainWindow?.webContents.send(MainChannels.CHAT_STREAM_PARTIAL, {
requestId,
message,
object,
});
}

private emitFinalMessage(requestId: string, message: Anthropic.Messages.Message) {
private emitFinalMessage(requestId: string, object: z.infer<typeof StreamReponseObject>) {
mainWindow?.webContents.send(MainChannels.CHAT_STREAM_FINAL_MESSAGE, {
requestId,
message,
object,
});
}

Expand Down
35 changes: 0 additions & 35 deletions apps/studio/electron/main/chat/tool.ts

This file was deleted.

13 changes: 4 additions & 9 deletions apps/studio/electron/main/events/chat.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,14 @@
import type { MessageParam } from '@anthropic-ai/sdk/resources/messages';
import { MainChannels } from '@onlook/models/constants';
import type { CoreMessage } from 'ai';
import { ipcMain } from 'electron';
import Chat from '../chat';
import { MainChannels } from '@onlook/models/constants';

export function listenForChatMessages() {
ipcMain.handle(MainChannels.SEND_CHAT_MESSAGES, (e: Electron.IpcMainInvokeEvent, args) => {
const messages = args as MessageParam[];
return Chat.send(messages);
});

ipcMain.handle(
MainChannels.SEND_CHAT_MESSAGES_STREAM,
(e: Electron.IpcMainInvokeEvent, args) => {
const { messages, requestId } = args as { messages: MessageParam[]; requestId: string };
return Chat.stream(messages, requestId);
const { messages, requestId } = args as { messages: CoreMessage[]; requestId: string };
return Chat.stream(messages);
},
);
}
9 changes: 7 additions & 2 deletions apps/studio/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,16 +36,19 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@ai-sdk/anthropic": "^0.0.53",
"@ai-sdk/openai": "^0.0.70",
"@anthropic-ai/sdk": "^0.29.2",
"@emotion/react": "^11.13.3",
"@emotion/styled": "^11.13.0",
"@fontsource-variable/inter": "^5.1.0",
"@onlook/foundation": "*",
"@onlook/models": "*",
"@onlook/supabase": "*",
"@onlook/ui": "*",
"@onlook/models": "*",
"@shikijs/monaco": "^1.22.0",
"@supabase/supabase-js": "^2.45.6",
"ai": "^3.4.29",
"electron-log": "^5.2.0",
"electron-updater": "^6.3.4",
"embla-carousel-react": "^8.3.0",
Expand All @@ -57,6 +60,7 @@
"mixpanel": "^0.18.0",
"monaco-editor": "^0.52.0",
"nanoid": "^5.0.7",
"partial-json": "^0.1.7",
"prosemirror-commands": "^1.6.0",
"prosemirror-history": "^1.4.1",
"prosemirror-keymap": "^1.2.2",
Expand All @@ -68,7 +72,8 @@
"react-hotkeys-hook": "^4.5.0",
"shiki": "^1.22.0",
"ts-morph": "^23.0.0",
"use-resize-observer": "^9.1.0"
"use-resize-observer": "^9.1.0",
"zod": "^3.23.8"
},
"devDependencies": {
"@onlook/typescript": "*",
Expand Down
Loading