Skip to content

Commit

Permalink
Merge pull request #5759 from ConnectAI-E/feature/onfinish
Browse files Browse the repository at this point in the history
Feature/onfinish
  • Loading branch information
Dogtiti authored Nov 4, 2024
2 parents d66bfc6 + 44fc5b5 commit 5733e3c
Show file tree
Hide file tree
Showing 16 changed files with 53 additions and 43 deletions.
2 changes: 1 addition & 1 deletion app/client/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ export interface ChatOptions {
config: LLMConfig;

onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onFinish: (message: string, responseRes: Response) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
onBeforeTool?: (tool: ChatMessageTool) => void;
Expand Down
6 changes: 4 additions & 2 deletions app/client/platforms/alibaba.ts
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ export class QwenApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it looks smooth
function animateResponseText() {
Expand Down Expand Up @@ -172,7 +173,7 @@ export class QwenApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -188,6 +189,7 @@ export class QwenApi implements LLMApi {
"[Alibaba] request response content type: ",
contentType,
);
responseRes = res;

if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
Expand Down Expand Up @@ -254,7 +256,7 @@ export class QwenApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
5 changes: 3 additions & 2 deletions app/client/platforms/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -317,13 +317,14 @@ export class ClaudeApi implements LLMApi {
};

try {
controller.signal.onabort = () => options.onFinish("");
controller.signal.onabort = () =>
options.onFinish("", new Response(null, { status: 400 }));

const res = await fetch(path, payload);
const resJson = await res.json();

const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
} catch (e) {
console.error("failed to chat", e);
options.onError?.(e as Error);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/baidu.ts
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,7 @@ export class ErnieApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it looks smooth
function animateResponseText() {
Expand Down Expand Up @@ -191,7 +192,7 @@ export class ErnieApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -204,7 +205,7 @@ export class ErnieApi implements LLMApi {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Baidu] request response content type: ", contentType);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -267,7 +268,7 @@ export class ErnieApi implements LLMApi {

const resJson = await res.json();
const message = resJson?.result;
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/bytedance.ts
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ export class DoubaoApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it looks smooth
function animateResponseText() {
Expand Down Expand Up @@ -159,7 +160,7 @@ export class DoubaoApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -175,7 +176,7 @@ export class DoubaoApi implements LLMApi {
"[ByteDance] request response content type: ",
contentType,
);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -241,7 +242,7 @@ export class DoubaoApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/glm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ export class ChatGLMApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,7 @@ export class GeminiProApi implements LLMApi {
);
}
const message = apiClient.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/iflytek.ts
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ export class SparkApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// Animate response text to make it look smooth
function animateResponseText() {
Expand All @@ -143,7 +144,7 @@ export class SparkApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -156,7 +157,7 @@ export class SparkApi implements LLMApi {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Spark] request response content type: ", contentType);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -231,7 +232,7 @@ export class SparkApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/moonshot.ts
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ export class MoonshotApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,7 @@ export class ChatGPTApi implements LLMApi {

const resJson = await res.json();
const message = await this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/tencent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ export class HunyuanApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it looks smooth
function animateResponseText() {
Expand Down Expand Up @@ -171,7 +172,7 @@ export class HunyuanApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -187,7 +188,7 @@ export class HunyuanApi implements LLMApi {
"[Tencent] request response content type: ",
contentType,
);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -253,7 +254,7 @@ export class HunyuanApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/xai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ export class XAIApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
35 changes: 17 additions & 18 deletions app/store/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -649,13 +649,14 @@ export const useChatStore = createPersistStore(
stream: false,
providerName,
},
onFinish(message) {
if (!isValidMessage(message)) return;
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
}
},
});
}
Expand All @@ -669,7 +670,7 @@ export const useChatStore = createPersistStore(

const historyMsgLength = countMessages(toBeSummarizedMsgs);

if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
if (historyMsgLength > (modelConfig?.max_tokens || 4000)) {
const n = toBeSummarizedMsgs.length;
toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
Math.max(0, n - modelConfig.historyMessageCount),
Expand Down Expand Up @@ -715,22 +716,20 @@ export const useChatStore = createPersistStore(
onUpdate(message) {
session.memoryPrompt = message;
},
onFinish(message) {
console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
}
},
onError(err) {
console.error("[Summarize] ", err);
},
});
}

function isValidMessage(message: any): boolean {
return typeof message === "string" && !message.startsWith("```json");
}
},

updateStat(message: ChatMessage) {
Expand Down
4 changes: 3 additions & 1 deletion app/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -266,7 +266,9 @@ export function isVisionModel(model: string) {
model.includes("gpt-4-turbo") && !model.includes("preview");

return (
visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo
visionKeywords.some((keyword) => model.includes(keyword)) ||
isGpt4Turbo ||
isDalle3(model)
);
}

Expand Down
4 changes: 3 additions & 1 deletion app/utils/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,7 @@ export function stream(
let finished = false;
let running = false;
let runTools: any[] = [];
let responseRes: Response;

// animate response to make it looks smooth
function animateResponseText() {
Expand Down Expand Up @@ -272,7 +273,7 @@ export function stream(
}
console.debug("[ChatAPI] end");
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes); // pass res to onFinish
}
};

Expand Down Expand Up @@ -304,6 +305,7 @@ export function stream(
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Request] response content type: ", contentType);
responseRes = res;

if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
Expand Down
2 changes: 1 addition & 1 deletion app/utils/stream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ type StreamResponse = {
headers: Record<string, string>;
};

export function fetch(url: string, options?: RequestInit): Promise<any> {
export function fetch(url: string, options?: RequestInit): Promise<Response> {
if (window.__TAURI__) {
const {
signal,
Expand Down

0 comments on commit 5733e3c

Please sign in to comment.