feat: add multi-model arch
Yidadaa committed Sep 25, 2023
1 parent eae7d62 commit f4541d3
Showing 27 changed files with 814 additions and 661 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -317,3 +317,7 @@ If you want to add a new translation, read this [document](./docs/translation.md)
## LICENSE

[MIT](https://opensource.org/license/mit/)

## Next Steps

- [ ] Finish the multi-model settings UI
10 changes: 5 additions & 5 deletions app/api/openai/[...path]/route.ts
@@ -1,14 +1,14 @@
-import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
+import { type OpenAI } from "@/app/client/openai/types";
import { getServerSideConfig } from "@/app/config/server";
import { OpenaiPath } from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { requestOpenai } from "../../common";

-const ALLOWD_PATH = new Set(Object.values(OpenaiPath));
+const ALLOWD_PATH = new Set(Object.values(OpenaiPath) as string[]);

-function getModels(remoteModelRes: OpenAIListModelResponse) {
+function getModels(remoteModelRes: OpenAI.ListModelResponse) {
const config = getServerSideConfig();

if (config.disableGPT4) {
@@ -56,8 +56,8 @@ async function handle(
const response = await requestOpenai(req);

// list models
-  if (subpath === OpenaiPath.ListModelPath && response.status === 200) {
-    const resJson = (await response.json()) as OpenAIListModelResponse;
+  if (subpath === OpenaiPath.ListModel && response.status === 200) {
+    const resJson = await response.json();
const availableModels = getModels(resJson);
return NextResponse.json(availableModels, {
status: response.status,
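The body of `getModels` is collapsed in this view. A minimal sketch of what the `disableGPT4` branch likely does — the `ListModelResponse` shape and the `filterModels` helper below are assumptions for illustration, not code from this commit:

```ts
// Assumed shape of the OpenAI "list models" payload referenced as OpenAI.ListModelResponse.
interface ListModelResponse {
  object: string;
  data: Array<{ id: string; object: string; root: string }>;
}

// Hypothetical filter matching the `disableGPT4` branch: drop gpt-4* models from the list.
function filterModels(res: ListModelResponse, disableGPT4: boolean): ListModelResponse {
  if (!disableGPT4) return res;
  return { ...res, data: res.data.filter((m) => !m.id.startsWith("gpt-4")) };
}
```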
151 changes: 0 additions & 151 deletions app/client/api.ts

This file was deleted.

28 changes: 28 additions & 0 deletions app/client/common/auth.ts
@@ -0,0 +1,28 @@
import { getClientConfig } from "@/app/config/client";
import { ACCESS_CODE_PREFIX } from "@/app/constant";
import { useAccessStore } from "@/app/store";

export function bearer(value: string) {
  return `Bearer ${value.trim()}`;
}

export function getAuthHeaders(apiKey = "") {
  const accessStore = useAccessStore.getState();
  const isApp = !!getClientConfig()?.isApp;

  let headers: Record<string, string> = {};

  if (apiKey) {
    // use user's api key first
    headers.Authorization = bearer(apiKey);
  } else if (
    accessStore.enabledAccessControl() &&
    !isApp &&
    !!accessStore.accessCode
  ) {
    // or use access code
    headers.Authorization = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
  }

  return headers;
}
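A minimal usage sketch for `getAuthHeaders`, assuming a plain `fetch` call site; the `fetchRemoteModels` helper and the `/v1/models` path are illustrative, not part of this commit:

```ts
// Hypothetical call site: spread the auth headers into an OpenAI-style request.
async function fetchRemoteModels(endpoint: string) {
  const res = await fetch(`${endpoint}/v1/models`, {
    method: "GET",
    headers: {
      ...getAuthHeaders(), // Bearer <api key>, or Bearer <access code> when access control is on
    },
  });
  return res.json();
}
```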
5 changes: 5 additions & 0 deletions app/client/common/config.ts
@@ -0,0 +1,5 @@
export const COMMON_PROVIDER_CONFIG = {
  customModels: "",
  models: [] as string[],
  autoFetchModels: false, // fetch available models from server or not
};
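`customModels` is kept as a single string alongside the fetched `models` list. A sketch of how a caller might merge the two — the comma-separated format and the `mergeModels` name are assumptions, not part of this commit:

```ts
// Hypothetical helper: combine fetched models with user-supplied custom models.
function mergeModels(config: typeof COMMON_PROVIDER_CONFIG): string[] {
  const custom = config.customModels
    .split(",")
    .map((m) => m.trim())
    .filter((m) => m.length > 0);
  return Array.from(new Set([...config.models, ...custom]));
}
```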
File renamed without changes.
44 changes: 44 additions & 0 deletions app/client/common/share.ts
@@ -0,0 +1,44 @@
import { getClientConfig } from "@/app/config/client";
import { ChatMessage } from "@/app/store";

export async function shareToShareGPT(
  messages: ChatMessage[],
  avatarUrl: string | null = null,
) {
  const msgs = messages
    .map((m) => ({
      from: m.role === "user" ? "human" : "gpt",
      value: m.content,
    }))
    .concat([
      {
        from: "human",
        // A note to developers of forks: for the sake of open-source LLM development,
        // please do not modify the message below; it is used for data cleaning later on.
        // Please do not modify this message
        value:
          "Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web",
      },
    ]);

  console.log("[Share]", messages, msgs);
  const clientConfig = getClientConfig();
  const proxyUrl = "/sharegpt";
  const rawUrl = "https://sharegpt.com/api/conversations";
  const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
  const res = await fetch(shareUrl, {
    body: JSON.stringify({
      avatarUrl,
      items: msgs,
    }),
    headers: {
      "Content-Type": "application/json",
    },
    method: "POST",
  });

  const resJson = await res.json();
  console.log("[Share]", resJson);
  if (resJson.id) {
    return `https://shareg.pt/${resJson.id}`;
  }
}
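A minimal caller sketch, assuming the same `ChatMessage` import; the `onShare` handler and the clipboard step are illustrative, not part of this file:

```ts
// Hypothetical caller: share the current session and copy the resulting link.
async function onShare(messages: ChatMessage[]) {
  const url = await shareToShareGPT(messages);
  if (url) {
    await navigator.clipboard.writeText(url);
  }
}
```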
28 changes: 28 additions & 0 deletions app/client/core.ts
@@ -0,0 +1,28 @@
import { MaskConfig, ProviderConfig } from "../store";
import { shareToShareGPT } from "./common/share";
import { createOpenAiClient } from "./openai";
import { ChatControllerPool } from "./common/controller";

export const LLMClients = {
  openai: createOpenAiClient,
};

export function createLLMClient(
  config: ProviderConfig,
  maskConfig: MaskConfig,
) {
  return LLMClients[maskConfig.provider as any as keyof typeof LLMClients](
    config,
    maskConfig.modelConfig,
  );
}

export function createApi() {
  return {
    createLLMClient,
    shareToShareGPT,
    controllerManager: ChatControllerPool,
  };
}

export const api = createApi();
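A usage sketch for the registry: the client is looked up by the mask's `provider` field, so adding a provider means adding one entry to `LLMClients`. The import paths and placeholder configs below are assumptions; only the OpenAI client exists in this commit:

```ts
// Hypothetical call site; ProviderConfig and MaskConfig come from the store as above.
import { api } from "@/app/client";
import { MaskConfig, ProviderConfig } from "@/app/store";

declare const providerConfig: ProviderConfig; // normally read from the app config store
declare const maskConfig: MaskConfig; // normally the active mask, e.g. provider: "openai"

const client = api.createLLMClient(providerConfig, maskConfig);
```

Note that the `as any as keyof typeof LLMClients` cast in `createLLMClient` bypasses compile-time checking of the provider name, so an unregistered provider would only surface as a runtime error.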
2 changes: 2 additions & 0 deletions app/client/index.ts
@@ -0,0 +1,2 @@
export * from "./types";
export * from "./core";
19 changes: 19 additions & 0 deletions app/client/openai/config.ts
@@ -0,0 +1,19 @@
import { COMMON_PROVIDER_CONFIG } from "../common/config";

export const OpenAIConfig = {
  model: {
    model: "gpt-3.5-turbo" as string,
    summarizeModel: "gpt-3.5-turbo",

    temperature: 0.5,
    top_p: 1,
    max_tokens: 2000,
    presence_penalty: 0,
    frequency_penalty: 0,
  },
  provider: {
    endpoint: "https://api.openai.com",
    apiKey: "",
    ...COMMON_PROVIDER_CONFIG,
  },
};
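Since the defaults live in a plain object, the per-provider config types can be derived from it; the type aliases and the override example below are a sketch, not names from this commit:

```ts
// Hypothetical type derivation from the default config object.
type OpenAIModelConfig = typeof OpenAIConfig.model;
type OpenAIProviderConfig = typeof OpenAIConfig.provider;

// e.g. a per-mask override merged onto the defaults:
const modelConfig: OpenAIModelConfig = {
  ...OpenAIConfig.model,
  model: "gpt-4",
  temperature: 0.7,
};
```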