Commit

Merge branch 'embedding' of github.com:cookieY/lobe-chat into embedding
* 'embedding' of github.com:cookieY/lobe-chat:
  ♻️ refactor: delete unused environment variables
  📝 docs: Update DEFAULT_FILES_CONFIG
  ✨ feat: support user-defined embedding models; currently supports zhipu/openai/github/ollama/bedrock
  add rerank todo
  🔨 chore: embedding model use DEFAULT_FILES_CONFIG env
  Not finished changing
  🐛 fix: fix zhipuAI embedding issue
  📝 docs(bot): Auto sync agents & plugin to readme
  ✨ feat: add zhipu embedding provider
  🔨 chore: ollama embed multiple texts
  📝 docs(bot): Auto sync agents & plugin to readme
  ✨ feat: add ollama embedding provider
  ✨ feat: customized embedding model support
  📝 docs(bot): Auto sync agents & plugin to readme
  📝 docs(bot): Auto sync agents & plugin to readme
  🔨 chore: support bedrock Claude 3.x function calling
cookieY committed Oct 14, 2024
2 parents 7650430 + f7791d3 commit c0cf123
Showing 4 changed files with 8 additions and 8 deletions.
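For context on the feature the "✨ feat" commits above describe: the merge routes a user-defined embedding model through the DEFAULT_FILES_CONFIG environment variable. The TypeScript sketch below only illustrates how such a setting might be parsed into a provider/model pair; the embedding_model key, the comma-separated key=value syntax, and the parseEmbeddingConfig helper are assumptions made for illustration, not the repository's actual API.

// Hypothetical sketch: parse an env value such as
//   DEFAULT_FILES_CONFIG="embedding_model=openai/text-embedding-3-small"
// into a { provider, model } pair. The key name and syntax are assumed here,
// not taken from the lobe-chat source.
interface EmbeddingModelConfig {
  provider: string; // e.g. 'openai', 'ollama', 'zhipu', 'bedrock', 'github'
  model: string; // e.g. 'text-embedding-3-small'
}

const parseEmbeddingConfig = (raw: string | undefined): EmbeddingModelConfig | undefined => {
  if (!raw) return undefined;

  // Split comma-separated "key=value" pairs and look for embedding_model
  const entries = raw.split(',').map((pair) => pair.trim().split('='));
  const embedding = entries.find(([key]) => key === 'embedding_model')?.[1];
  if (!embedding) return undefined;

  // Split "provider/model"; keep any extra "/" segments inside the model id
  const [provider, ...rest] = embedding.split('/');
  if (!provider || rest.length === 0) return undefined;

  return { provider, model: rest.join('/') };
};

// Usage sketch: parseEmbeddingConfig(process.env.DEFAULT_FILES_CONFIG)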
6 changes: 4 additions & 2 deletions src/config/modelProviders/openrouter.ts
@@ -167,7 +167,8 @@ const OpenRouter: ModelProviderCard = {
       tokens: 128_000,
     },
     {
-      description: 'LLaMA 3.2 旨在处理结合视觉和文本数据的任务。它在图像描述和视觉问答等任务中表现出色,跨越了语言生成和视觉推理之间的鸿沟。',
+      description:
+        'LLaMA 3.2 旨在处理结合视觉和文本数据的任务。它在图像描述和视觉问答等任务中表现出色,跨越了语言生成和视觉推理之间的鸿沟。',
       displayName: 'Llama 3.2 11B Vision',
       enabled: true,
       id: 'meta-llama/llama-3.2-11b-vision-instruct',
@@ -179,7 +180,8 @@ const OpenRouter: ModelProviderCard = {
       vision: true,
     },
     {
-      description: 'LLaMA 3.2 旨在处理结合视觉和文本数据的任务。它在图像描述和视觉问答等任务中表现出色,跨越了语言生成和视觉推理之间的鸿沟。',
+      description:
+        'LLaMA 3.2 旨在处理结合视觉和文本数据的任务。它在图像描述和视觉问答等任务中表现出色,跨越了语言生成和视觉推理之间的鸿沟。',
       displayName: 'Llama 3.2 90B Vision',
       enabled: true,
       id: 'meta-llama/llama-3.2-90b-vision-instruct',
1 change: 1 addition & 0 deletions src/server/routers/async/ragEval.ts
@@ -13,6 +13,7 @@ import {
   EvaluationRecordModel,
 } from '@/database/server/models/ragEval';
 import { asyncAuthedProcedure, asyncRouter as router } from '@/libs/trpc/async';
+import { getServerGlobalConfig } from '@/server/globalConfig';
 import { initAgentRuntimeWithUserPayload } from '@/server/modules/AgentRuntime';
 import { ChunkService } from '@/server/services/chunk';
 import { AsyncTaskError } from '@/types/asyncTask';
1 change: 1 addition & 0 deletions src/server/routers/lambda/chunk.ts
@@ -10,6 +10,7 @@ import { MessageModel } from '@/database/server/models/message';
 import { knowledgeBaseFiles } from '@/database/server/schemas/lobechat';
 import { authedProcedure, router } from '@/libs/trpc';
 import { keyVaults } from '@/libs/trpc/middleware/keyVaults';
+import { getServerGlobalConfig } from '@/server/globalConfig';
 import { initAgentRuntimeWithUserPayload } from '@/server/modules/AgentRuntime';
 import { ChunkService } from '@/server/services/chunk';
 import { SemanticSearchSchema } from '@/types/rag';
8 changes: 2 additions & 6 deletions src/utils/server/jwt.ts
@@ -1,10 +1,6 @@
 import { importJWK, jwtVerify } from 'jose';
 
-import {
-  JWTPayload,
-  JWT_SECRET_KEY,
-  NON_HTTP_PREFIX,
-} from '@/const/auth';
+import { JWTPayload, JWT_SECRET_KEY, NON_HTTP_PREFIX } from '@/const/auth';
 
 export const getJWTPayload = async (token: string): Promise<JWTPayload> => {
   //如果是 HTTP 协议发起的请求,直接解析 token
@@ -29,4 +25,4 @@ export const getJWTPayload = async (token: string): Promise<JWTPayload> => {
   const { payload } = await jwtVerify(token, jwkSecretKey);
 
   return payload as JWTPayload;
-};
+};
