Skip to content

Commit

Permalink
Merge branch 'lobehub:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
sxjeru authored Jan 17, 2025
2 parents fc2005d + aa07c40 commit 36d5f97
Show file tree
Hide file tree
Showing 12 changed files with 251 additions and 26 deletions.
68 changes: 68 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,74 @@

# Changelog

### [Version 1.46.7](https://github.com/lobehub/lobe-chat/compare/v1.46.6...v1.46.7)

<sup>Released on **2025-01-17**</sup>

#### 🐛 Bug Fixes

- **misc**: Improve validation for provider and model in parseFilesConfig, temporarily disable S3 client integrity check for Cloudflare R2.

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

#### What's fixed

- **misc**: Improve validation for provider and model in parseFilesConfig, closes [#5454](https://github.com/lobehub/lobe-chat/issues/5454) ([b4808f8](https://github.com/lobehub/lobe-chat/commit/b4808f8))
- **misc**: Temporarily disable S3 client integrity check for Cloudflare R2, closes [#5479](https://github.com/lobehub/lobe-chat/issues/5479) ([a638238](https://github.com/lobehub/lobe-chat/commit/a638238))

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.46.6](https://github.com/lobehub/lobe-chat/compare/v1.46.5...v1.46.6)

<sup>Released on **2025-01-16**</sup>

#### 🐛 Bug Fixes

- **misc**: Gemini models HarmBlockThreshold.

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

#### What's fixed

- **misc**: Gemini models HarmBlockThreshold, closes [#5477](https://github.com/lobehub/lobe-chat/issues/5477) ([f98375c](https://github.com/lobehub/lobe-chat/commit/f98375c))

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.46.5](https://github.com/lobehub/lobe-chat/compare/v1.46.4...v1.46.5)

<sup>Released on **2025-01-16**</sup>

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.46.4](https://github.com/lobehub/lobe-chat/compare/v1.46.3...v1.46.4)

<sup>Released on **2025-01-16**</sup>
Expand Down
21 changes: 21 additions & 0 deletions changelog/v1.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,25 @@
[
{
"children": {
"fixes": [
"Improve validation for provider and model in parseFilesConfig, temporarily disable S3 client integrity check for Cloudflare R2."
]
},
"date": "2025-01-17",
"version": "1.46.7"
},
{
"children": {
"fixes": ["Gemini models HarmBlockThreshold."]
},
"date": "2025-01-16",
"version": "1.46.6"
},
{
"children": {},
"date": "2025-01-16",
"version": "1.46.5"
},
{
"children": {
"improvements": ["Refactor some implement for the next performance improvement."]
Expand Down
2 changes: 1 addition & 1 deletion netlify.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[build]
command = "rm -rf .next node_modules/.cache && pnpm run build"
publish = ".next"

[build.environment]
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@lobehub/chat",
  "version": "1.46.7",
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
"keywords": [
"framework",
Expand Down
2 changes: 1 addition & 1 deletion src/const/settings/knowledge.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@ export const DEFAULT_FILE_RERANK_MODEL_ITEM: FilesConfigItem = {

// Default knowledge-base files configuration: which models handle embedding
// and reranking, and which query mode retrieval uses.
export const DEFAULT_FILES_CONFIG: FilesConfig = {
  embeddingModel: DEFAULT_FILE_EMBEDDING_MODEL_ITEM,
  // Renamed from `queryModel` to `queryMode` — it holds a retrieval mode
  // (e.g. 'full_text'), not a model id.
  queryMode: DEFAULT_RERANK_QUERY_MODE,
  rerankerModel: DEFAULT_FILE_RERANK_MODEL_ITEM,
};
17 changes: 12 additions & 5 deletions src/libs/agent-runtime/google/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,14 @@ enum HarmCategory {

// Harm-block threshold values accepted by the Google Generative AI API.
enum HarmBlockThreshold {
  BLOCK_NONE = 'BLOCK_NONE',
  // 'OFF' fully disables the safety filter on models that support it.
  // refs: https://discuss.ai.google.dev/t/59352
  OFF = 'OFF',
}

/**
 * Resolves the safety-setting threshold to use for a given Google model.
 *
 * Some models reject `BLOCK_NONE` and require `OFF` to disable safety
 * filtering; every other model falls back to `BLOCK_NONE`.
 *
 * @param model - Google model id, e.g. 'gemini-2.0-flash-exp'
 * @returns the threshold to place in each `safetySettings` entry
 */
function getThreshold(model: string): HarmBlockThreshold {
  // Models that require 'OFF' instead of 'BLOCK_NONE'.
  const modelsRequiringOff = ['gemini-2.0-flash-exp'];
  if (modelsRequiringOff.includes(model)) {
    // Use the enum member directly — no need for the `'OFF' as
    // HarmBlockThreshold` assertion since the enum declares OFF.
    return HarmBlockThreshold.OFF;
  }
  return HarmBlockThreshold.BLOCK_NONE;
}

export class LobeGoogleAI implements LobeRuntimeAI {
Expand Down Expand Up @@ -71,19 +78,19 @@ export class LobeGoogleAI implements LobeRuntimeAI {
safetySettings: [
{
category: HarmCategory.HARM_CATEGORY_HATE_SPEECH,
threshold: model.includes('2.0') ? (HarmBlockThreshold.OFF as any) : HarmBlockThreshold.BLOCK_NONE,
threshold: getThreshold(model),
},
{
category: HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
threshold: model.includes('2.0') ? (HarmBlockThreshold.OFF as any) : HarmBlockThreshold.BLOCK_NONE,
threshold: getThreshold(model),
},
{
category: HarmCategory.HARM_CATEGORY_HARASSMENT,
threshold: model.includes('2.0') ? (HarmBlockThreshold.OFF as any) : HarmBlockThreshold.BLOCK_NONE,
threshold: getThreshold(model),
},
{
category: HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
threshold: model.includes('2.0') ? (HarmBlockThreshold.OFF as any) : HarmBlockThreshold.BLOCK_NONE,
threshold: getThreshold(model),
},
],
},
Expand Down
125 changes: 122 additions & 3 deletions src/server/globalConfig/parseFilesConfig.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,134 @@ import { describe, expect, it } from 'vitest';
import { parseFilesConfig } from './parseFilesConfig';

describe('parseFilesConfig', () => {
  it('parses full configuration correctly', () => {
    const envStr =
      'embedding_model=openai/embedding-text-3-small,reranker_model=cohere/rerank-english-v3.0,query_mode=full_text';
    const expected = {
      embeddingModel: { provider: 'openai', model: 'embedding-text-3-small' },
      rerankerModel: { provider: 'cohere', model: 'rerank-english-v3.0' },
      queryMode: 'full_text',
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  // Verify the embeddings configuration is parsed correctly on its own.
  it('parses embeddings configuration correctly', () => {
    const envStr = 'embedding_model=openai/embedding-text-3-large';
    const expected = {
      embeddingModel: { provider: 'openai', model: 'embedding-text-3-large' },
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  it('parses rerank configuration correctly', () => {
    const envStr = 'reranker_model=cohere/rerank-english-v3.0';
    const expected = {
      rerankerModel: { provider: 'cohere', model: 'rerank-english-v3.0' },
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  it('parses queryMode configuration correctly', () => {
    const envStr = 'query_mode=full_text';
    const expected = {
      queryMode: 'full_text',
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  it('parses queryMode rerank configuration correctly', () => {
    const envStr = 'reranker_model=cohere/rerank-english-v3.0,query_mode=full_text';
    const expected = {
      queryMode: 'full_text',
      rerankerModel: { provider: 'cohere', model: 'rerank-english-v3.0' },
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  it('parses queryMode embeddings configuration correctly', () => {
    const envStr = 'embedding_model=openai/embedding-text-3-small,query_mode=full_text';
    const expected = {
      queryMode: 'full_text',
      embeddingModel: { provider: 'openai', model: 'embedding-text-3-small' },
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  it('parses rerank embeddings configuration correctly', () => {
    const envStr =
      'reranker_model=cohere/rerank-english-v3.0,embedding_model=openai/embedding-text-3-small';
    // No query_mode in the env string, so the result must not contain one.
    const expected = {
      embeddingModel: { provider: 'openai', model: 'embedding-text-3-small' },
      rerankerModel: { provider: 'cohere', model: 'rerank-english-v3.0' },
    };
    expect(parseFilesConfig(envStr)).toEqual(expected);
  });

  it('should throw an error when embedding_model has an empty provider', () => {
    const envStr =
      'reranker_model=cohere/rerank-english-v3.0,embedding_model=/embedding-text-3-small';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(
      new Error(
        'Invalid environment variable format. expected of the form embedding_model=provider/model',
      ),
    );
  });

  it('should throw an error when embedding_model has no slash separator', () => {
    const envStr = 'reranker_model=cohere/rerank-english-v3.0,embedding_model=openai';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(
      new Error(
        'Invalid environment variable format. expected of the form embedding_model=provider/model',
      ),
    );
  });

  it('should throw an error when embedding_model value is empty', () => {
    const envStr = 'reranker_model=cohere/rerank-english-v3.0,embedding_model=';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(new Error('Invalid environment variable format.'));
  });

  it('should throw an error when reranker_model has an empty provider', () => {
    const envStr =
      'reranker_model=/rerank-english-v3.0,embedding_model=openai/embedding-text-3-small';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(
      new Error(
        'Invalid environment variable format. expected of the form reranker_model=provider/model',
      ),
    );
  });

  it('should throw an error when reranker_model has an empty model', () => {
    const envStr = 'reranker_model=cohere/,embedding_model=openai/embedding-text-3-small';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(
      new Error(
        'Invalid environment variable format. expected of the form reranker_model=provider/model',
      ),
    );
  });

  it('should throw an error when reranker_model value is empty', () => {
    const envStr = 'reranker_model=,embedding_model=openai/embedding-text-3-small';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(new Error('Invalid environment variable format.'));
  });

  it('should throw an error when query_mode value is empty', () => {
    const envStr = 'query_mode=';
    expect(() => {
      parseFilesConfig(envStr);
    }).toThrow(new Error('Invalid environment variable format.'));
  });
});
32 changes: 19 additions & 13 deletions src/server/globalConfig/parseFilesConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { FilesConfig } from '@/types/user/settings/filesConfig';

// Recognized keys in the files-config env string, e.g.
// "embedding_model=openai/x,reranker_model=cohere/y,query_mode=full_text".
// Keys are declared as a null-valued object so Object.keys keeps the list
// and the parser's switch statement in one visible place.
const protectedKeys = Object.keys({
  embedding_model: null,
  query_mode: null,
  reranker_model: null,
});

Expand All @@ -24,34 +24,40 @@ export const parseFilesConfig = (envString: string = ''): SystemEmbeddingConfig
const [provider, ...modelParts] = value.split('/');
const model = modelParts.join('/');

if ((!provider || !model) && key !== 'query_model') {
throw new Error('Missing model or provider value');
}

if (key === 'query_model' && value === '') {
throw new Error('Missing query mode value');
}

if (protectedKeys.includes(key)) {
switch (key) {
case 'embedding_model': {
if (!provider || !model) {
throw new Error(
'Invalid environment variable format. expected of the form embedding_model=provider/model',
);
}
config.embeddingModel = { model: model.trim(), provider: provider.trim() };
break;
}
case 'reranker_model': {
if (!provider || !model) {
throw new Error(
'Invalid environment variable format. expected of the form reranker_model=provider/model',
);
}
config.rerankerModel = { model: model.trim(), provider: provider.trim() };
break;
}
case 'query_model': {
config.queryModel = value;
case 'query_mode': {
config.queryMode = value;
break;
}
default: {
throw new Error(
'Invalid environment variable format. expected one of embedding_model, reranker_model, query_mode',
);
}
}
}
} else {
throw new Error('Invalid environment variable format');
throw new Error('Invalid environment variable format.');
}
}

return config;
};
3 changes: 3 additions & 0 deletions src/server/modules/S3/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,9 @@ export class S3 {
endpoint: fileEnv.S3_ENDPOINT,
forcePathStyle: fileEnv.S3_ENABLE_PATH_STYLE,
region: fileEnv.S3_REGION || DEFAULT_S3_REGION,
// refs: https://github.com/lobehub/lobe-chat/pull/5479
requestChecksumCalculation: 'WHEN_REQUIRED',
responseChecksumValidation: 'WHEN_REQUIRED',
});
}

Expand Down
2 changes: 1 addition & 1 deletion src/types/knowledgeBase/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,6 @@ export interface KnowledgeItem {

// Server-side embedding/retrieval configuration resolved from the
// FILES_CONFIG environment variable.
export interface SystemEmbeddingConfig {
  embeddingModel: FilesConfigItem;
  // Retrieval query mode (e.g. 'full_text'); renamed from `queryModel`
  // since it is a mode flag, not a model id.
  queryMode: string;
  rerankerModel: FilesConfigItem;
}
2 changes: 1 addition & 1 deletion src/types/user/settings/filesConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ export interface FilesConfigItem {
}
// User-settings shape for knowledge-base file processing; mirrors
// SystemEmbeddingConfig in src/types/knowledgeBase.
export interface FilesConfig {
  embeddingModel: FilesConfigItem;
  // Retrieval query mode (e.g. 'full_text'); renamed from `queryModel`
  // since it is a mode flag, not a model id.
  queryMode: string;
  rerankerModel: FilesConfigItem;
}
1 change: 1 addition & 0 deletions vercel.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
{
"buildCommand": "NODE_OPTIONS=--max-old-space-size=6144 next build",
"installCommand": "bun install"
}

0 comments on commit 36d5f97

Please sign in to comment.