From dd1930499e174abf08417f242c924fed1f1ee3d4 Mon Sep 17 00:00:00 2001 From: youngzm Date: Sun, 12 May 2024 08:43:04 +0000 Subject: [PATCH 1/4] =?UTF-8?q?feat:=20=E5=A2=9E=E5=8A=A0=E4=BA=86DeepSeek?= =?UTF-8?q?=E5=A4=A7=E6=A8=A1=E5=9E=8B=E6=94=AF=E6=8C=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../constants/model_provider_constants.py | 2 + .../impl/deepseek_model_provider/__init__.py | 8 ++ .../deepseek_model_provider.py | 97 +++++++++++++++++++ .../icon/deepseek_icon_svg | 3 + .../model/deepseek_chat_model.py | 30 ++++++ 5 files changed, 140 insertions(+) create mode 100644 apps/setting/models_provider/impl/deepseek_model_provider/__init__.py create mode 100644 apps/setting/models_provider/impl/deepseek_model_provider/deepseek_model_provider.py create mode 100644 apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg create mode 100644 apps/setting/models_provider/impl/deepseek_model_provider/model/deepseek_chat_model.py diff --git a/apps/setting/models_provider/constants/model_provider_constants.py b/apps/setting/models_provider/constants/model_provider_constants.py index 0a46cbfa41f..1e587bba246 100644 --- a/apps/setting/models_provider/constants/model_provider_constants.py +++ b/apps/setting/models_provider/constants/model_provider_constants.py @@ -16,6 +16,7 @@ from setting.models_provider.impl.kimi_model_provider.kimi_model_provider import KimiModelProvider from setting.models_provider.impl.xf_model_provider.xf_model_provider import XunFeiModelProvider from setting.models_provider.impl.zhipu_model_provider.zhipu_model_provider import ZhiPuModelProvider +from setting.models_provider.impl.deepseek_model_provider.deepseek_model_provider import DeepSeekModelProvider class ModelProvideConstants(Enum): @@ -27,3 +28,4 @@ class ModelProvideConstants(Enum): model_qwen_provider = QwenModelProvider() model_zhipu_provider = ZhiPuModelProvider() model_xf_provider = XunFeiModelProvider() + model_deepseek_provider = DeepSeekModelProvider() diff --git a/apps/setting/models_provider/impl/deepseek_model_provider/__init__.py b/apps/setting/models_provider/impl/deepseek_model_provider/__init__.py new file mode 100644 index 00000000000..ee456da1ffe --- /dev/null +++ b/apps/setting/models_provider/impl/deepseek_model_provider/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :MaxKB +@File :__init__.py.py +@Author :Brian Yang +@Date :5/12/24 7:38 AM +""" diff --git a/apps/setting/models_provider/impl/deepseek_model_provider/deepseek_model_provider.py b/apps/setting/models_provider/impl/deepseek_model_provider/deepseek_model_provider.py new file mode 100644 index 00000000000..3baa5f04ad7 --- /dev/null +++ b/apps/setting/models_provider/impl/deepseek_model_provider/deepseek_model_provider.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :MaxKB +@File :deepseek_model_provider.py +@Author :Brian Yang +@Date :5/12/24 7:40 AM +""" +import os +from typing import Dict + +from langchain.schema import HumanMessage + +from common import forms +from common.exception.app_exception import AppApiException +from common.forms import BaseForm +from common.util.file_util import get_file_content +from setting.models_provider.base_model_provider import IModelProvider, ModelProvideInfo, BaseModelCredential, \ + ModelInfo, ModelTypeConst, ValidCode +from setting.models_provider.impl.deepseek_model_provider.model.deepseek_chat_model import DeepSeekChatModel +from 
smartdoc.conf import PROJECT_DIR + + +class DeepSeekLLMModelCredential(BaseForm, BaseModelCredential): + + def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], raise_exception=False): + model_type_list = DeepSeekModelProvider().get_model_type_list() + if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))): + raise AppApiException(ValidCode.valid_error.value, f'{model_type} 模型类型不支持') + + for key in ['api_key']: + if key not in model_credential: + if raise_exception: + raise AppApiException(ValidCode.valid_error.value, f'{key} 字段为必填字段') + else: + return False + try: + model = DeepSeekModelProvider().get_model(model_type, model_name, model_credential) + model.invoke([HumanMessage(content='你好')]) + except Exception as e: + if isinstance(e, AppApiException): + raise e + if raise_exception: + raise AppApiException(ValidCode.valid_error.value, f'校验失败,请检查参数是否正确: {str(e)}') + else: + return False + return True + + def encryption_dict(self, model: Dict[str, object]): + return {**model, 'api_key': super().encryption(model.get('api_key', ''))} + + api_key = forms.PasswordInputField('API Key', required=True) + + +deepseek_llm_model_credential = DeepSeekLLMModelCredential() + +model_dict = { + 'deepseek-chat': ModelInfo('deepseek-chat', '擅长通用对话任务,支持 32K 上下文', ModelTypeConst.LLM, + deepseek_llm_model_credential, + ), + 'deepseek-coder': ModelInfo('deepseek-coder', '擅长处理编程任务,支持 16K 上下文', ModelTypeConst.LLM, + deepseek_llm_model_credential, + ), +} + + +class DeepSeekModelProvider(IModelProvider): + + def get_dialogue_number(self): + return 3 + + def get_model(self, model_type, model_name, model_credential: Dict[str, object], **model_kwargs) -> DeepSeekChatModel: + deepseek_chat_open_ai = DeepSeekChatModel( + model=model_name, + openai_api_base='https://api.deepseek.com', + openai_api_key=model_credential.get('api_key') + ) + return deepseek_chat_open_ai + + def get_model_credential(self, model_type, model_name): + if model_name in model_dict: + return model_dict.get(model_name).model_credential + return deepseek_llm_model_credential + + def get_model_provide_info(self): + return ModelProvideInfo(provider='model_deepseek_provider', name='DeepSeek', icon=get_file_content( + os.path.join(PROJECT_DIR, "apps", "setting", 'models_provider', 'impl', 'deepseek_model_provider', 'icon', + 'deepseek_icon_svg'))) + + def get_model_list(self, model_type: str): + if model_type is None: + raise AppApiException(500, '模型类型不能为空') + return [model_dict.get(key).to_dict() for key in + list(filter(lambda key: model_dict.get(key).model_type == model_type, model_dict.keys()))] + + def get_model_type_list(self): + return [{'key': "大语言模型", 'value': "LLM"}] diff --git a/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg b/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg new file mode 100644 index 00000000000..38ddf7e0abf --- /dev/null +++ b/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/apps/setting/models_provider/impl/deepseek_model_provider/model/deepseek_chat_model.py b/apps/setting/models_provider/impl/deepseek_model_provider/model/deepseek_chat_model.py new file mode 100644 index 00000000000..b7a54b302d9 --- /dev/null +++ b/apps/setting/models_provider/impl/deepseek_model_provider/model/deepseek_chat_model.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :MaxKB +@File 
:deepseek_chat_model.py +@Author :Brian Yang +@Date :5/12/24 7:44 AM +""" +from typing import List + +from langchain_core.messages import BaseMessage, get_buffer_string +from langchain_openai import ChatOpenAI + +from common.config.tokenizer_manage_config import TokenizerManage + + +class DeepSeekChatModel(ChatOpenAI): + def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int: + try: + return super().get_num_tokens_from_messages(messages) + except Exception as e: + tokenizer = TokenizerManage.get_tokenizer() + return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages]) + + def get_num_tokens(self, text: str) -> int: + try: + return super().get_num_tokens(text) + except Exception as e: + tokenizer = TokenizerManage.get_tokenizer() + return len(tokenizer.encode(text)) From c8f48b12d42b16c14e00ac3799ff0db6854fd364 Mon Sep 17 00:00:00 2001 From: youngzm Date: Sun, 12 May 2024 09:18:52 +0000 Subject: [PATCH 2/4] =?UTF-8?q?fix:=20=E4=BF=AE=E5=A4=8D=E4=BA=86=E7=B3=BB?= =?UTF-8?q?=E7=BB=9F=E8=AE=BE=E7=BD=AE->=E6=A8=A1=E5=9E=8B=E8=AE=BE?= =?UTF-8?q?=E7=BD=AE=E4=B8=AD=E7=9A=84=E5=8A=A8=E6=80=81=E8=A1=A8=E5=8D=95?= =?UTF-8?q?=E5=A4=8D=E7=94=A8=E6=97=B6=EF=BC=8C=E9=94=99=E8=AF=AF=E5=9C=B0?= =?UTF-8?q?=E6=98=BE=E7=A4=BA=E4=B9=8B=E5=89=8D=E5=8A=A8=E6=80=81=E8=A1=A8?= =?UTF-8?q?=E5=8D=95=E5=86=85=E5=AE=B9=E7=9A=84bug?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ui/src/views/template/index.vue | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/ui/src/views/template/index.vue b/ui/src/views/template/index.vue index eb8f6c51300..00ca3d8ffd5 100644 --- a/ui/src/views/template/index.vue +++ b/ui/src/views/template/index.vue @@ -70,6 +70,7 @@ ref="createModelRef" @submit="list_model" @change="openCreateModel($event)" + :key="dialogState.createModelDialogKey" > import { ElMessage } from 'element-plus' -import { onMounted, ref, computed, watch } from 'vue' +import { onMounted, ref, computed, reactive } from 'vue' import ModelApi from '@/api/model' import type { Provider, Model } from '@/api/type/model' import AppIcon from '@/components/icons/AppIcon.vue' @@ -128,6 +129,7 @@ const openCreateModel = (provider?: Provider) => { createModelRef.value?.open(provider) } else { selectProviderRef.value?.open() + refreshCreateModelDialogKey() // 更新key } } @@ -138,6 +140,16 @@ const list_model = () => { }) } +// 添加一个响应式的state来存储dialog的key +const dialogState = reactive({ + createModelDialogKey: Date.now() // 初始值为当前的时间戳 +}) + +// 更新dialogState.createModelDialogKey的函数 +const refreshCreateModelDialogKey = () => { + dialogState.createModelDialogKey = Date.now() // 更新为新的时间戳 +} + onMounted(() => { ModelApi.getProvider(loading).then((ok) => { active_provider.value = allObj @@ -154,6 +166,7 @@ onMounted(() => { width: var(--setting-left-width); min-width: var(--setting-left-width); } + .model-list-height { height: calc(var(--create-dataset-height) - 70px); } From bb6c8520f7cd7c0ca933a8c0fa0756eeb3f85f27 Mon Sep 17 00:00:00 2001 From: youngzm Date: Sun, 12 May 2024 15:10:18 +0000 Subject: [PATCH 3/4] =?UTF-8?q?perf:=20=E4=BF=AE=E6=94=B9Azure=20OpenAI?= =?UTF-8?q?=E6=A8=A1=E5=9E=8B=E8=A1=A8=E5=8D=95=E6=8F=8F=E8=BF=B0=EF=BC=8C?= =?UTF-8?q?=E7=AE=80=E5=8C=96=E4=BB=A3=E7=A0=81=E5=AE=9E=E7=8E=B0=EF=BC=88?= =?UTF-8?q?=E5=8F=96=E6=B6=88=E5=AF=B9AzureLLMModelCredential=E7=9A=84?= =?UTF-8?q?=E4=BE=9D=E8=B5=96=EF=BC=8C=E4=BF=9D=E7=95=99DefaultAzureLLMMod?= =?UTF-8?q?elCredential=EF=BC=89?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../azure_model_provider.py | 36 ++++++++----------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/apps/setting/models_provider/impl/azure_model_provider/azure_model_provider.py b/apps/setting/models_provider/impl/azure_model_provider/azure_model_provider.py index 1f04a268a42..5731d7e38d1 100644 --- a/apps/setting/models_provider/impl/azure_model_provider/azure_model_provider.py +++ b/apps/setting/models_provider/impl/azure_model_provider/azure_model_provider.py @@ -10,7 +10,6 @@ from typing import Dict from langchain.schema import HumanMessage -from langchain_community.chat_models.azure_openai import AzureChatOpenAI from common import forms from common.exception.app_exception import AppApiException @@ -22,7 +21,7 @@ from setting.models_provider.impl.azure_model_provider.model.azure_chat_model import AzureChatModel from smartdoc.conf import PROJECT_DIR - +""" class AzureLLMModelCredential(BaseForm, BaseModelCredential): def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], raise_exception=False): @@ -52,11 +51,12 @@ def is_valid(self, model_type: str, model_name, model_credential: Dict[str, obje def encryption_dict(self, model: Dict[str, object]): return {**model, 'api_key': super().encryption(model.get('api_key', ''))} - api_base = forms.TextInputField('API 域名', required=True) + api_base = forms.TextInputField('API 版本 (api_version)', required=True) - api_key = forms.PasswordInputField("API Key", required=True) + api_key = forms.PasswordInputField("API Key(API 密钥)", required=True) - deployment_name = forms.TextInputField("部署名", required=True) + deployment_name = forms.TextInputField("部署名(deployment_name)", required=True) +""" class DefaultAzureLLMModelCredential(BaseForm, BaseModelCredential): @@ -88,28 +88,23 @@ def is_valid(self, model_type: str, model_name, model_credential: Dict[str, obje def encryption_dict(self, model: Dict[str, object]): return {**model, 'api_key': super().encryption(model.get('api_key', ''))} - api_version = forms.TextInputField("api_version", required=True) + api_version = forms.TextInputField("API 版本 (api_version)", required=True) - api_base = forms.TextInputField('API 域名', required=True) + api_base = forms.TextInputField('API 域名 (azure_endpoint)', required=True) - api_key = forms.PasswordInputField("API Key", required=True) + api_key = forms.PasswordInputField("API Key (api_key)", required=True) - deployment_name = forms.TextInputField("部署名", required=True) + deployment_name = forms.TextInputField("部署名 (deployment_name)", required=True) -azure_llm_model_credential = AzureLLMModelCredential() +# azure_llm_model_credential: AzureLLMModelCredential = AzureLLMModelCredential() base_azure_llm_model_credential = DefaultAzureLLMModelCredential() model_dict = { - 'gpt-3.5-turbo-0613': ModelInfo('gpt-3.5-turbo-0613', '', ModelTypeConst.LLM, azure_llm_model_credential, - api_version='2023-07-01-preview'), - 'gpt-3.5-turbo-0301': ModelInfo('gpt-3.5-turbo-0301', '', ModelTypeConst.LLM, azure_llm_model_credential, - api_version='2023-07-01-preview'), - 'gpt-3.5-turbo-16k-0613': ModelInfo('gpt-3.5-turbo-16k-0613', '', ModelTypeConst.LLM, azure_llm_model_credential, - api_version='2023-07-01-preview'), - 'gpt-4-0613': ModelInfo('gpt-4-0613', '', ModelTypeConst.LLM, azure_llm_model_credential, - api_version='2023-07-01-preview'), + 'deployment_name': ModelInfo('Azure OpenAI', '具体的基础模型由部署名决定', ModelTypeConst.LLM, + base_azure_llm_model_credential, 
api_version='2024-02-15-preview' + ) } @@ -118,12 +113,11 @@ class AzureModelProvider(IModelProvider): def get_dialogue_number(self): return 3 - def get_model(self, model_type, model_name, model_credential: Dict[str, object], **model_kwargs) -> AzureChatOpenAI: + def get_model(self, model_type, model_name, model_credential: Dict[str, object], **model_kwargs) -> AzureChatModel: model_info: ModelInfo = model_dict.get(model_name) azure_chat_open_ai = AzureChatModel( azure_endpoint=model_credential.get('api_base'), - openai_api_version=model_info.api_version if model_name in model_dict else model_credential.get( - 'api_version'), + openai_api_version=model_credential.get('api_version', '2024-02-15-preview'), deployment_name=model_credential.get('deployment_name'), openai_api_key=model_credential.get('api_key'), openai_api_type="azure" From a993a65ecfe8f9bd9eb90e99945ab8338feeb208 Mon Sep 17 00:00:00 2001 From: youngzm Date: Sun, 12 May 2024 15:34:01 +0000 Subject: [PATCH 4/4] =?UTF-8?q?perf:=20=E6=9B=B4=E6=8D=A2DeepSeek=E5=9B=BE?= =?UTF-8?q?=E6=A0=87=E4=B8=BA=E5=AE=98=E7=BD=91=E5=BD=A9=E8=89=B2svg?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../impl/deepseek_model_provider/icon/deepseek_icon_svg | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg b/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg index 38ddf7e0abf..6ace8911a62 100644 --- a/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg +++ b/apps/setting/models_provider/impl/deepseek_model_provider/icon/deepseek_icon_svg @@ -1,3 +1,6 @@ - - + + \ No newline at end of file
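
For reviewers who want to exercise the new provider from PATCH 1/4 locally, the sketch below shows the intended call path end to end. It is a minimal sketch, not part of the patches: it assumes the MaxKB Django environment is already bootstrapped (for example from a manage.py shell) and that a real DeepSeek API key replaces the placeholder; apart from those assumptions it only uses names introduced in this series (ModelProvideConstants.model_deepseek_provider, get_model_credential, is_valid, get_model, DeepSeekChatModel).

# Minimal sketch, assuming a bootstrapped MaxKB Django environment and a real API key.
from langchain.schema import HumanMessage

from setting.models_provider.constants.model_provider_constants import ModelProvideConstants

# The enum member's value is the DeepSeekModelProvider() instance registered in PATCH 1/4.
provider = ModelProvideConstants.model_deepseek_provider.value

credential = {'api_key': 'sk-...'}  # placeholder, substitute a real DeepSeek key

# get_model_credential() returns the DeepSeekLLMModelCredential form for known models.
# is_valid() performs a live round trip: it builds a DeepSeekChatModel against
# https://api.deepseek.com and sends a one-message probe (HumanMessage '你好').
form = provider.get_model_credential('LLM', 'deepseek-chat')
if form.is_valid('LLM', 'deepseek-chat', credential, raise_exception=False):
    chat_model = provider.get_model('LLM', 'deepseek-chat', credential)
    print(chat_model.invoke([HumanMessage(content='你好')]).content)

Because is_valid() deliberately issues a real chat completion rather than only checking field presence, validation will fail without network access to api.deepseek.com or with an exhausted key, which is the behavior the PATCH 1/4 credential form encodes.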