
Commit 42ae7b4

fix: ollama model provider cannot set 'num_ctx' and similar parameters #2442
langchain-openai is not compatible with Ollama parameter settings such as num_ctx, so model instances need to be created with langchain-ollama instead.
1 parent ee7cc80 commit 42ae7b4
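
For context, a minimal sketch (not part of this commit) of why the switch matters: ChatOllama exposes Ollama runtime options such as num_ctx as first-class constructor arguments, while ChatOpenAI only speaks the OpenAI-compatible API and has no equivalent field. The model name and endpoint below are placeholder values.

# Illustrative sketch only, not part of this commit; model name and endpoint are placeholders.
from langchain_ollama.chat_models import ChatOllama

chat = ChatOllama(
    model="llama3",                     # placeholder model name
    base_url="http://localhost:11434",  # default local Ollama endpoint
    num_ctx=8192,                       # Ollama option that the ChatOpenAI-based path could not set
)
print(chat.invoke("Hello").content)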

File tree

2 files changed, +6 −6 lines changed

apps/setting/models_provider/impl/ollama_model_provider/model/llm.py

+5-6
@@ -10,7 +10,8 @@
 from urllib.parse import urlparse, ParseResult
 
 from langchain_core.messages import BaseMessage, get_buffer_string
-from langchain_openai.chat_models import ChatOpenAI
+from langchain_ollama.chat_models import ChatOllama
+
 
 from common.config.tokenizer_manage_config import TokenizerManage
 from setting.models_provider.base_model_provider import MaxKBBaseModel
@@ -24,7 +25,7 @@ def get_base_url(url: str):
     return result_url[:-1] if result_url.endswith("/") else result_url
 
 
-class OllamaChatModel(MaxKBBaseModel, ChatOpenAI):
+class OllamaChatModel(MaxKBBaseModel, ChatOllama):
     @staticmethod
     def is_cache_model():
         return False
@@ -33,12 +34,10 @@ def is_cache_model():
     def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
         api_base = model_credential.get('api_base', '')
         base_url = get_base_url(api_base)
-        base_url = base_url if base_url.endswith('/v1') else (base_url + '/v1')
         optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs)
 
-        return OllamaChatModel(model=model_name, openai_api_base=base_url,
-                               openai_api_key=model_credential.get('api_key'),
-                               stream_usage=True, **optional_params)
+        return OllamaChatModel(model=model_name, base_url=base_url,
+                               stream=True, **optional_params)
 
     def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
         tokenizer = TokenizerManage.get_tokenizer()
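
With ChatOllama as the base class, Ollama options supplied through model_kwargs can now reach the client. A rough usage sketch, assuming MaxKBBaseModel.filter_optional_params forwards num_ctx unchanged and using placeholder values throughout:

# Illustrative only; assumes filter_optional_params passes num_ctx through unchanged.
model = OllamaChatModel.new_instance(
    'LLM',                                    # placeholder model_type
    'qwen2:7b',                               # placeholder model name
    {'api_base': 'http://localhost:11434'},   # placeholder credential
    num_ctx=8192,                             # now accepted because the base class is ChatOllama
)

Note that the '/v1' suffix is no longer appended: ChatOllama talks to Ollama's native API at the service root rather than the OpenAI-compatible /v1 endpoint, which is also why the api_key from the credential is no longer passed.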

pyproject.toml

+1
@@ -28,6 +28,7 @@ pycryptodome = "^3.19.0"
 beautifulsoup4 = "^4.12.2"
 html2text = "^2024.2.26"
 langchain-openai = "^0.1.8"
+langchain-ollama = "0.1.3"
 django-ipware = "^6.0.4"
 django-apscheduler = "^0.6.2"
 pymupdf = "1.24.9"
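
A quick post-install check (hypothetical, not part of the commit) to confirm the pinned dependency resolved after reinstalling with Poetry:

# Hypothetical sanity check, not part of the commit.
from importlib.metadata import version
from langchain_ollama.chat_models import ChatOllama  # import succeeds once the package is installed

print("langchain-ollama", version("langchain-ollama"))  # expected to print 0.1.3 with this pin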
