forked from Arize-ai/phoenix
Arize-ai#2280: demonstrate using a self-hosted Ollama server as the Eval Model.
Showing 1 changed file with 43 additions and 0 deletions. The new tests mock litellm's Ollama backend (`litellm.llms.ollama.get_ollama_response`) and verify that `LiteLLMModel` forwards a custom base URL supplied either through `model_kwargs` or the `OLLAMA_API_BASE` environment variable.
import os

from unittest import mock

from models import LiteLLMModel


@mock.patch.dict(os.environ, {"OLLAMA_API_BASE": "just to make validate_env happy"}, clear=True)
@mock.patch("litellm.llms.ollama.get_ollama_response")
def test_selfhosted_ollama_via_model_kwargs(get_ollama_response):
    # Fake litellm response. Item-style assignment on a MagicMock is inert;
    # the attribute-style assignment below carries the value that is read back.
    ollama_response = mock.MagicMock()
    ollama_response["choices"][0]["message"]["content"] = "barely understand Python mocks"
    ollama_response.choices[0].message.content = "42 per tail"

    get_ollama_response.return_value = ollama_response

    # The self-hosted endpoint is passed explicitly via model_kwargs.
    lllmm = LiteLLMModel(
        model="ollama/monstral",
        model_kwargs=dict(base_url="http://hosted.olla.ma:11434"),
    )
    result = lllmm("How much is the fish?")

    assert result == "42 per tail"
    # litellm's Ollama backend should receive the custom base URL,
    # the bare model name, and a prompt containing the question.
    call_args = get_ollama_response.call_args[0]
    assert call_args[0] == "http://hosted.olla.ma:11434"
    assert call_args[1] == "monstral"
    assert "How much is the fish?" in call_args[2]


@mock.patch.dict(os.environ, {"OLLAMA_API_BASE": "http://hosted.olla.ma:11434"}, clear=True)
@mock.patch("litellm.llms.ollama.get_ollama_response")
def test_selfhosted_ollama_via_env(get_ollama_response):
    ollama_response = mock.MagicMock()
    ollama_response["choices"][0]["message"]["content"] = "barely understand Python mocks"
    ollama_response.choices[0].message.content = "42 per tail"

    get_ollama_response.return_value = ollama_response

    # No model_kwargs here: the endpoint comes from OLLAMA_API_BASE above.
    lllmm = LiteLLMModel(model="ollama/monstral")
    result = lllmm("How much is the fish?")

    assert result == "42 per tail"
    call_args = get_ollama_response.call_args[0]
    assert call_args[0] == "http://hosted.olla.ma:11434"
    assert call_args[1] == "monstral"
    assert "How much is the fish?" in call_args[2]