Demonstrate using self-hosted Ollama as an Eval Model. Arize-ai#2280
mkhludnev committed Mar 5, 2024
1 parent 1de1415 commit 6d54ad4
Showing 2 changed files with 44 additions and 1 deletion.
packages/phoenix-evals/src/phoenix/evals/models/litellm.py (1 addition, 1 deletion)
@@ -65,7 +65,7 @@ def _init_environment(self) -> None:
         from litellm import validate_environment
 
         self._litellm = litellm
-        env_info = validate_environment(self._litellm.utils.get_llm_provider(self.model))
+        env_info = validate_environment(self.model)
 
         if not env_info["keys_in_environment"]:
             raise RuntimeError(
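Why the simplification works: litellm's validate_environment accepts the provider-prefixed model string directly (e.g. "ollama/monstral"), resolves the provider itself, and reports any missing environment variables, so the extra get_llm_provider call was unnecessary. A minimal sketch of the simplified call, assuming only litellm's public validate_environment helper (the model name is illustrative):

from litellm import validate_environment

# Resolves the provider from the "ollama/" prefix and returns a dict of the
# form {"keys_in_environment": bool, "missing_keys": [...]}.
env_info = validate_environment("ollama/monstral")
if not env_info["keys_in_environment"]:
    # For Ollama models this typically flags OLLAMA_API_BASE, which is why
    # the tests below patch it into the environment.
    raise RuntimeError(f"Missing environment variables: {env_info['missing_keys']}")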
packages/phoenix-evals/tests/phoenix/evals/models/test_ollama.py (new file, 43 additions)

@@ -0,0 +1,43 @@
+import os
+import unittest
+from unittest import mock
+
+from phoenix.evals.models import LiteLLMModel
+
+
+@mock.patch.dict(
+    os.environ,
+    {"OLLAMA_API_BASE": "just to make litellm.validate_environment happy"},
+    clear=True,
+)
+@mock.patch("litellm.llms.ollama.get_ollama_response")
+def test_selfhosted_ollama_via_model_kwargs(get_ollama_response):
+    # MagicMock auto-creates the chained attributes that LiteLLM reads back
+    # as response.choices[0].message.content, so attribute assignment is all
+    # the configuration the fake response needs.
+    ollama_response = unittest.mock.MagicMock()
+    ollama_response.choices[0].message.content = "42 per tail"
+
+    get_ollama_response.return_value = ollama_response
+
+    model = LiteLLMModel(
+        model="ollama/monstral",
+        model_kwargs=dict(base_url="http://hosted.olla.ma:11434"),
+    )
+    result = model("How much is the fish?")
+
+    assert result == "42 per tail"
+    # litellm passes (api_base, model, prompt, ...) positionally.
+    call_args = get_ollama_response.call_args[0]
+    assert call_args[0] == "http://hosted.olla.ma:11434"
+    assert call_args[1] == "monstral"
+    assert "How much is the fish?" in call_args[2]
+
+
+@mock.patch.dict(os.environ, {"OLLAMA_API_BASE": "http://hosted.olla.ma:11434"}, clear=True)
+@mock.patch("litellm.llms.ollama.get_ollama_response")
+def test_selfhosted_ollama_via_env(get_ollama_response):
+    ollama_response = unittest.mock.MagicMock()
+    ollama_response.choices[0].message.content = "42 per tail"
+
+    get_ollama_response.return_value = ollama_response
+
+    # No model_kwargs here: the base URL comes from OLLAMA_API_BASE alone.
+    model = LiteLLMModel(model="ollama/monstral")
+    result = model("How much is the fish?")
+
+    assert result == "42 per tail"
+    call_args = get_ollama_response.call_args[0]
+    assert call_args[0] == "http://hosted.olla.ma:11434"
+    assert call_args[1] == "monstral"
+    assert "How much is the fish?" in call_args[2]
