-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
9 changed files
with
157 additions
and
83 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
- Use tabs and not spaces. | ||
- Never change line endings unless explicitly requested. | ||
- Never change the file structure you were not specifically requested. | ||
- Never make up features you were not specifically requested. | ||
- Never delete anything you were not specifically requested. | ||
- You may advise me to add or delete things I didn't request but not do it yourself. | ||
- Avoid usage of too generic names such as get and set. Be specific and write clean code. | ||
- First explain to yourself what you are about to do and only then do it. | ||
- Use pytest to write tests. | ||
- If you are not sure about something, ask me. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,17 +1,17 @@ | ||
import os | ||
from openai import OpenAI | ||
import openai | ||
from constants import OPENAI_API_KEY | ||
|
||
def fetch_openai_response(query): | ||
client = OpenAI(api_key=OPENAI_API_KEY) | ||
try: | ||
response = client.chat.completions.create( | ||
model="gpt-3.5-turbo", | ||
messages=[ | ||
{"role": "user", "content": query} | ||
] | ||
) | ||
return response.choices[0].message.content | ||
except Exception as e: | ||
return f"Error: {str(e)}" | ||
def fetch_openai_response(query, api_key=OPENAI_API_KEY): | ||
client = openai.OpenAI(api_key=api_key) | ||
try: | ||
response = client.chat.completions.create( | ||
model="gpt-4o-mini", | ||
messages=[ | ||
{"role": "user", "content": query} | ||
] | ||
) | ||
return response.choices[0].message.content | ||
except Exception as e: | ||
return f"Error: {str(e)}" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,79 +1,107 @@ | ||
import pytest | ||
from unittest.mock import patch, MagicMock | ||
import openai | ||
from llm.services.openai_service import fetch_openai_response | ||
import os | ||
|
||
@pytest.fixture | ||
def mock_openai_response(): | ||
mock_response = MagicMock() | ||
mock_response.choices = [ | ||
MagicMock( | ||
message=MagicMock( | ||
content="This is a mock response from OpenAI" | ||
) | ||
) | ||
] | ||
return mock_response | ||
mock_response = MagicMock() | ||
mock_response.choices = [ | ||
MagicMock( | ||
message=MagicMock( | ||
content="This is a mock response from OpenAI" | ||
) | ||
) | ||
] | ||
return mock_response | ||
|
||
def test_fetch_openai_response_success(mock_openai_response): | ||
with patch('openai.OpenAI') as mock_openai: | ||
# Configure the mock | ||
mock_client = MagicMock() | ||
mock_client.chat.completions.create.return_value = mock_openai_response | ||
mock_openai.return_value = mock_client | ||
with patch('openai.OpenAI') as mock_openai: | ||
# Configure the mock | ||
mock_client = MagicMock() | ||
mock_client.chat.completions.create.return_value = mock_openai_response | ||
mock_openai.return_value = mock_client | ||
|
||
# Test the function | ||
response = fetch_openai_response("Test query") | ||
# Assertions | ||
assert response == "This is a mock response from OpenAI" | ||
mock_client.chat.completions.create.assert_called_once_with( | ||
model="gpt-3.5-turbo", | ||
messages=[{"role": "user", "content": "Test query"}] | ||
) | ||
# Test the function with a dummy API key | ||
response = fetch_openai_response("Test query", api_key="test-key") | ||
# Assertions | ||
assert response == "This is a mock response from OpenAI" | ||
mock_client.chat.completions.create.assert_called_once_with( | ||
model="gpt-3.5-turbo", | ||
messages=[{"role": "user", "content": "Test query"}] | ||
) | ||
|
||
def test_fetch_openai_response_error(): | ||
with patch('openai.OpenAI') as mock_openai: | ||
# Configure the mock to raise an exception | ||
mock_client = MagicMock() | ||
mock_client.chat.completions.create.side_effect = Exception("API Error") | ||
mock_openai.return_value = mock_client | ||
with patch('openai.ChatCompletion.create') as mock_create: | ||
# Configure the mock to raise an exception | ||
mock_create.side_effect = Exception("API Error") | ||
|
||
# Test the function | ||
response = fetch_openai_response("Test query") | ||
|
||
# Assertions | ||
assert response.startswith("Error: "), f"Unexpected response: {response}" | ||
|
||
# Test the function | ||
response = fetch_openai_response("Test query") | ||
|
||
# Assertions | ||
assert response.startswith("Error: ") | ||
assert "API Error" in response | ||
|
||
@pytest.mark.django_db | ||
def test_cached_openai_query(mock_openai_response): | ||
from llm.models import Queries | ||
from llm.services.LlmQueryCache import LlmQueryCache | ||
|
||
with patch('openai.OpenAI') as mock_openai: | ||
# Configure the mock | ||
mock_client = MagicMock() | ||
mock_client.chat.completions.create.return_value = mock_openai_response | ||
mock_openai.return_value = mock_client | ||
from llm.models import Queries | ||
from llm.services.LlmQueryCache import LlmQueryCache | ||
|
||
with patch('openai.OpenAI') as mock_openai: | ||
# Configure the mock | ||
mock_client = MagicMock() | ||
mock_client.chat.completions.create.return_value = mock_openai_response | ||
mock_openai.return_value = mock_client | ||
|
||
query = "What is artificial intelligence? __not__cached__158" | ||
|
||
# Assert that the query is not cached | ||
with pytest.raises(Queries.DoesNotExist): | ||
Queries.objects.get(llm_query=query) | ||
|
||
# Get response through cache | ||
llm_response = LlmQueryCache.llm_response( | ||
query=query, | ||
query_llm_callback=fetch_openai_response, | ||
) | ||
|
||
# Verify response | ||
assert llm_response == "This is a mock response from OpenAI" | ||
|
||
# Assert that the query was cached | ||
cached_response = Queries.objects.get(llm_query=query) | ||
assert cached_response.llm_response == llm_response | ||
|
||
# Verify OpenAI was called only once | ||
mock_client.chat.completions.create.assert_called_once() | ||
|
||
@pytest.mark.integration | ||
@pytest.mark.skipif(not os.getenv('RUN_INTEGRATION_TESTS'), reason="Integration tests are not enabled") | ||
def test_fetch_openai_response_integration(): | ||
""" | ||
Integration test that actually calls OpenAI API. | ||
To run this test, set RUN_INTEGRATION_TESTS=1 in your environment. | ||
""" | ||
# Simple query that should always work | ||
query = "say only hi" | ||
|
||
response = fetch_openai_response(query) | ||
|
||
# Check that we got a response and not an error | ||
assert not response.startswith("Error: "), f"Got error response: {response}" | ||
|
||
# Check that we got some actual content | ||
assert len(response) > 0, "Response was empty" | ||
|
||
# Basic validation that response looks reasonable | ||
assert isinstance(response, str), f"Response should be string, got {type(response)}" | ||
assert len(response) < 1000, f"Response suspiciously long: {len(response)} chars" | ||
|
||
query = "What is artificial intelligence?" | ||
|
||
# Assert that the query is not cached | ||
with pytest.raises(Queries.DoesNotExist): | ||
Queries.objects.get(llm_query=query) | ||
|
||
# Get response through cache | ||
llm_response = LlmQueryCache.llm_response( | ||
query=query, | ||
query_llm_callback=fetch_openai_response, | ||
) | ||
|
||
# Verify response | ||
assert llm_response == "This is a mock response from OpenAI" | ||
|
||
# Assert that the query was cached | ||
cached_response = Queries.objects.get(llm_query=query) | ||
assert cached_response.llm_response == llm_response | ||
|
||
# Verify OpenAI was called only once | ||
mock_client.chat.completions.create.assert_called_once() | ||
def test_fetch_openai_response_invalid_key(): | ||
"""Test behavior with invalid API key""" | ||
response = fetch_openai_response("test query", api_key="invalid_key_123") | ||
assert response.startswith("Error: "), "Should have received an error with invalid key" | ||
assert "api key" in response.lower(), f"Unexpected error message: {response}" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
version = "1.2.0" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters