diff --git a/backend/apps/ai/agent/tools/rag/generator.py b/backend/apps/ai/agent/tools/rag/generator.py
index b4cc685e87..4721e9e9f8 100644
--- a/backend/apps/ai/agent/tools/rag/generator.py
+++ b/backend/apps/ai/agent/tools/rag/generator.py
@@ -5,6 +5,9 @@
 from typing import Any
 
 import openai
+from django.core.exceptions import ObjectDoesNotExist
+
+from apps.core.models.prompt import Prompt
 
 logger = logging.getLogger(__name__)
 
@@ -13,23 +16,6 @@ class Generator:
     """Generates answers to user queries based on retrieved context."""
 
     MAX_TOKENS = 2000
-    SYSTEM_PROMPT = """
-You are a helpful and professional AI assistant for the OWASP Foundation.
-Your task is to answer user queries based ONLY on the provided context.
-Follow these rules strictly:
-1. Base your entire answer on the information given in the "CONTEXT" section. Do not use any
-external knowledge unless and until it is about OWASP.
-2. Do not mention or refer to the word "context", "based on context", "provided information",
-"Information given to me" or similar phrases in your responses.
-3. you will answer questions only related to OWASP and within the scope of OWASP.
-4. Be concise and directly answer the user's query.
-5. Provide the necessary link if the context contains a URL.
-6. If there is any query based on location, you need to look for latitude and longitude in the
-context and provide the nearest OWASP chapter based on that.
-7. You can ask for more information if the query is very personalized or user-centric.
-8. after trying all of the above, If the context does not contain the information or you think that
-it is out of scope for OWASP, you MUST state: "please ask question related to OWASP."
-"""
     TEMPERATURE = 0.4
 
     def __init__(self, chat_model: str = "gpt-4o"):
@@ -103,10 +89,15 @@ def generate_answer(self, query: str, context_chunks: list[dict[str, Any]]) -> s
 
         """
         try:
+            system_prompt = Prompt.get_rag_system_prompt()
+            if not system_prompt or not system_prompt.strip():
+                error_msg = "Prompt with key 'rag-system-prompt' not found."
+                raise ObjectDoesNotExist(error_msg)
+
             response = self.openai_client.chat.completions.create(
                 model=self.chat_model,
                 messages=[
-                    {"role": "system", "content": self.SYSTEM_PROMPT},
+                    {"role": "system", "content": system_prompt},
                     {"role": "user", "content": user_prompt},
                 ],
                 temperature=self.TEMPERATURE,
diff --git a/backend/apps/ai/common/constants.py b/backend/apps/ai/common/constants.py
index 27ddd0082c..636ef218cc 100644
--- a/backend/apps/ai/common/constants.py
+++ b/backend/apps/ai/common/constants.py
@@ -1,8 +1,8 @@
 """AI app constants."""
 
 DEFAULT_LAST_REQUEST_OFFSET_SECONDS = 2
-DEFAULT_CHUNKS_RETRIEVAL_LIMIT = 5
-DEFAULT_SIMILARITY_THRESHOLD = 0.4
+DEFAULT_CHUNKS_RETRIEVAL_LIMIT = 8
+DEFAULT_SIMILARITY_THRESHOLD = 0.1
 DELIMITER = "\n\n"
 GITHUB_REQUEST_INTERVAL_SECONDS = 0.5
 MIN_REQUEST_INTERVAL_SECONDS = 1.2
diff --git a/backend/apps/core/models/prompt.py b/backend/apps/core/models/prompt.py
index added88d77..0f65750e43 100644
--- a/backend/apps/core/models/prompt.py
+++ b/backend/apps/core/models/prompt.py
@@ -139,3 +139,23 @@ def get_owasp_project_summary() -> str:
 
         """
         return Prompt.get_text("owasp-project-summary")
+
+    @staticmethod
+    def get_rag_system_prompt() -> str:
+        """Return RAG system prompt.
+
+        Returns
+            str: The RAG system prompt text.
+
+        """
+        return Prompt.get_text("rag-system-prompt")
+
+    @staticmethod
+    def get_slack_question_detector_prompt() -> str:
+        """Return Slack question detector prompt.
+
+        Returns
+            str: The Slack question detector prompt text.
+ + """ + return Prompt.get_text("slack-question-detector-system-prompt") diff --git a/backend/apps/slack/blocks.py b/backend/apps/slack/blocks.py index 683fa6db8e..4d3b36acd9 100644 --- a/backend/apps/slack/blocks.py +++ b/backend/apps/slack/blocks.py @@ -4,6 +4,8 @@ from typing import Any +from apps.slack.utils import format_links_for_slack + DIVIDER = "{{ DIVIDER }}" SECTION_BREAK = "{{ SECTION_BREAK }}" @@ -30,7 +32,7 @@ def markdown(text: str) -> dict: """ return { "type": "section", - "text": {"type": "mrkdwn", "text": text}, + "text": {"type": "mrkdwn", "text": format_links_for_slack(text)}, } diff --git a/backend/apps/slack/common/question_detector.py b/backend/apps/slack/common/question_detector.py index 17d13c787d..542582b649 100644 --- a/backend/apps/slack/common/question_detector.py +++ b/backend/apps/slack/common/question_detector.py @@ -7,7 +7,9 @@ import re import openai +from django.core.exceptions import ObjectDoesNotExist +from apps.core.models.prompt import Prompt from apps.slack.constants import OWASP_KEYWORDS logger = logging.getLogger(__name__) @@ -20,18 +22,6 @@ class QuestionDetector: TEMPERATURE = 0.1 CHAT_MODEL = "gpt-4o" - SYSTEM_PROMPT = """ - You are an expert in cybersecurity and OWASP (Open Web Application Security Project). - Your task is to determine if a given question is related to OWASP, cybersecurity, - web application security, or similar topics. - - Key OWASP-related terms: {keywords} - - Respond with only "YES" if the question is related to OWASP/cybersecurity, - or "NO" if it's not. - Do not provide any explanation or additional text. - """ - def __init__(self): """Initialize the question detector. @@ -98,7 +88,12 @@ def is_owasp_question_with_openai(self, text: str) -> bool | None: - None: If the API call fails or the response is unexpected. """ - system_prompt = self.SYSTEM_PROMPT.format(keywords=", ".join(self.owasp_keywords)) + prompt_template = Prompt.get_slack_question_detector_prompt() + if not prompt_template or not prompt_template.strip(): + error_msg = "Prompt with key 'slack-question-detector-system-prompt' not found." + raise ObjectDoesNotExist(error_msg) + + system_prompt = prompt_template.format(keywords=", ".join(self.owasp_keywords)) user_prompt = f'Question: "{text}"' try: diff --git a/backend/apps/slack/utils.py b/backend/apps/slack/utils.py index 66c42c0e80..d263fc4f8e 100644 --- a/backend/apps/slack/utils.py +++ b/backend/apps/slack/utils.py @@ -35,6 +35,23 @@ def escape(content) -> str: return escape_html(content, quote=False) +def format_links_for_slack(text: str) -> str: + """Convert Markdown links to Slack markdown link format. + + Args: + text (str): The input text that may include Markdown links. + + Returns: + str: Text with Markdown links converted to Slack markdown links. + + """ + if not text: + return text + + markdown_link_pattern = re.compile(r"\[([^\]]+)\]\((https?://[^\s)]+)\)") + return markdown_link_pattern.sub(r"<\2|\1>", text) + + @lru_cache def get_gsoc_projects(year: int) -> list: """Get GSoC projects. 
diff --git a/backend/tests/apps/ai/agent/tools/rag/generator_test.py b/backend/tests/apps/ai/agent/tools/rag/generator_test.py
index 903bdf636d..7df87b5d90 100644
--- a/backend/tests/apps/ai/agent/tools/rag/generator_test.py
+++ b/backend/tests/apps/ai/agent/tools/rag/generator_test.py
@@ -5,6 +5,7 @@
 
 import openai
 import pytest
+from django.core.exceptions import ObjectDoesNotExist
 
 from apps.ai.agent.tools.rag.generator import Generator
 
@@ -102,6 +103,10 @@ def test_generate_answer_success(self):
         with (
             patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
             patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="System prompt",
+            ),
         ):
             mock_client = MagicMock()
             mock_response = MagicMock()
@@ -128,6 +133,10 @@ def test_generate_answer_with_custom_model(self):
         with (
             patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
             patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="System prompt",
+            ),
         ):
             mock_client = MagicMock()
             mock_response = MagicMock()
@@ -150,6 +159,10 @@ def test_generate_answer_openai_error(self):
         with (
             patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
             patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="System prompt",
+            ),
         ):
             mock_client = MagicMock()
             mock_client.chat.completions.create.side_effect = openai.OpenAIError("API Error")
@@ -167,6 +180,10 @@ def test_generate_answer_with_empty_chunks(self):
         with (
             patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
             patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="System prompt",
+            ),
         ):
             mock_client = MagicMock()
             mock_response = MagicMock()
@@ -184,14 +201,123 @@ def test_generate_answer_with_empty_chunks(self):
             assert "No context provided" in call_args[1]["messages"][1]["content"]
 
     def test_system_prompt_content(self):
-        """Test that system prompt contains expected content."""
-        assert "OWASP Foundation" in Generator.SYSTEM_PROMPT
-        assert "context" in Generator.SYSTEM_PROMPT.lower()
-        assert "professional" in Generator.SYSTEM_PROMPT.lower()
-        assert "latitude and longitude" in Generator.SYSTEM_PROMPT.lower()
+        """Test that system prompt passed to OpenAI comes from Prompt getter."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="OWASP Foundation system prompt",
+            ) as mock_prompt_getter,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = "Answer"
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            generator = Generator()
+            generator.generate_answer("Q", [])
+
+            call_args = mock_client.chat.completions.create.call_args
+            assert call_args[1]["messages"][0]["role"] == "system"
+            assert call_args[1]["messages"][0]["content"] == "OWASP Foundation system prompt"
+            mock_prompt_getter.assert_called_once()
+
+    def test_generate_answer_missing_system_prompt(self):
+        """Test answer generation when system prompt is missing."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value=None,
+            ),
+        ):
+            mock_client = MagicMock()
+            mock_openai.return_value = mock_client
+
+            generator = Generator()
+
+            chunks = [{"source_name": "Test", "text": "Test content"}]
+
+            with pytest.raises(
+                ObjectDoesNotExist, match="Prompt with key 'rag-system-prompt' not found"
+            ):
+                generator.generate_answer("Test query", chunks)
+
+    def test_generate_answer_empty_system_prompt(self):
+        """Test answer generation when system prompt is empty."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value=" ",
+            ),
+        ):
+            mock_client = MagicMock()
+            mock_openai.return_value = mock_client
+
+            generator = Generator()
+
+            chunks = [{"source_name": "Test", "text": "Test content"}]
+
+            with pytest.raises(
+                ObjectDoesNotExist, match="Prompt with key 'rag-system-prompt' not found"
+            ):
+                generator.generate_answer("Test query", chunks)
+
+    def test_generate_answer_empty_openai_response(self):
+        """Test answer generation when OpenAI returns empty content."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="System prompt",
+            ),
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = ""
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            generator = Generator()
+
+            chunks = [{"source_name": "Test", "text": "Test content"}]
+            result = generator.generate_answer("Test query", chunks)
+
+            assert result == ""
+
+    def test_generate_answer_none_openai_response(self):
+        """Test answer generation when OpenAI returns None content."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.core.models.prompt.Prompt.get_rag_system_prompt",
+                return_value="System prompt",
+            ),
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = None
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            generator = Generator()
+
+            chunks = [{"source_name": "Test", "text": "Test content"}]
+
+            with pytest.raises(AttributeError):
+                generator.generate_answer("Test query", chunks)
 
     def test_constants(self):
         """Test class constants have expected values."""
         assert Generator.MAX_TOKENS == 2000
-        assert isinstance(Generator.SYSTEM_PROMPT, str)
-        assert len(Generator.SYSTEM_PROMPT) > 0
+        assert Generator.TEMPERATURE == 0.4
diff --git a/backend/tests/apps/ai/agent/tools/rag/rag_tool_test.py b/backend/tests/apps/ai/agent/tools/rag/rag_tool_test.py
index dcb24ca970..0350ef477a 100644
--- a/backend/tests/apps/ai/agent/tools/rag/rag_tool_test.py
+++ b/backend/tests/apps/ai/agent/tools/rag/rag_tool_test.py
@@ -124,9 +124,9 @@ def test_query_with_defaults(self):
         assert result == "Default answer"
         mock_retriever.retrieve.assert_called_once_with(
             content_types=None,
-            limit=5,
+            limit=8,
             query="Test question",
-            similarity_threshold=0.4,
+            similarity_threshold=0.1,
         )
 
     def test_query_empty_content_types(self):
@@ -152,9 +152,9 @@ def test_query_empty_content_types(self):
         assert result == "Answer"
         mock_retriever.retrieve.assert_called_once_with(
             content_types=[],
-            limit=5,
+            limit=8,
             query="Test question",
-            similarity_threshold=0.4,
+            similarity_threshold=0.1,
         )
 
     @patch("apps.ai.agent.tools.rag.rag_tool.logger")
diff --git a/backend/tests/apps/ai/management/commands/ai_run_rag_tool_test.py b/backend/tests/apps/ai/management/commands/ai_run_rag_tool_test.py
index 33017e3169..71506eb673 100644
--- a/backend/tests/apps/ai/management/commands/ai_run_rag_tool_test.py
+++ b/backend/tests/apps/ai/management/commands/ai_run_rag_tool_test.py
@@ -40,13 +40,13 @@ def test_add_arguments(self, command):
         parser.add_argument.assert_any_call(
             "--limit",
             type=int,
-            default=5,  # DEFAULT_CHUNKS_RETRIEVAL_LIMIT
+            default=8,  # DEFAULT_CHUNKS_RETRIEVAL_LIMIT
             help="Maximum number of results to retrieve",
         )
         parser.add_argument.assert_any_call(
             "--threshold",
             type=float,
-            default=0.4,  # DEFAULT_SIMILARITY_THRESHOLD
+            default=0.1,  # DEFAULT_SIMILARITY_THRESHOLD
             help="Similarity threshold (0.0 to 1.0)",
         )
         parser.add_argument.assert_any_call(
@@ -106,8 +106,8 @@ def test_handle_initialization_error(self, mock_rag_tool, command):
 
         command.handle(
             query="What is OWASP Foundation?",
-            limit=5,
-            threshold=0.5,
+            limit=8,
+            threshold=0.1,
             content_types=None,
             embedding_model="text-embedding-3-small",
             chat_model="gpt-4o",
@@ -124,8 +124,8 @@ def test_handle_with_default_values(self, mock_rag_tool, command):
 
         command.handle(
             query="What is OWASP Foundation?",
-            limit=5,
-            threshold=0.5,
+            limit=8,
+            threshold=0.1,
             content_types=None,
             embedding_model="text-embedding-3-small",
             chat_model="gpt-4o",
@@ -136,7 +136,7 @@ def test_handle_with_default_values(self, mock_rag_tool, command):
         )
         mock_rag_instance.query.assert_called_once_with(
             content_types=None,
-            limit=5,  # DEFAULT_CHUNKS_RETRIEVAL_LIMIT
+            limit=8,  # DEFAULT_CHUNKS_RETRIEVAL_LIMIT
             question="What is OWASP Foundation?",
-            similarity_threshold=0.5,  # DEFAULT_SIMILARITY_THRESHOLD
+            similarity_threshold=0.1,  # DEFAULT_SIMILARITY_THRESHOLD
         )
diff --git a/backend/tests/apps/slack/common/question_detector_test.py b/backend/tests/apps/slack/common/question_detector_test.py
index 4a6bdff9cc..edb102e89f 100644
--- a/backend/tests/apps/slack/common/question_detector_test.py
+++ b/backend/tests/apps/slack/common/question_detector_test.py
@@ -1,8 +1,11 @@
 """Tests for question detector functionality."""
 
-from unittest.mock import patch
+import os
+from unittest.mock import MagicMock, patch
 
+import openai
 import pytest
+from django.core.exceptions import ObjectDoesNotExist
 
 from apps.slack.common.question_detector import QuestionDetector
 
@@ -13,14 +16,20 @@ class TestQuestionDetector:
     @pytest.fixture(autouse=True)
     def _mock_openai(self, monkeypatch):
         """Avoid real OpenAI calls by forcing fallback path."""
+        monkeypatch.setenv("DJANGO_OPEN_AI_SECRET_KEY", "test-key")
+
+        mock_client = MagicMock()
+        mock_client.chat.completions.create.side_effect = openai.OpenAIError("Mocked OpenAI call")
+
+        monkeypatch.setattr("openai.OpenAI", MagicMock(return_value=mock_client))
+
         monkeypatch.setattr(
-            QuestionDetector,
-            "is_owasp_question_with_openai",
-            lambda *_args, **_kwargs: None,
+            "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+            lambda: "System prompt with {keywords}",
         )
 
     @pytest.fixture
-    def detector(self):
+    def detector(self, monkeypatch):
         """Fixture to provide QuestionDetector instance."""
         return QuestionDetector()
 
@@ -195,7 +204,12 @@ def test_complex_owasp_questions(self, detector, question):
 
     def test_mocked_initialization(self):
         """Test with mocked QuestionDetector initialization."""
-        with patch("apps.slack.common.question_detector.OWASP_KEYWORDS", {"mocked", "keywords"}):
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch("openai.OpenAI") as mock_openai,
+            patch("apps.slack.common.question_detector.OWASP_KEYWORDS", {"mocked", "keywords"}),
+        ):
+            mock_openai.return_value = MagicMock()
             detector = QuestionDetector()
 
             assert detector.owasp_keywords == {"mocked", "keywords"}
@@ -206,10 +220,259 @@ def test_class_constants(self, detector):
         assert detector.MAX_TOKENS == 50
         assert detector.TEMPERATURE == 0.1
         assert detector.CHAT_MODEL == "gpt-4o"
-        assert "OWASP" in detector.SYSTEM_PROMPT
-        assert "{keywords}" in detector.SYSTEM_PROMPT
 
     def test_openai_client_initialization(self, detector):
         """Test that OpenAI client is properly initialized."""
         assert detector.openai_client is not None
         assert hasattr(detector.openai_client, "chat")
+
+    def test_is_owasp_question_with_openai_missing_prompt(self):
+        """Test OpenAI question detection when prompt is missing."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value=None,
+            ),
+        ):
+            detector = QuestionDetector()
+
+            with pytest.raises(
+                ObjectDoesNotExist,
+                match="Prompt with key 'slack-question-detector-system-prompt' not found",
+            ):
+                detector.is_owasp_question_with_openai("What is OWASP?")
+
+    def test_is_owasp_question_with_openai_empty_prompt(self):
+        """Test OpenAI question detection when prompt is empty."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value=" ",
+            ),
+        ):
+            detector = QuestionDetector()
+
+            with pytest.raises(
+                ObjectDoesNotExist,
+                match="Prompt with key 'slack-question-detector-system-prompt' not found",
+            ):
+                detector.is_owasp_question_with_openai("What is OWASP?")
+
+    def test_is_owasp_question_with_openai_success_yes(self):
+        """Test OpenAI question detection with YES response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = "YES"
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is OWASP?")
+
+            assert result is True
+
+    def test_is_owasp_question_with_openai_success_no(self):
+        """Test OpenAI question detection with NO response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = "NO"
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is Python?")
+
+            assert result is False
+
+    def test_is_owasp_question_with_openai_empty_response(self):
+        """Test OpenAI question detection with empty response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = ""
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is OWASP?")
+
+            assert result is None
+
+    def test_is_owasp_question_with_openai_none_response(self):
+        """Test OpenAI question detection with None response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = None
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is OWASP?")
+
+            assert result is None
+
+    def test_is_owasp_question_with_openai_unexpected_response(self):
+        """Test OpenAI question detection with unexpected response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = "MAYBE"
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is OWASP?")
+
+            assert result is None
+
+    def test_is_owasp_question_with_openai_api_error(self):
+        """Test OpenAI question detection with API error."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_client.chat.completions.create.side_effect = openai.OpenAIError("API Error")
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is OWASP?")
+
+            assert result is None
+
+    def test_is_owasp_question_with_openai_case_insensitive(self):
+        """Test OpenAI question detection with case insensitive responses."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = "yes, this is OWASP related"
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is OWASP?")
+
+            assert result is True
+
+    def test_is_owasp_question_with_openai_no_in_response(self):
+        """Test OpenAI question detection with 'no' in response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+        ):
+            mock_client = MagicMock()
+            mock_response = MagicMock()
+            mock_response.choices = [MagicMock()]
+            mock_response.choices[0].message.content = "no, this is not OWASP related"
+            mock_client.chat.completions.create.return_value = mock_response
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question_with_openai("What is Python?")
+
+            assert result is False
+
+    def test_is_owasp_question_openai_override_with_keywords(self):
+        """Test that keyword detection overrides OpenAI NO response."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.slack.common.question_detector.QuestionDetector.is_owasp_question_with_openai",
+                return_value=False,
+            ),
+        ):
+            mock_client = MagicMock()
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question("What is OWASP security?")
+
+            assert result is True
+
+    def test_is_owasp_question_openai_override_without_keywords(self):
+        """Test that keyword detection does not override OpenAI NO response when no keywords."""
+        with (
+            patch.dict(os.environ, {"DJANGO_OPEN_AI_SECRET_KEY": "test-key"}),
+            patch(
+                "apps.slack.common.question_detector.Prompt.get_slack_question_detector_prompt",
+                return_value="System prompt with {keywords}",
+            ),
+            patch("openai.OpenAI") as mock_openai,
+            patch(
+                "apps.slack.common.question_detector.QuestionDetector.is_owasp_question_with_openai",
+                return_value=False,
+            ),
+        ):
+            mock_client = MagicMock()
+            mock_openai.return_value = mock_client
+
+            detector = QuestionDetector()
+            result = detector.is_owasp_question("What is Python programming?")
+
+            assert result is False