[Feat]: LangGraph POC and Base restructuring #69
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits (18 commits, all by smokeyScraper):

- b32dd28 poetry updates
- 74db2c2 scoping codebase
- 2e5c7f1 add global configuration
- 6488370 add base agent and classification router
- 7e8544b add tools
- ea6c235 add orchestration logic
- 529f22c update config dependency
- 613d168 implement devrel agent and coordinator
- aaa9c8b implement discord bot
- 23fe6bf main.py and poetry updates
- 67858f8 poetry update
- 3d37fae fix: replace sync call and use lazy logging
- cb3499a refactor: discordBot to discord_bot
- 89051f3 refactor: coderabbit refactorings
- c64cf95 refactor: coderabbit refactoring
- 2182e84 refactor: modularizing codebase
- a702d36 [feature]: add naive archived thread deletion logic with timeout set …
- 0c8d5ec [fix]: fix extra classfication handled by DevRel
New file (@@ -0,0 +1,10 @@):

from .devrel.agent import DevRelAgent
from .shared.base_agent import BaseAgent, AgentState
from .shared.classification_router import ClassificationRouter

__all__ = [
    "DevRelAgent",
    "BaseAgent",
    "AgentState",
    "ClassificationRouter"
]
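A minimal consumer sketch for these exports. The package path `app.agents` and the keys passed to `config` are assumptions (the file path is not shown in this listing), and constructing `DevRelAgent` requires `settings.gemini_api_key` to be configured, since the agent builds its Gemini LLM in `__init__`:

```python
# Hypothetical usage; assumes this __init__ lives at app/agents/__init__.py
# and that a Gemini API key is available via app.core.config.settings.
from app.agents import DevRelAgent, BaseAgent

agent = DevRelAgent(config={"source": "discord"})  # config keys are illustrative
print(isinstance(agent, BaseAgent))  # True
```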
File renamed without changes.
New file (@@ -0,0 +1,100 @@):

import logging
from typing import Dict, Any
from functools import partial
from langgraph.graph import StateGraph, END
from langchain_google_genai import ChatGoogleGenerativeAI
from ..shared.base_agent import BaseAgent, AgentState
from ..shared.classification_router import MessageCategory
from .tools.search_tool import TavilySearchTool
from .tools.faq_tool import FAQTool
from app.core.config import settings
from .nodes.gather_context_node import gather_context_node
from .nodes.handle_faq_node import handle_faq_node
from .nodes.handle_web_search_node import handle_web_search_node
from .nodes.handle_technical_support_node import handle_technical_support_node
from .nodes.handle_onboarding_node import handle_onboarding_node
from .nodes.generate_response_node import generate_response_node

logger = logging.getLogger(__name__)


class DevRelAgent(BaseAgent):
    """DevRel LangGraph Agent for community support and engagement"""

    def __init__(self, config: Dict[str, Any] = None):
        self.config = config or {}
        self.llm = ChatGoogleGenerativeAI(
            model=settings.devrel_agent_model,
            temperature=0.3,
            google_api_key=settings.gemini_api_key
        )
        self.search_tool = TavilySearchTool()
        self.faq_tool = FAQTool()
        super().__init__("DevRelAgent", self.config)

    def _build_graph(self):
        """Build the DevRel agent workflow graph"""
        workflow = StateGraph(AgentState)

        # Add nodes
        workflow.add_node("gather_context", gather_context_node)
        workflow.add_node("handle_faq", partial(handle_faq_node, faq_tool=self.faq_tool))
        workflow.add_node("handle_web_search", partial(
            handle_web_search_node, search_tool=self.search_tool, llm=self.llm))
        workflow.add_node("handle_technical_support", handle_technical_support_node)
        workflow.add_node("handle_onboarding", handle_onboarding_node)
        workflow.add_node("generate_response", partial(generate_response_node, llm=self.llm))

        # Add edges
        workflow.add_conditional_edges(
            "gather_context",
            self._route_to_handler,
            {
                MessageCategory.FAQ: "handle_faq",
                MessageCategory.WEB_SEARCH: "handle_web_search",
                MessageCategory.ONBOARDING: "handle_onboarding",
                MessageCategory.TECHNICAL_SUPPORT: "handle_technical_support",
                MessageCategory.COMMUNITY_ENGAGEMENT: "handle_technical_support",
                MessageCategory.DOCUMENTATION: "handle_technical_support",
                MessageCategory.BUG_REPORT: "handle_technical_support",
                MessageCategory.FEATURE_REQUEST: "handle_technical_support",
                MessageCategory.NOT_DEVREL: "handle_technical_support"
            }
        )

        # All handlers lead to response generation
        for node in ["handle_faq", "handle_web_search", "handle_technical_support", "handle_onboarding"]:
            workflow.add_edge(node, "generate_response")

        workflow.add_edge("generate_response", END)

        # Set entry point
        workflow.set_entry_point("gather_context")

        self.graph = workflow.compile()

    def _route_to_handler(self, state: AgentState) -> str:
        """Route to the appropriate handler based on intent"""
        classification = state.context.get("classification", {})
        intent = classification.get("category")

        if isinstance(intent, str):
            try:
                intent = MessageCategory(intent.lower())
            except ValueError:
                logger.warning(f"Unknown intent string '{intent}', defaulting to TECHNICAL_SUPPORT")
                intent = MessageCategory.TECHNICAL_SUPPORT

        logger.info(f"Routing based on intent: {intent} for session {state.session_id}")

        # The returned MessageCategory member must match a key in the
        # add_conditional_edges mapping above
        if intent in [MessageCategory.FAQ, MessageCategory.WEB_SEARCH,
                      MessageCategory.ONBOARDING, MessageCategory.TECHNICAL_SUPPORT,
                      MessageCategory.COMMUNITY_ENGAGEMENT, MessageCategory.DOCUMENTATION,
                      MessageCategory.BUG_REPORT, MessageCategory.FEATURE_REQUEST,
                      MessageCategory.NOT_DEVREL]:
            logger.info(f"Routing to handler for: {intent}")
            return intent

        # Later to be changed to handle anomalies
        logger.info(f"Unknown intent '{intent}', routing to technical support")
        return MessageCategory.TECHNICAL_SUPPORT
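For reference, the workflow shape used above (an entry node that gathers context, a conditional router, several handlers that all fan into one response node) can be reproduced in a few lines. This is a minimal, self-contained sketch assuming only that `langgraph` is installed; node names, the string-based routing, and the `DemoState` schema are illustrative, whereas the real agent routes on `MessageCategory` members and binds its tools and LLM with `functools.partial` as shown in the listing:

```python
from typing import TypedDict
from langgraph.graph import StateGraph, END

class DemoState(TypedDict):
    category: str
    response: str

def gather_context(state: DemoState) -> DemoState:
    return state  # the real node enriches state.context

def route(state: DemoState) -> str:
    # the real _route_to_handler returns a MessageCategory member
    return state["category"] if state["category"] in ("faq", "web_search") else "other"

def handle_faq(state: DemoState) -> DemoState:
    return {**state, "response": "faq answer"}

def handle_web_search(state: DemoState) -> DemoState:
    return {**state, "response": "search results"}

def handle_other(state: DemoState) -> DemoState:
    return {**state, "response": "technical support"}

def generate_response(state: DemoState) -> DemoState:
    return {**state, "response": f"final: {state['response']}"}

workflow = StateGraph(DemoState)
workflow.add_node("gather_context", gather_context)
workflow.add_node("handle_faq", handle_faq)
workflow.add_node("handle_web_search", handle_web_search)
workflow.add_node("handle_other", handle_other)
workflow.add_node("generate_response", generate_response)

workflow.add_conditional_edges("gather_context", route, {
    "faq": "handle_faq",
    "web_search": "handle_web_search",
    "other": "handle_other",
})
# all handlers funnel into a single response node, then END
for node in ("handle_faq", "handle_web_search", "handle_other"):
    workflow.add_edge(node, "generate_response")
workflow.add_edge("generate_response", END)
workflow.set_entry_point("gather_context")

graph = workflow.compile()
print(graph.invoke({"category": "faq", "response": ""}))
# {'category': 'faq', 'response': 'final: faq answer'}
```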
Empty file.
Empty file.
New file (@@ -0,0 +1,22 @@):

import logging
from app.agents.shared.state import AgentState
from app.agents.shared.classification_router import MessageCategory

logger = logging.getLogger(__name__)


async def gather_context_node(state: AgentState) -> AgentState:
    """Gather additional context for the user and their request"""
    logger.info(f"Gathering context for session {state.session_id}")

    # TODO: Add context gathering from databases
    # Currently, context is simple
    # In production, query databases for user history, etc.
    context_data = {
        "user_profile": {"user_id": state.user_id, "platform": state.platform},
        "conversation_context": len(state.messages),
        "session_info": {"session_id": state.session_id}
    }

    state.context.update(context_data)
    state.current_task = "context_gathered"
    return state
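Because each node is just an async function over `AgentState`, it can be exercised directly, outside the graph. A hedged sketch, assuming the backend package is importable and that `AgentState` (defined in `app/agents/shared/state.py`, not part of this listing) accepts these fields as keyword arguments with defaults for the rest:

```python
import asyncio

from app.agents.shared.state import AgentState
from app.agents.devrel.nodes.gather_context_node import gather_context_node

async def main():
    # Field names mirror the attributes the node reads; the real constructor
    # may differ, so treat this as illustrative only.
    state = AgentState(
        session_id="demo-session",
        user_id="user-123",
        platform="discord",
        messages=[{"type": "human", "content": "How do I contribute?"}],
        context={},
    )
    state = await gather_context_node(state)
    print(state.context["user_profile"])  # {'user_id': 'user-123', 'platform': 'discord'}
    print(state.current_task)             # context_gathered

asyncio.run(main())
```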
New file (@@ -0,0 +1,76 @@):

import logging
from typing import Dict, Any
from app.agents.shared.state import AgentState
from langchain_core.messages import HumanMessage
from ..prompts.base_prompt import GENERAL_LLM_RESPONSE_PROMPT

logger = logging.getLogger(__name__)


async def _create_search_response(task_result: Dict[str, Any]) -> str:
    """Create a response string from search results."""
    query = task_result.get("query")
    results = task_result.get("results", [])
    if not results:
        return f"I couldn't find any information for '{query}'. You might want to try rephrasing your search."

    response_parts = [f"Here's what I found for '{query}':"]
    for i, result in enumerate(results[:3]):
        title = result.get('title', 'N/A')
        snippet = result.get('snippet', 'N/A')
        url = result.get('url', '#')
        result_line = f"{i+1}. {title}: {snippet}"
        response_parts.append(result_line)
        response_parts.append(f" (Source: {url})")
    response_parts.append("You can ask me to search again with a different query if these aren't helpful.")
    return "\n".join(response_parts)


async def _create_llm_response(state: AgentState, task_result: Dict[str, Any], llm) -> str:
    """Generate a response using the LLM based on the current state and task result."""
    logger.info(f"Creating LLM response for session {state.session_id}")

    latest_message = ""
    if state.messages:
        latest_message = state.messages[-1].get("content", "")
    elif state.context.get("original_message"):
        latest_message = state.context["original_message"]

    conversation_history_str = "\n".join([
        f"{msg.get('type', 'unknown')}: {msg.get('content', '')}"
        for msg in state.conversation_history[-5:]
    ])
    current_context_str = str(state.context)
    task_type_str = str(task_result.get("type", "N/A"))
    task_details_str = str(task_result)

    try:
        prompt = GENERAL_LLM_RESPONSE_PROMPT.format(
            latest_message=latest_message,
            conversation_history=conversation_history_str,
            current_context=current_context_str,
            task_type=task_type_str,
            task_details=task_details_str
        )
    except KeyError as e:
        logger.error(f"Missing key in GENERAL_LLM_RESPONSE_PROMPT: {e}")
        return "Error: Response template formatting error."

    response = await llm.ainvoke([HumanMessage(content=prompt)])
    return response.content.strip()


async def generate_response_node(state: AgentState, llm) -> AgentState:
    """Generate final response to user"""
    logger.info(f"Generating response for session {state.session_id}")
    task_result = state.task_result or {}

    if task_result.get("type") == "faq":
        state.final_response = task_result.get("response", "I don't have a specific answer for that question.")
    elif task_result.get("type") == "web_search":
        response = await _create_search_response(task_result)
        state.final_response = response
    else:
        # Pass the llm instance to _create_llm_response
        response = await _create_llm_response(state, task_result, llm)
        state.final_response = response

    state.current_task = "response_generated"
    return state
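The LLM fallback branch can be exercised without Gemini by passing a stub object that mimics the single call the node makes (`ainvoke(messages)` returning an object with a `.content` string). A hedged test sketch, under the same `AgentState` assumptions as above; `StubLLM` and the field values are hypothetical stand-ins:

```python
import asyncio

from app.agents.shared.state import AgentState
from app.agents.devrel.nodes.generate_response_node import generate_response_node

class StubLLM:
    """Mimics the one call the node makes: ainvoke(messages) -> object with .content."""
    async def ainvoke(self, messages):
        class _Response:
            content = "Here's a concise answer based on the gathered context."
        return _Response()

async def main():
    # Assumes AgentState accepts these keyword fields; conversation_history is
    # included because _create_llm_response slices it.
    state = AgentState(
        session_id="demo",
        user_id="user-123",
        platform="discord",
        messages=[{"type": "human", "content": "How does routing work?"}],
        context={},
        conversation_history=[],
        task_result={"type": "technical_support", "action": "provide_guidance"},
    )
    state = await generate_response_node(state, llm=StubLLM())
    print(state.final_response)  # the stub text
    print(state.current_task)    # response_generated

asyncio.run(main())
```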
New file (@@ -0,0 +1,26 @@):

import logging
from app.agents.shared.state import AgentState

logger = logging.getLogger(__name__)


async def handle_faq_node(state: AgentState, faq_tool) -> AgentState:
    """Handle FAQ requests"""
    logger.info(f"Handling FAQ for session {state.session_id}")

    latest_message = ""
    if state.messages:
        latest_message = state.messages[-1].get("content", "")
    elif state.context.get("original_message"):
        latest_message = state.context["original_message"]

    # faq_tool will be passed from the agent, similar to llm for classify_intent
    faq_response = await faq_tool.get_response(latest_message)

    state.task_result = {
        "type": "faq",
        "response": faq_response,
        "source": "faq_database"
    }

    state.current_task = "faq_handled"
    return state
New file (@@ -0,0 +1,17 @@):

import logging
from app.agents.shared.state import AgentState

logger = logging.getLogger(__name__)


async def handle_onboarding_node(state: AgentState) -> AgentState:
    """Handle onboarding requests"""
    logger.info(f"Handling onboarding for session {state.session_id}")

    state.task_result = {
        "type": "onboarding",
        "action": "welcome_and_guide",
        "next_steps": ["setup_environment", "first_contribution", "join_community"]
    }

    state.current_task = "onboarding_handled"
    return state
backend/app/agents/devrel/nodes/handle_technical_support_node.py (17 additions, 0 deletions)
New file (@@ -0,0 +1,17 @@):

import logging
from app.agents.shared.state import AgentState

logger = logging.getLogger(__name__)


async def handle_technical_support_node(state: AgentState) -> AgentState:
    """Handle technical support requests"""
    logger.info(f"Handling technical support for session {state.session_id}")

    state.task_result = {
        "type": "technical_support",
        "action": "provide_guidance",
        "requires_human_review": False
    }

    state.current_task = "technical_support_handled"
    return state
New file (@@ -0,0 +1,42 @@):

import logging
from app.agents.shared.state import AgentState
from langchain_core.messages import HumanMessage
from ..prompts.search_prompt import EXTRACT_SEARCH_QUERY_PROMPT

logger = logging.getLogger(__name__)


async def _extract_search_query(message: str, llm) -> str:
    """Extract a concise search query from the user's message."""
    logger.info(f"Extracting search query from: {message[:100]}")
    try:
        prompt = EXTRACT_SEARCH_QUERY_PROMPT.format(message=message)
    except KeyError as e:
        logger.error(f"Missing key in EXTRACT_SEARCH_QUERY_PROMPT: {e}")
        return message  # Fallback
    response = await llm.ainvoke([HumanMessage(content=prompt)])
    search_query = response.content.strip()
    logger.info(f"Extracted search query: {search_query}")
    return search_query


async def handle_web_search_node(state: AgentState, search_tool, llm) -> AgentState:
    """Handle web search requests"""
    logger.info(f"Handling web search for session {state.session_id}")

    latest_message = ""
    if state.messages:
        latest_message = state.messages[-1].get("content", "")
    elif state.context.get("original_message"):
        latest_message = state.context["original_message"]

    search_query = await _extract_search_query(latest_message, llm)
    search_results = await search_tool.search(search_query)

    state.task_result = {
        "type": "web_search",
        "query": search_query,
        "results": search_results,
        "source": "tavily_search"
    }
    state.tools_used.append("tavily_search")
    state.current_task = "web_search_handled"
    return state
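TavilySearchTool itself is not part of this diff. From how the results are consumed in `_create_search_response`, each result is expected to be a dict with `title`, `snippet`, and `url` keys. A hedged stub that satisfies that interface, handy for wiring the node up in tests without a Tavily API key; the class name, method signature, and sample values are assumptions:

```python
from typing import Any, Dict, List

class StubSearchTool:
    """Stand-in for the search tool the node expects: an object exposing an
    async search(query) method that returns a list of result dicts."""

    async def search(self, query: str) -> List[Dict[str, Any]]:
        # Keys mirror what _create_search_response reads: title, snippet, url.
        return [
            {
                "title": "Devr.AI README",
                "snippet": f"Placeholder result for '{query}'.",
                "url": "https://example.com/devr-ai",
            }
        ]
```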
Empty file.
Empty file.
Empty file.
Empty file.
New file (@@ -0,0 +1,16 @@):

GENERAL_LLM_RESPONSE_PROMPT = (
    "You are a helpful DevRel assistant. "
    "Your goal is to assist users with their technical questions, onboarding, and community engagement.\n\n"
    "User's message: \"{latest_message}\"\n"
    "Conversation history (last 5): \n"
    "{conversation_history}\n\n"
    "Current context:\n"
    "{current_context}\n\n"
    "Task that was just handled: {task_type}\n"
    "Details of task result: \n"
    "{task_details}\n\n"
    "Based on all this information, provide a helpful and concise response.\n"
    "If the task was 'technical_support' and no specific solution was found, offer to escalate or suggest resources.\n"
    "If the task was 'onboarding', provide welcoming and guiding information.\n"
    "Response: "
)
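The template is filled with `str.format` in `_create_llm_response`, and all five placeholders must be supplied or the node logs a KeyError and falls back to an error string. A quick illustration with toy values; the import path is inferred from the node's relative import (`..prompts.base_prompt`) and the sample values are made up:

```python
# Illustrative values only; in the agent these come from AgentState and task_result.
from app.agents.devrel.prompts.base_prompt import GENERAL_LLM_RESPONSE_PROMPT

filled = GENERAL_LLM_RESPONSE_PROMPT.format(
    latest_message="How do I set up the dev environment?",
    conversation_history="human: hi\nai: hello!",
    current_context="{'user_profile': {'platform': 'discord'}}",
    task_type="technical_support",
    task_details="{'type': 'technical_support', 'action': 'provide_guidance'}",
)
print(filled[:80])  # start of the rendered prompt sent to the LLM
```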
Empty file.
Empty file.
New file (@@ -0,0 +1,4 @@):

EXTRACT_SEARCH_QUERY_PROMPT = """Extract the core search query from the following user message.
User Message: "{message}"
Focus on the main topic or question. Be concise.
Search Query: """
Empty file.
New file (@@ -0,0 +1 @@):

# Placeholder to enhance ..shared/state.py if required
Empty file.
New file (@@ -0,0 +1,44 @@):

import logging
from typing import Optional

logger = logging.getLogger(__name__)


class FAQTool:
    """FAQ handling tool"""

    # TODO: Add FAQ responses from a database to refer organization's FAQ and Repo's FAQ

    def __init__(self):
        self.faq_responses = {
            "what is devr.ai": "Devr.AI is an AI-powered Developer Relations assistant that helps open-source communities by automating engagement, issue tracking, and providing intelligent support to developers.",
            "how do i contribute": "You can contribute by visiting our GitHub repository, checking open issues, and submitting pull requests. We welcome all types of contributions including code, documentation, and bug reports.",
            "what platforms does devr.ai support": "Devr.AI integrates with Discord, Slack, GitHub, and can be extended to other platforms. We use these integrations to provide seamless developer support across multiple channels.",
            "who maintains devr.ai": "Devr.AI is maintained by an open-source community of developers passionate about improving developer relations and community engagement.",
            "how do i report a bug": "You can report a bug by opening an issue on our GitHub repository. Please include detailed information about the bug, steps to reproduce it, and your environment.",
            "how to get started": "To get started with Devr.AI: 1) Check our documentation, 2) Join our Discord community, 3) Explore the GitHub repository, 4) Try contributing to open issues.",
            "what is langgraph": "LangGraph is a framework for building stateful, multi-actor applications with large language models. We use it to create intelligent agent workflows for our DevRel automation."
        }

    async def get_response(self, question: str) -> Optional[str]:
        """Get FAQ response for a question"""
        question_lower = question.lower().strip()

        # Direct match
        if question_lower in self.faq_responses:
            return self.faq_responses[question_lower]

        # Fuzzy matching
        for faq_key, response in self.faq_responses.items():
            if self._is_similar_question(question_lower, faq_key):
                return response

        return None

    def _is_similar_question(self, question: str, faq_key: str) -> bool:
        """Check if question is similar to FAQ key"""
        # Simple keyword matching - in production, use better similarity
        question_words = set(question.split())
        faq_words = set(faq_key.split())

        common_words = question_words.intersection(faq_words)
        return len(common_words) >= 2  # At least 2 common words
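Lookups try an exact key match first (after lowercasing and stripping), then return the first FAQ entry that shares at least two whitespace-separated words with the question. A quick check; the import path is assumed from the agent's relative import (`.tools.faq_tool`):

```python
import asyncio

from app.agents.devrel.tools.faq_tool import FAQTool  # path assumed

async def main():
    faq = FAQTool()
    print(await faq.get_response("What is Devr.AI"))                      # exact key hit after lower()/strip()
    print(await faq.get_response("how do I contribute to the project?"))  # fuzzy: >=2 words shared with "how do i contribute"
    print(await faq.get_response("pricing?"))                             # None: no key shares two words

asyncio.run(main())
```

Because the fuzzy pass is order-dependent and coarse, generic phrasings such as "how do i ..." tend to hit the first overlapping key rather than the most relevant one, which is the limitation the in-code TODO about database-backed FAQs points at.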
Empty file.