Skip to content

Commit

Permalink
Merge pull request #118 from fengsh27/main
Browse files Browse the repository at this point in the history
Fix unhandled ValueError exception in knowledge graph (KG) query
  • Loading branch information
slobentanzer authored Feb 6, 2024
2 parents f5817b3 + dcdc9aa commit 3d28083
Showing 1 changed file with 17 additions and 6 deletions.
23 changes: 17 additions & 6 deletions biochatter/llm_connect.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
st = None

from abc import ABC, abstractmethod
import logging
from typing import Optional, List, Tuple
import openai

Expand All @@ -25,6 +26,8 @@
from .rag_agent import RagAgent
from ._stats import get_stats

logger = logging.getLogger(__name__)

OPENAI_MODELS = [
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
Expand Down Expand Up @@ -257,18 +260,26 @@ def _inject_context(self, text: str):
for agent in self.rag_agents:
if not agent.use_prompt:
continue
statements = statements + [
doc[0] for doc in agent.generate_responses(text)
]
try:
docs = agent.generate_responses(text)
statements = statements + [
doc[0] for doc in docs
]
except ValueError as e:
logger.warning(e)

else:
statements = []
for agent in self.rag_agents:
if not agent.use_prompt:
continue
statements = statements + [
doc[0] for doc in agent.generate_responses(text)
]
try:
docs = agent.generate_responses(text)
statements = statements + [
doc[0] for doc in docs
]
except ValueError as e:
logger.warning(e)

if statements and len(statements) > 0:
prompts = self.prompts["rag_agent_prompts"]
Expand Down

0 comments on commit 3d28083

Please sign in to comment.