From 463e5d92e77b481ef01cee2bd4a7c4801a1f41fe Mon Sep 17 00:00:00 2001
From: Stan Girard
Date: Tue, 19 Sep 2023 12:11:03 +0200
Subject: [PATCH] feat(question): now not rephrasing question when passed to
 answering llm (#1202)

---
 backend/llm/qa_base.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/backend/llm/qa_base.py b/backend/llm/qa_base.py
index 319d0a17d981..c70a1630688d 100644
--- a/backend/llm/qa_base.py
+++ b/backend/llm/qa_base.py
@@ -156,6 +156,7 @@ def generate_answer(
                 llm=self._create_llm(model=self.model), prompt=CONDENSE_QUESTION_PROMPT
             ),
             verbose=False,
+            rephrase_question=False,
         )
 
         prompt_content = (
@@ -230,6 +231,7 @@ async def generate_stream(
                 llm=self._create_llm(model=self.model), prompt=CONDENSE_QUESTION_PROMPT
             ),
             verbose=False,
+            rephrase_question=False,
         )
 
         transformed_history = format_chat_history(history)
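
For context, `rephrase_question` is a field on LangChain's ConversationalRetrievalChain: with it set to False, the standalone question generated via CONDENSE_QUESTION_PROMPT is still used for document retrieval, but the answering chain receives the user's original question instead of the rephrased one. Below is a minimal sketch of that behavior outside of Quivr's qa_base.py; the ChatOpenAI model, the FAISS store, and the sample texts are illustrative assumptions, not part of this patch.

# Hedged sketch (not the patched Quivr code): demonstrating what
# rephrase_question=False does on LangChain's ConversationalRetrievalChain.
# The model name, embeddings, FAISS store, and sample text are assumptions
# for illustration only; running it requires OPENAI_API_KEY and faiss-cpu.
from langchain.chains import ConversationalRetrievalChain, LLMChain
from langchain.chains.conversational_retrieval.prompts import CONDENSE_QUESTION_PROMPT
from langchain.chains.question_answering import load_qa_chain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)  # assumed model

# Tiny in-memory vector store so the example runs end to end.
vector_store = FAISS.from_texts(
    ["Quivr stores user documents as embeddings in a vector store."],
    embedding=OpenAIEmbeddings(),
)

chain = ConversationalRetrievalChain(
    retriever=vector_store.as_retriever(),
    combine_docs_chain=load_qa_chain(llm, chain_type="stuff"),
    question_generator=LLMChain(llm=llm, prompt=CONDENSE_QUESTION_PROMPT),
    verbose=False,
    # The condensed standalone question is still used for retrieval, but the
    # answering LLM receives the user's original question unchanged.
    rephrase_question=False,
)

result = chain(
    {
        "question": "Where does it keep them?",
        "chat_history": [("What does Quivr do with my documents?", "It stores them.")],
    }
)
print(result["answer"])

With the default rephrase_question=True, the combine_docs_chain would instead see the generated standalone question; this patch switches both generate_answer and generate_stream to keep the user's original wording when producing the answer.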