
Commit

⚡️ Added history
PhilippGawlik committed Jul 18, 2024
1 parent e87ef89 commit 7aaec7a
Showing 5 changed files with 35 additions and 17 deletions.
14 changes: 12 additions & 2 deletions api.py
@@ -4,6 +4,9 @@
import uvicorn


from langchain.load.dump import dumps
from langchain.load.load import loads

from interface.response_models import ResponseModel, CTA, CTAType
from interface.request_models import RequestModel
from src.context import get_context
@@ -49,16 +52,23 @@ async def redirect():
response_model=ResponseModel
)
def answer_a_question(query: RequestModel) -> ResponseModel:
question = query.question
if query.history is not None:
history = loads(query.history)
else:
history = []

context = get_context(query.question)
prompt = assemble_prompt(query.question, context)
answer = generate_answer(prompt)
answer, history = generate_answer(prompt, history)
refs = [c.metadata["title"] for c in context]
return ResponseModel(
status="ok",
msg="Successfully generated answer",
answer=answer,
cta=list(generate_cta(context)),
refs=refs
refs=refs,
history=dumps(history)
)


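A sketch of how a caller might thread the new history field through consecutive requests. The route path ("/answer"), host, and question strings are assumptions, since the endpoint decorator is not part of this hunk:

import requests  # assumption: requests is available on the client side

BASE_URL = "http://localhost:8000"  # assumption: local uvicorn default

# First turn: omit history; the handler falls back to an empty list.
first = requests.post(
    f"{BASE_URL}/answer",  # placeholder path, not visible in this diff
    json={"question": "What does this service answer questions about?"},
).json()

# Follow-up turn: echo the serialized history back so loads() can rebuild the message list.
follow_up = requests.post(
    f"{BASE_URL}/answer",
    json={"question": "Can you give more detail?", "history": first["history"]},
).json()
print(follow_up["answer"])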
7 changes: 5 additions & 2 deletions interface/request_models.py
@@ -1,8 +1,11 @@
from typing import Mapping, Optional
from typing import Mapping, Optional, Union
from pydantic import BaseModel

from langchain_core.messages import HumanMessage, SystemMessage


class RequestModel(BaseModel):
question: str
context: Optional[Mapping] = None
assistant_id: Optional[str] = None
assistant_id: Optional[str] = None
history: Optional[str] = None
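For illustration, the updated model now accepts an optional serialized history string alongside the existing fields (the values below are made up):

from interface.request_models import RequestModel

# First turn: no history yet, the field defaults to None.
req = RequestModel(question="What does this endpoint do?")

# Follow-up turn: pass back the string returned in ResponseModel.history.
req = RequestModel(
    question="Can you elaborate?",
    history="<serialized message list from the previous response>",  # illustrative placeholder
)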
1 change: 1 addition & 0 deletions interface/response_models.py
@@ -25,3 +25,4 @@ class ResponseModel(BaseModel):
status: str
msg: str
cta: list[CTA]
history: str
25 changes: 15 additions & 10 deletions src/generate_with_azure.py
@@ -1,5 +1,5 @@
import os
from typing import Optional
from typing import Optional, Union, Tuple

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.documents.base import Document
@@ -22,15 +22,20 @@
)


def generate_answer(prompt: str, system_prompt: str = SYSTEM_PROMPT) -> str:
message = SystemMessage(
content=system_prompt
)
message = HumanMessage(
content=prompt
)
result = MODEL.invoke([message])
return result.content
def generate_answer(
prompt: str,
history: list[Optional[Union[SystemMessage, HumanMessage]]],
system_prompt: str = SYSTEM_PROMPT
) -> Tuple[str, list[Union[SystemMessage, HumanMessage]]]:
if not history:
history.append(
SystemMessage(
content=system_prompt
)
)
history.append(HumanMessage(content=prompt))
result = MODEL.invoke(history)
return result.content, history


if __name__ == "__main__":
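A minimal in-process sketch of the new generate_answer call pattern; the prompts are invented, and MODEL and SYSTEM_PROMPT are module-level values as in the hunk above:

from src.generate_with_azure import generate_answer  # assumption: import path mirrors the file layout

history = []  # an empty list makes the first call seed the SystemMessage from SYSTEM_PROMPT
answer, history = generate_answer("Summarize the latest article.", history)

# Subsequent calls reuse and extend the same list.
answer, history = generate_answer("And who wrote it?", history)

Note that, as written in this hunk, only the system prompt and the user prompts are appended to history; the model's replies are not added, so earlier assistant answers are not carried into later turns unless the caller appends them.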
5 changes: 2 additions & 3 deletions src/tools.py
@@ -1,6 +1,6 @@
from typing import Optional, Generator
from typing import Optional, Generator, Mapping, Union
from langchain_core.documents.base import Document

from langchain_core.messages import HumanMessage, SystemMessage
from interface.response_models import CTA, CTAType
from urllib import parse

@@ -13,4 +13,3 @@ def generate_cta(context: list[Optional[Document]]) -> Generator[CTA, None, None
text=c.metadata["title"],
payload=f"https://brbuddy-api-service-volume.brdata-dev.de/share/URV7wHnY/{norm_name}"
)
