agent.py
(forked from neo4j-graphacademy/llm-chatbot-python)
from langchain.agents import AgentType, initialize_agent
# Include the LLM from a previous lesson
from llm import llm
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain.tools import Tool
from vector import kg_qa
from tools.cypher import cypher_qa
tools = [
    Tool.from_function(
        name="Vector Search Index",  # name the agent uses when selecting this tool
        description="Provides information about movie plots using Vector Search",  # guides the LLM on when to pick it
        func=kg_qa,  # the chain invoked with the tool input
    ),
    Tool.from_function(
        name="Graph Cypher QA Chain",
        description="Provides information about Movies including their Actors, Directors and User reviews",
        func=cypher_qa,
    ),
]
SYSTEM_MESSAGE = """
You are a movie expert providing information about movies.
Be as helpful as possible and return as much information as possible.
Do not answer any questions that do not relate to movies, actors or directors.
Do not answer any questions using your pre-trained knowledge; only use the information provided in the context.
"""
memory = ConversationBufferWindowMemory(
    memory_key="chat_history",
    k=5,
    return_messages=True,
)
agent = initialize_agent(
    tools,
    llm,
    memory=memory,
    verbose=True,
    agent=AgentType.CHAT_CONVERSATIONAL_REACT_DESCRIPTION,
    agent_kwargs={"system_message": SYSTEM_MESSAGE},
)
def generate_response(prompt):
    """
    Create a handler that calls the Conversational agent
    and returns a response to be rendered in the UI
    """
    response = agent(prompt)
    return response["output"]