app.py
"""
This is the template for Autogen UI.
Features:
- Continuous messaging
- Multithreading
- MultiAgent LLM architecture
Written by: Antoine Ross - October 2023.
"""
import os

from dotenv import find_dotenv, load_dotenv

import autogen
import chainlit as cl

from agents.chainlit_agents import ChainlitAssistantAgent, ChainlitUserProxyAgent
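# ChainlitAssistantAgent and ChainlitUserProxyAgent (agents/chainlit_agents.py)
# are project-local subclasses that surface Autogen messages in the Chainlit UI.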
load_dotenv(find_dotenv())
# -------------------- GLOBAL VARIABLES AND AGENTS ----------------------------------- #
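# Session keys under which the two agents are stored in cl.user_session.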
USER_PROXY_NAME = "Query Agent"
ASSISTANT = "Assistant"
# -------------------- Config List. Edit to change your preferred model to use ----------------------------- #
config_list = autogen.config_list_from_dotenv(
    dotenv_file_path=".env",
    model_api_key_map={
        "gpt-3.5-turbo-1106": "OPENAI_API_KEY",
    },
    filter_dict={
        "model": {
            "gpt-3.5-turbo-1106",
        }
    },
)
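# config_list_from_dotenv reads OPENAI_API_KEY from .env and builds one config
# entry per model in model_api_key_map; filter_dict then restricts the list to
# the models named there.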
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
llm_config = {"config_list": config_list, "api_key": OPENAI_API_KEY, "cache_seed": 42}
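# cache_seed pins Autogen's response cache: identical LLM calls are replayed
# from the on-disk cache. Set it to None to force fresh completions.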
# -------------------- Instantiate agents at the start of a new chat. Register any functions and tools the agents will use here. ---------------------------- #
@cl.on_chat_start
async def on_chat_start():
    try:
        assistant = ChainlitAssistantAgent(
            name="Assistant",
            llm_config=llm_config,
            system_message="""Assistant. Assist the User Proxy in the task.""",
            description="Assistant Agent",
        )
        user_proxy = ChainlitUserProxyAgent(
            name="User_Proxy",
            human_input_mode="ALWAYS",
            llm_config=llm_config,
            # max_consecutive_auto_reply=3,
            # is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
            code_execution_config=False,
            system_message="""Manager. Do the task. Collaborate with the Assistant to finish the task.""",
            description="User Proxy Agent",
        )
        print("Set agents.")
        cl.user_session.set(USER_PROXY_NAME, user_proxy)
        cl.user_session.set(ASSISTANT, assistant)
        msg = cl.Message(
            content="Hello! What task would you like to get done today?",
            author="User_Proxy",
        )
        await msg.send()
        print("Message sent.")
    except Exception as e:
        print("Error: ", e)
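# cl.user_session is scoped to a single Chainlit connection, so every browser
# tab gets its own assistant/user-proxy pair and conversation state.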
# -------------------- Handle each user message: route it into the agent group chat. ---------------------------- #
@cl.on_message
async def run_conversation(message: cl.Message):
    print("Running conversation")
    CONTEXT = message.content
    MAX_ITER = 10
    assistant = cl.user_session.get(ASSISTANT)
    user_proxy = cl.user_session.get(USER_PROXY_NAME)
    print("Setting up groupchat")
    groupchat = autogen.GroupChat(agents=[user_proxy, assistant], messages=[], max_round=MAX_ITER)
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
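    # The manager relays messages between the two agents and picks the next
    # speaker each round (LLM-selected by default), for at most MAX_ITER rounds.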
    # -------------------- Conversation logic. Edit to change the first message based on the task you want done. ----------------------------- #
    if len(groupchat.messages) == 0:
        task_message = f"""Do the task based on the user input: {CONTEXT}."""
        await cl.Message(content="Starting agents on task...").send()
        # initiate_chat is blocking; cl.make_async runs it in a worker thread so
        # the Chainlit event loop stays responsive.
        await cl.make_async(user_proxy.initiate_chat)(manager, message=task_message)
    elif len(groupchat.messages) < MAX_ITER:
        await cl.make_async(user_proxy.send)(manager, message=CONTEXT)
    elif len(groupchat.messages) == MAX_ITER:
        await cl.make_async(user_proxy.send)(manager, message="exit")
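# To launch the UI (assumes chainlit and pyautogen are installed, and .env
# defines OPENAI_API_KEY):
#   chainlit run app.py -w
# The -w flag reloads the app automatically when source files change.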