
Commit 025490a

Use class hierarchy to organize AgentChat message types and introduce StructuredMessage type (#5998)
This PR refactors the `AgentEvent` and `ChatMessage` union types into abstract base classes, so that user-defined message types subclassing one of the base classes can be used in AgentChat. To support a unified interface for working with messages, the base classes add abstract methods to:

- Convert content to a string
- Convert content to a `UserMessage` for a model client
- Convert content for rendering in the console
- Dump into a dictionary
- Load and create a new instance from a dictionary

This way, all agents such as `AssistantAgent` and `SocietyOfMindAgent` can use the unified interface to work with any built-in or user-defined message type.

This PR also introduces a new message type for AgentChat, `StructuredMessage` (resolves #5131), a generic type that requires a user-specified content type. You can create a `StructuredMessage` as follows:

```python
class MessageType(BaseModel):
    data: str
    references: List[str]


message = StructuredMessage[MessageType](
    content=MessageType(data="data", references=["a", "b"]),
    source="user",
)

# message.content is of type `MessageType`.
```

This PR addresses the receiving side of this message type; producing it from `AssistantAgent` continues in #5934.

Added unit tests to verify this message type works with agents and teams.
1 parent 8a5ee3d commit 025490a
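
For illustration, here is a minimal sketch of the unified interface applied to the `StructuredMessage` example above. The `to_text()` and `to_model_message()` calls are the ones used in the diffs below; the name of the dictionary-dump helper shown here is an assumption.

```python
from typing import List

from autogen_agentchat.messages import StructuredMessage
from pydantic import BaseModel


class MessageType(BaseModel):
    data: str
    references: List[str]


message = StructuredMessage[MessageType](
    content=MessageType(data="data", references=["a", "b"]),
    source="user",
)

print(message.to_text())                  # render the content as a string
llm_message = message.to_model_message()  # convert to a UserMessage for a model client
data = message.dump()                     # assumed name for the dump-to-dictionary method
```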


42 files changed (+4246 −3632 lines)

python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py

+4 −6

```diff
@@ -31,7 +31,6 @@
     LLMMessage,
     ModelFamily,
     SystemMessage,
-    UserMessage,
 )
 from autogen_core.tools import BaseTool, FunctionTool
 from pydantic import BaseModel
@@ -814,14 +813,13 @@ async def _add_messages_to_context(
         messages: Sequence[ChatMessage],
     ) -> None:
         """
-        Add incoming user (and possibly handoff) messages to the model context.
+        Add incoming messages to the model context.
         """
         for msg in messages:
             if isinstance(msg, HandoffMessage):
-                # Add handoff context to the model context.
-                for context_msg in msg.context:
-                    await model_context.add_message(context_msg)
-            await model_context.add_message(UserMessage(content=msg.content, source=msg.source))
+                for llm_msg in msg.context:
+                    await model_context.add_message(llm_msg)
+            await model_context.add_message(msg.to_model_message())
 
     @staticmethod
     async def _update_model_context_with_memory(
```
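
A minimal sketch of what the new handoff handling does, assuming an `UnboundedChatCompletionContext` from `autogen_core.model_context` as the model context; it mirrors the loop above rather than calling the private helper:

```python
import asyncio

from autogen_agentchat.messages import HandoffMessage
from autogen_core.model_context import UnboundedChatCompletionContext
from autogen_core.models import AssistantMessage


async def main() -> None:
    model_context = UnboundedChatCompletionContext()
    handoff = HandoffMessage(
        content="Transferring to planner.",
        target="planner",
        source="coder",
        # LLM messages carried along with the handoff (e.g. earlier tool-call results).
        context=[AssistantMessage(content="Draft plan so far...", source="coder")],
    )
    # Mirrors the new _add_messages_to_context logic: carried context first, then the handoff itself.
    for llm_msg in handoff.context:
        await model_context.add_message(llm_msg)
    await model_context.add_message(handoff.to_model_message())
    print(await model_context.get_messages())


asyncio.run(main())
```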

python/packages/autogen-agentchat/src/autogen_agentchat/agents/_base_chat_agent.py

+4 −5

```diff
@@ -7,7 +7,6 @@
 from ..base import ChatAgent, Response, TaskResult
 from ..messages import (
     AgentEvent,
-    BaseChatMessage,
     ChatMessage,
     ModelClientStreamingChunkEvent,
     TextMessage,
@@ -121,15 +120,15 @@ async def run(
             text_msg = TextMessage(content=task, source="user")
             input_messages.append(text_msg)
             output_messages.append(text_msg)
-        elif isinstance(task, BaseChatMessage):
+        elif isinstance(task, ChatMessage):
             input_messages.append(task)
             output_messages.append(task)
         else:
             if not task:
                 raise ValueError("Task list cannot be empty.")
             # Task is a sequence of messages.
             for msg in task:
-                if isinstance(msg, BaseChatMessage):
+                if isinstance(msg, ChatMessage):
                     input_messages.append(msg)
                     output_messages.append(msg)
                 else:
@@ -159,15 +158,15 @@ async def run_stream(
             input_messages.append(text_msg)
             output_messages.append(text_msg)
             yield text_msg
-        elif isinstance(task, BaseChatMessage):
+        elif isinstance(task, ChatMessage):
             input_messages.append(task)
             output_messages.append(task)
             yield task
         else:
             if not task:
                 raise ValueError("Task list cannot be empty.")
             for msg in task:
-                if isinstance(msg, BaseChatMessage):
+                if isinstance(msg, ChatMessage):
                     input_messages.append(msg)
                     output_messages.append(msg)
                     yield msg
```
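
Because the checks now target the `ChatMessage` base class, `run()` and `run_stream()` accept any built-in or user-defined message subclass as a task. A runnable sketch, assuming `autogen-ext` with the local command-line code executor is installed:

```python
import asyncio

from autogen_agentchat.agents import CodeExecutorAgent
from autogen_agentchat.messages import TextMessage
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor


async def main() -> None:
    agent = CodeExecutorAgent("executor", code_executor=LocalCommandLineCodeExecutor())
    # A task may be a string, a single ChatMessage, or a sequence of ChatMessage;
    # the normalization above routes all three into input_messages/output_messages.
    task = TextMessage(content="```python\nprint('hello')\n```", source="user")
    result = await agent.run(task=task)
    print(result.messages[-1].to_text())


asyncio.run(main())
```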

python/packages/autogen-agentchat/src/autogen_agentchat/agents/_code_executor_agent.py

+3 −1

```diff
@@ -21,7 +21,9 @@ class CodeExecutorAgentConfig(BaseModel):
 
 
 class CodeExecutorAgent(BaseChatAgent, Component[CodeExecutorAgentConfig]):
-    """An agent that extracts and executes code snippets found in received messages and returns the output.
+    """An agent that extracts and executes code snippets found in received
+    :class:`~autogen_agentchat.messages.TextMessage` messages and returns the output
+    of the code execution.
 
     It is typically used within a team with another agent that generates code snippets to be executed.
```

python/packages/autogen-agentchat/src/autogen_agentchat/agents/_society_of_mind_agent.py

+4 −9

```diff
@@ -1,7 +1,7 @@
 from typing import Any, AsyncGenerator, List, Mapping, Sequence
 
 from autogen_core import CancellationToken, Component, ComponentModel
-from autogen_core.models import ChatCompletionClient, LLMMessage, SystemMessage, UserMessage
+from autogen_core.models import ChatCompletionClient, LLMMessage, SystemMessage
 from pydantic import BaseModel
 from typing_extensions import Self
 
@@ -11,7 +11,6 @@
 from ..base import TaskResult, Team
 from ..messages import (
     AgentEvent,
-    BaseChatMessage,
     ChatMessage,
     ModelClientStreamingChunkEvent,
     TextMessage,
@@ -167,13 +166,9 @@ async def on_messages_stream(
         else:
             # Generate a response using the model client.
             llm_messages: List[LLMMessage] = [SystemMessage(content=self._instruction)]
-            llm_messages.extend(
-                [
-                    UserMessage(content=message.content, source=message.source)
-                    for message in inner_messages
-                    if isinstance(message, BaseChatMessage)
-                ]
-            )
+            for message in messages:
+                if isinstance(message, ChatMessage):
+                    llm_messages.append(message.to_model_message())
             llm_messages.append(SystemMessage(content=self._response_prompt))
             completion = await self._model_client.create(messages=llm_messages, cancellation_token=cancellation_token)
             assert isinstance(completion.content, str)
```
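
A short sketch of why delegating to each type's `to_model_message()` is preferable to hand-wrapping `message.content` in a `UserMessage`: every message type controls its own conversion, including multi-modal content.

```python
from autogen_agentchat.messages import MultiModalMessage, TextMessage

text = TextMessage(content="Summarize the inner conversation.", source="user")
multi = MultiModalMessage(content=["Here is the chart:"], source="user")  # may also hold Image items

# Each message type converts itself into an LLM message for the model client.
print(text.to_model_message())
print(multi.to_model_message())
```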

python/packages/autogen-agentchat/src/autogen_agentchat/agents/_user_proxy_agent.py

+2 −0

```diff
@@ -82,6 +82,7 @@ async def simple_user_agent():
                 cancellation_token=CancellationToken(),
             )
         )
+        assert isinstance(response.chat_message, TextMessage)
         print(f"Your name is {response.chat_message.content}")
 
     Example:
@@ -117,6 +118,7 @@ async def cancellable_user_agent():
                     )
                 )
                 response = await agent_task
+                assert isinstance(response.chat_message, TextMessage)
                 print(f"Your name is {response.chat_message.content}")
             except Exception as e:
                 print(f"Exception: {e}")
```

python/packages/autogen-agentchat/src/autogen_agentchat/conditions/_terminations.py

+2 −9

```diff
@@ -11,7 +11,6 @@
     BaseChatMessage,
     ChatMessage,
     HandoffMessage,
-    MultiModalMessage,
     StopMessage,
     TextMessage,
     ToolCallExecutionEvent,
@@ -137,18 +136,12 @@ async def __call__(self, messages: Sequence[AgentEvent | ChatMessage]) -> StopMessage | None:
             if self._sources is not None and message.source not in self._sources:
                 continue
 
-            if isinstance(message.content, str) and self._termination_text in message.content:
+            content = message.to_text()
+            if self._termination_text in content:
                 self._terminated = True
                 return StopMessage(
                     content=f"Text '{self._termination_text}' mentioned", source="TextMentionTermination"
                 )
-            elif isinstance(message, MultiModalMessage):
-                for item in message.content:
-                    if isinstance(item, str) and self._termination_text in item:
-                        self._terminated = True
-                        return StopMessage(
-                            content=f"Text '{self._termination_text}' mentioned", source="TextMentionTermination"
-                        )
         return None
 
     async def reset(self) -> None:
```
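
With the `MultiModalMessage` branch gone, the check relies on each message's `to_text()`, so text inside multi-modal content is still detected. A runnable sketch exercising the condition directly:

```python
import asyncio

from autogen_agentchat.conditions import TextMentionTermination
from autogen_agentchat.messages import MultiModalMessage, TextMessage


async def main() -> None:
    condition = TextMentionTermination("TERMINATE")
    print(await condition([TextMessage(content="All done. TERMINATE", source="assistant")]))
    await condition.reset()
    # The multi-modal message is covered by the same to_text()-based check.
    print(await condition([MultiModalMessage(content=["TERMINATE"], source="assistant")]))


asyncio.run(main())
```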
