diff --git a/python/packages/autogen-studio/autogenstudio/datamodel/types.py b/python/packages/autogen-studio/autogenstudio/datamodel/types.py
index 71a62a7124e8..f37c008fba58 100644
--- a/python/packages/autogen-studio/autogenstudio/datamodel/types.py
+++ b/python/packages/autogen-studio/autogenstudio/datamodel/types.py
@@ -5,6 +5,7 @@
 from autogen_agentchat.base import TaskResult
 from autogen_agentchat.messages import BaseChatMessage
 from autogen_core import ComponentModel
+from autogen_core.models import UserMessage
 from autogen_ext.models.openai import OpenAIChatCompletionClient
 from pydantic import BaseModel, ConfigDict, SecretStr
 
@@ -25,6 +26,17 @@ class LLMCallEventMessage(BaseChatMessage):
     source: str = "llm_call_event"
     content: str
 
+    def to_text(self) -> str:
+        # Plain-text rendering used for display/logging of the LLM call event.
+        return self.content
+
+    def to_model_text(self) -> str:
+        return self.content
+
+    def to_model_message(self) -> UserMessage:
+        # This event is a UI/log artifact; it must never be sent back to a model.
+        raise NotImplementedError("This message type is not supported.")
+
 class MessageMeta(BaseModel):
     task: Optional[str] = None
diff --git a/python/packages/autogen-studio/tests/test_datamodel_types.py b/python/packages/autogen-studio/tests/test_datamodel_types.py
new file mode 100644
index 000000000000..88a495e7e5f5
--- /dev/null
+++ b/python/packages/autogen-studio/tests/test_datamodel_types.py
@@ -0,0 +1,15 @@
+import pytest
+
+from autogenstudio.datamodel.types import LLMCallEventMessage
+
+
+def test_LLMCallEventMessage_inner_funcs():
+    """LLMCallEventMessage renders its content as text but refuses model conversion."""
+    # Minimal instance: only `content` is required; `source` has a default.
+    message = LLMCallEventMessage(content="Test message")
+
+    # Text renderings both pass the raw content through unchanged.
+    assert message.to_text() == "Test message"
+    assert message.to_model_text() == "Test message"
+    with pytest.raises(NotImplementedError, match="This message type is not supported."):
+        message.to_model_message()