diff --git a/examples/langchain_toolcall.py b/examples/langchain_toolcall.py
index c7b6415..6fc3939 100644
--- a/examples/langchain_toolcall.py
+++ b/examples/langchain_toolcall.py
@@ -11,6 +11,7 @@
 from dotenv import load_dotenv
 
+# Add OPENAI_API_KEY and TAVILY_API_KEY for this example.
 load_dotenv()
 
 model = ChatOpenAI(model="gpt-4o")
 
@@ -43,6 +44,7 @@
 
 cb = lai_client.langchain_callback()
 
+# Replace with ainvoke for asynchronous execution.
 agent_executor.invoke(
     {
         "chat_history": [
diff --git a/literalai/callback/langchain_callback.py b/literalai/callback/langchain_callback.py
index 703b506..57db15a 100644
--- a/literalai/callback/langchain_callback.py
+++ b/literalai/callback/langchain_callback.py
@@ -101,6 +101,7 @@ def _convert_message_dict(
         class_name = message["id"][-1]
         kwargs = message.get("kwargs", {})
         function_call = kwargs.get("additional_kwargs", {}).get("function_call")
+        tool_calls = kwargs.get("additional_kwargs", {}).get("tool_calls")
 
         msg = GenerationMessage(
             name=kwargs.get("name"),
@@ -113,6 +114,9 @@ def _convert_message_dict(
         else:
             msg["content"] = kwargs.get("content", "")
 
+        if tool_calls:
+            msg["tool_calls"] = tool_calls
+
         return msg
 
     def _convert_message(
@@ -124,6 +128,7 @@ def _convert_message(
             message,
         )
         function_call = message.additional_kwargs.get("function_call")
+        tool_calls = message.additional_kwargs.get("tool_calls")
         msg = GenerationMessage(
             name=getattr(message, "name", None),
             role=_convert_message_role(message.type),
@@ -139,6 +144,9 @@ def _convert_message(
         else:
             msg["content"] = message.content  # type: ignore
 
+        if tool_calls:
+            msg["tool_calls"] = tool_calls
+
         return msg
 
     def _build_llm_settings(
diff --git a/literalai/prompt_engineering/prompt.py b/literalai/prompt_engineering/prompt.py
index fdaf609..19e3933 100644
--- a/literalai/prompt_engineering/prompt.py
+++ b/literalai/prompt_engineering/prompt.py
@@ -231,6 +231,9 @@ def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
 
         return rendered_messages
 
+    async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
+        return self.format_messages(**kwargs)
+
         lc_messages = [(m["role"], m["content"]) for m in self.template_messages]
 
         chat_template = CustomChatPromptTemplate.from_messages(lc_messages)