From 3ab57baf3fe1f5f4a10c115a96e02cfa06da45d8 Mon Sep 17 00:00:00 2001
From: JeevanReddy
Date: Wed, 7 Aug 2024 13:07:18 +0530
Subject: [PATCH 1/2] openai can give multiple tool calls; the current
 implementation assumes only one function call at a time. Fixed this to
 handle multiple function calls.

---
 src/pipecat/services/openai.py | 36 ++++++++++++++++++++++++++--------
 1 file changed, 28 insertions(+), 8 deletions(-)

diff --git a/src/pipecat/services/openai.py b/src/pipecat/services/openai.py
index e3113ae8b..d74ff1aee 100644
--- a/src/pipecat/services/openai.py
+++ b/src/pipecat/services/openai.py
@@ -121,6 +121,10 @@ async def _stream_chat_completions(
         return chunks
 
     async def _process_context(self, context: OpenAILLMContext):
+        functions_list = []
+        arguments_list = []
+        tool_id_list = []
+        func_idx = 0
         function_name = ""
         arguments = ""
         tool_call_id = ""
@@ -150,6 +154,14 @@ async def _process_context(self, context: OpenAILLMContext):
                 # yield a frame containing the function name and the arguments.
 
                 tool_call = chunk.choices[0].delta.tool_calls[0]
+                if tool_call.index != func_idx:
+                    functions_list.append(function_name)
+                    arguments_list.append(arguments)
+                    tool_id_list.append(tool_call_id)
+                    function_name = ""
+                    arguments = ""
+                    tool_call_id = ""
+                    func_idx += 1
                 if tool_call.function and tool_call.function.name:
                     function_name += tool_call.function.name
                     tool_call_id = tool_call.id
@@ -165,11 +177,20 @@ async def _process_context(self, context: OpenAILLMContext):
         # the context, and re-prompt to get a chat answer. If we don't have a registered
         # handler, raise an exception.
         if function_name and arguments:
-            if self.has_function(function_name):
-                await self._handle_function_call(context, tool_call_id, function_name, arguments)
-            else:
-                raise OpenAIUnhandledFunctionException(
-                    f"The LLM tried to call a function named '{function_name}', but there isn't a callback registered for that function.")
+            # added to the list as last function name and arguments not added to the list
+            functions_list.append(function_name)
+            arguments_list.append(arguments)
+            tool_id_list.append(tool_call_id)
+            for function_name,arguments,tool_id in zip(functions_list,arguments_list,tool_id_list):
+                if self.has_function(function_name):
+                    await self._handle_function_call(context, tool_call_id, function_name, arguments)
+                else:
+                    raise OpenAIUnhandledFunctionException(
+                        f"The LLM tried to call a function named '{function_name}', but there isn't a callback registered for that function.")
+            # re-prompt to get a human answer after all the functions are called
+            await self._process_context(context)
+
+
 
     async def _handle_function_call(
         self,
@@ -206,13 +227,12 @@ async def _handle_function_call(
                 "content": result
             })
             context.add_message(tool_result)
-            # re-prompt to get a human answer
-            await self._process_context(context)
+
         elif isinstance(result, list):
             # reduced magic
             for msg in result:
                 context.add_message(msg)
-            await self._process_context(context)
+
         elif isinstance(result, type(None)):
             pass
         else:

From 9e846c5ef3805a4a8c941329160e4963d5e14842 Mon Sep 17 00:00:00 2001
From: JeevanReddy
Date: Mon, 30 Sep 2024 09:13:10 +0530
Subject: [PATCH 2/2] changed tool_call_id to tool_id.
---
 src/pipecat/services/openai.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pipecat/services/openai.py b/src/pipecat/services/openai.py
index d74ff1aee..ba2bdfa88 100644
--- a/src/pipecat/services/openai.py
+++ b/src/pipecat/services/openai.py
@@ -183,7 +183,7 @@ async def _process_context(self, context: OpenAILLMContext):
             tool_id_list.append(tool_call_id)
             for function_name,arguments,tool_id in zip(functions_list,arguments_list,tool_id_list):
                 if self.has_function(function_name):
-                    await self._handle_function_call(context, tool_call_id, function_name, arguments)
+                    await self._handle_function_call(context, tool_id, function_name, arguments)
                 else:
                     raise OpenAIUnhandledFunctionException(
                         f"The LLM tried to call a function named '{function_name}', but there isn't a callback registered for that function.")
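
Note: the sketch below is not part of either patch; it is a minimal, self-contained approximation of the accumulation logic that PATCH 1/2 adds to _process_context. The ToolCallDelta and FunctionDelta dataclasses are hypothetical stand-ins for the objects found on chunk.choices[0].delta.tool_calls, and accumulate_tool_calls groups streamed fragments by index into one (tool_id, name, arguments) tuple per call, roughly the way the patched loop builds functions_list, arguments_list and tool_id_list.

# Minimal sketch, not pipecat's actual code. ToolCallDelta/FunctionDelta are
# stand-ins for the fields the patch reads from chunk.choices[0].delta.tool_calls[*].
from dataclasses import dataclass
from typing import Iterable, List, Optional, Tuple


@dataclass
class FunctionDelta:
    name: Optional[str] = None       # usually present only on the first fragment of a call
    arguments: Optional[str] = None  # JSON string arguments, streamed in pieces


@dataclass
class ToolCallDelta:
    index: int                               # which tool call this fragment belongs to
    id: Optional[str] = None                 # tool call id, present on the first fragment
    function: Optional[FunctionDelta] = None


def accumulate_tool_calls(deltas: Iterable[ToolCallDelta]) -> List[Tuple[str, str, str]]:
    """Group streamed fragments by index into (tool_id, function_name, arguments)."""
    calls = {}
    for delta in deltas:
        entry = calls.setdefault(delta.index, {"id": "", "name": "", "arguments": ""})
        if delta.id:
            entry["id"] = delta.id
        if delta.function and delta.function.name:
            entry["name"] += delta.function.name
        if delta.function and delta.function.arguments:
            entry["arguments"] += delta.function.arguments
    return [(c["id"], c["name"], c["arguments"]) for _, c in sorted(calls.items())]


if __name__ == "__main__":
    stream = [
        ToolCallDelta(index=0, id="call_a", function=FunctionDelta(name="get_weather")),
        ToolCallDelta(index=0, function=FunctionDelta(arguments='{"city": "Pa')),
        ToolCallDelta(index=0, function=FunctionDelta(arguments='ris"}')),
        ToolCallDelta(index=1, id="call_b",
                      function=FunctionDelta(name="get_time", arguments='{"tz": "CET"}')),
    ]
    # Two distinct calls come back, mirroring the functions_list / arguments_list /
    # tool_id_list accumulation that the patch adds to _process_context.
    print(accumulate_tool_calls(stream))

In streamed responses the function name and id typically arrive only on the first fragment of each call, while later fragments carry pieces of the JSON arguments; that is why both the patch and this sketch concatenate fragments rather than overwrite them, and why the per-index grouping is needed once the model emits more than one tool call.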