From b873717ae58a67b88b5d0c4e22f48c96781d6c0b Mon Sep 17 00:00:00 2001
From: x42en
Date: Wed, 28 May 2025 15:14:46 +0200
Subject: [PATCH 1/3] fix: Function role definition based on Server type

---
 python_a2a/server/llm/ollama.py | 1 +
 python_a2a/server/llm/openai.py | 7 ++++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/python_a2a/server/llm/ollama.py b/python_a2a/server/llm/ollama.py
index 458886a..2590bd6 100644
--- a/python_a2a/server/llm/ollama.py
+++ b/python_a2a/server/llm/ollama.py
@@ -62,6 +62,7 @@ def __init__(
 
         # Initialize OpenAI compatible client
         self.__api_url = api_url
+        self.function_role = "tool"  # Ollama uses "tool" as the function role
 
         try:
             self.__models = self.list_models()
diff --git a/python_a2a/server/llm/openai.py b/python_a2a/server/llm/openai.py
index c8513e4..c25611b 100644
--- a/python_a2a/server/llm/openai.py
+++ b/python_a2a/server/llm/openai.py
@@ -67,6 +67,7 @@ def __init__(
         self.temperature = temperature
         self.system_prompt = system_prompt or "You are a helpful AI assistant."
         self.functions = functions
+        self.function_role = "function"  # Default role for function responses
         self.tools = self._convert_functions_to_tools() if functions else None
 
         # Handle support for Ollama setup
@@ -136,7 +137,7 @@ def handle_message(self, message: Message) -> Message:
                 # This is critical for function calling to work properly
                 openai_messages.append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": message.content.name,
                         "content": json.dumps(message.content.response),
                     }
@@ -193,7 +194,7 @@ def handle_message(self, message: Message) -> Message:
             elif message.content.type == "function_response":
                 self._conversation_state[conversation_id].append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": message.content.name,
                         "content": json.dumps(message.content.response),
                     }
@@ -464,7 +465,7 @@ def handle_conversation(self, conversation: Conversation) -> Conversation:
                 # Format function response for OpenAI
                 self._conversation_state[conversation_id].append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": msg.content.name,
                         "content": json.dumps(msg.content.response),
                     }

From 50ca3f227d577562370fd7108b56772cde15c145 Mon Sep 17 00:00:00 2001
From: x42en
Date: Wed, 28 May 2025 15:17:45 +0200
Subject: [PATCH 2/3] fix: Function role definition based on Client type

---
 python_a2a/client/llm/ollama.py | 1 +
 python_a2a/client/llm/openai.py | 7 ++++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/python_a2a/client/llm/ollama.py b/python_a2a/client/llm/ollama.py
index b6c9b5c..f529fbc 100644
--- a/python_a2a/client/llm/ollama.py
+++ b/python_a2a/client/llm/ollama.py
@@ -48,6 +48,7 @@ def __init__(
 
         # Initialize OpenAI compatible client
         self.__api_url = api_url
+        self.function_role = "tool"  # Ollama uses "tool" as the function role
 
         try:
             self.__models = self.list_models()
diff --git a/python_a2a/client/llm/openai.py b/python_a2a/client/llm/openai.py
index 644fd91..cf0245c 100644
--- a/python_a2a/client/llm/openai.py
+++ b/python_a2a/client/llm/openai.py
@@ -57,6 +57,7 @@ def __init__(
         self.temperature = temperature
         self.system_prompt = system_prompt or "You are a helpful assistant."
         self.functions = functions
+        self.function_role = "function"  # Default role for function responses
         self.tools = self._convert_functions_to_tools() if functions else None
 
         # Initialize OpenAI client only if the API key is provided
@@ -122,7 +123,7 @@ def send_message(self, message: Message) -> Message:
             # Format function response in OpenAI's expected format
             openai_messages.append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -185,7 +186,7 @@ def send_message(self, message: Message) -> Message:
             elif message.content.type == "function_response":
                 self._conversation_histories[conversation_id].append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": message.content.name,
                         "content": json.dumps(message.content.response),
                     }
@@ -336,7 +337,7 @@ def send_conversation(self, conversation: Conversation) -> Conversation:
                 # Format function response for OpenAI
                 openai_messages.append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": msg.content.name,
                         "content": json.dumps(msg.content.response),
                     }

From 03a13edeac1c73be9859558b6fd933b3dc4a09ad Mon Sep 17 00:00:00 2001
From: x42en
Date: Wed, 28 May 2025 15:18:33 +0200
Subject: [PATCH 3/3] feat: Add support for Ollama API key

---
 python_a2a/client/llm/ollama.py | 3 ++-
 python_a2a/server/llm/ollama.py | 5 +++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/python_a2a/client/llm/ollama.py b/python_a2a/client/llm/ollama.py
index f529fbc..00b8760 100644
--- a/python_a2a/client/llm/ollama.py
+++ b/python_a2a/client/llm/ollama.py
@@ -21,6 +21,7 @@ def __init__(
         self,
         api_url: str,
         model: str,
+        api_key: str = "ollama",
         temperature: float = 0.7,
         system_prompt: Optional[str] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
@@ -60,7 +61,7 @@ def __init__(
         if model not in self.__models:
             raise A2AImportError(f"Model '{model}' is not available in the Ollama API.")
 
-        self.client = OpenAI(base_url=f"{api_url}/v1", api_key="ollama")
+        self.client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)
 
     def list_models(self) -> List[str]:
         """
diff --git a/python_a2a/server/llm/ollama.py b/python_a2a/server/llm/ollama.py
index 2590bd6..992feec 100644
--- a/python_a2a/server/llm/ollama.py
+++ b/python_a2a/server/llm/ollama.py
@@ -29,6 +29,7 @@ def __init__(
         self,
         api_url: str,
         model: str,
+        api_key: str = "ollama",
         temperature: float = 0.7,
         system_prompt: Optional[str] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
@@ -76,8 +77,8 @@ def __init__(
 
         # Create an async client for streaming
         if AsyncOpenAI is not None:
-            self.client = OpenAI(base_url=f"{api_url}/v1", api_key="ollama")
-            self.async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key="ollama")
+            self.client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)
+            self.async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key=api_key)
         else:
             self.async_client = None
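
Usage note (not part of the patches): a minimal sketch of how the changed
constructors would be called after all three patches apply. The class name
OllamaA2AClient and the model name are assumptions inferred from the file path
python_a2a/client/llm/ollama.py; check the module for the actual export before
running this.

    from python_a2a.client.llm.ollama import OllamaA2AClient  # name assumed

    # Patch 3: api_key is now a constructor parameter (default "ollama"),
    # which lets the client reach an Ollama endpoint behind an auth proxy
    # instead of always sending the hardcoded "ollama" placeholder key.
    client = OllamaA2AClient(
        api_url="http://localhost:11434",  # default Ollama port
        model="llama3",                    # must be a locally pulled model
        api_key="my-proxy-token",          # hypothetical token value
    )

    # Patches 1 and 2: the role attached to function-response messages is now
    # a per-class attribute rather than the hardcoded "function" string, since
    # per the diff comments Ollama's OpenAI-compatible endpoint expects "tool"
    # while the OpenAI classes keep "function" as their default.
    assert client.function_role == "tool"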