diff --git a/python_a2a/client/llm/ollama.py b/python_a2a/client/llm/ollama.py
index b6c9b5c..00b8760 100644
--- a/python_a2a/client/llm/ollama.py
+++ b/python_a2a/client/llm/ollama.py
@@ -21,6 +21,7 @@ def __init__(
         self,
         api_url: str,
         model: str,
+        api_key: str = "ollama",
         temperature: float = 0.7,
         system_prompt: Optional[str] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
@@ -48,6 +49,7 @@ def __init__(

         # Initialize OpenAI compatible client
         self.__api_url = api_url
+        self.function_role = "tool"  # Ollama uses "tool" as the function role

         try:
             self.__models = self.list_models()
@@ -59,7 +61,7 @@ def __init__(
         if model not in self.__models:
             raise A2AImportError(f"Model '{model}' is not available in the Ollama API.")

-        self.client = OpenAI(base_url=f"{api_url}/v1", api_key="ollama")
+        self.client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)

     def list_models(self) -> List[str]:
         """
diff --git a/python_a2a/client/llm/openai.py b/python_a2a/client/llm/openai.py
index 644fd91..cf0245c 100644
--- a/python_a2a/client/llm/openai.py
+++ b/python_a2a/client/llm/openai.py
@@ -57,6 +57,7 @@ def __init__(
         self.temperature = temperature
         self.system_prompt = system_prompt or "You are a helpful assistant."
         self.functions = functions
+        self.function_role = "function"  # Default role for function responses
         self.tools = self._convert_functions_to_tools() if functions else None

         # Initialize OpenAI client only if the API key is provided
@@ -122,7 +123,7 @@ def send_message(self, message: Message) -> Message:
             # Format function response in OpenAI's expected format
             openai_messages.append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -185,7 +186,7 @@ def send_message(self, message: Message) -> Message:
             elif message.content.type == "function_response":
                 self._conversation_histories[conversation_id].append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": message.content.name,
                         "content": json.dumps(message.content.response),
                     }
@@ -336,7 +337,7 @@ def send_conversation(self, conversation: Conversation) -> Conversation:
             # Format function response for OpenAI
             openai_messages.append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": msg.content.name,
                     "content": json.dumps(msg.content.response),
                 }
diff --git a/python_a2a/server/llm/ollama.py b/python_a2a/server/llm/ollama.py
index 458886a..992feec 100644
--- a/python_a2a/server/llm/ollama.py
+++ b/python_a2a/server/llm/ollama.py
@@ -29,6 +29,7 @@ def __init__(
         self,
         api_url: str,
         model: str,
+        api_key: str = "ollama",
         temperature: float = 0.7,
         system_prompt: Optional[str] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
@@ -62,6 +63,7 @@ def __init__(

         # Initialize OpenAI compatible client
         self.__api_url = api_url
+        self.function_role = "tool"  # Ollama uses "tool" as the function role

         try:
             self.__models = self.list_models()
@@ -75,8 +77,8 @@ def __init__(

         # Create an async client for streaming
         if AsyncOpenAI is not None:
-            self.client = OpenAI(base_url=f"{api_url}/v1", api_key="ollama")
-            self.async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key="ollama")
+            self.client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)
+            self.async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key=api_key)
         else:
             self.async_client = None

diff --git a/python_a2a/server/llm/openai.py b/python_a2a/server/llm/openai.py
index c8513e4..c25611b 100644
--- a/python_a2a/server/llm/openai.py
+++ b/python_a2a/server/llm/openai.py
@@ -67,6 +67,7 @@ def __init__(
         self.temperature = temperature
         self.system_prompt = system_prompt or "You are a helpful AI assistant."
         self.functions = functions
+        self.function_role = "function"  # Default role for function responses
         self.tools = self._convert_functions_to_tools() if functions else None

         # Handle support for Ollama setup
@@ -136,7 +137,7 @@ def handle_message(self, message: Message) -> Message:
             # This is critical for function calling to work properly
             openai_messages.append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -193,7 +194,7 @@ def handle_message(self, message: Message) -> Message:
             elif message.content.type == "function_response":
                 self._conversation_state[conversation_id].append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": message.content.name,
                         "content": json.dumps(message.content.response),
                     }
@@ -464,7 +465,7 @@ def handle_conversation(self, conversation: Conversation) -> Conversation:
             # Format function response for OpenAI
             self._conversation_state[conversation_id].append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": msg.content.name,
                     "content": json.dumps(msg.content.response),
                 }
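
For reviewers, a minimal usage sketch of the two changes together. The import path matches the patched file, but the class name "OllamaA2AClient" is a guess; substitute whatever the module actually exports.

# Sketch only: "OllamaA2AClient" is an assumed name for the class whose
# __init__ is patched in python_a2a/client/llm/ollama.py.
from python_a2a.client.llm.ollama import OllamaA2AClient

# api_key was previously hard-coded to "ollama"; it can now be overridden,
# e.g. when an authenticating OpenAI-compatible proxy sits in front of Ollama.
client = OllamaA2AClient(
    api_url="http://localhost:11434",
    model="llama3",            # must appear in list_models()
    api_key="my-proxy-token",  # defaults to "ollama" when omitted
)

# function_role lets the shared message-formatting paths emit the role each
# backend expects: "tool" for the Ollama classes, "function" for the OpenAI ones.
print(client.function_role)  # -> "tool"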