4 changes: 3 additions & 1 deletion python_a2a/client/llm/ollama.py
@@ -21,6 +21,7 @@ def __init__(
         self,
         api_url: str,
         model: str,
+        api_key: str = "ollama",
         temperature: float = 0.7,
         system_prompt: Optional[str] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
@@ -48,6 +49,7 @@ def __init__(
 
         # Initialize OpenAI compatible client
         self.__api_url = api_url
+        self.function_role = "tool"  # Ollama uses "tool" as the function role
 
         try:
             self.__models = self.list_models()
@@ -59,7 +61,7 @@ def __init__(
         if model not in self.__models:
             raise A2AImportError(f"Model '{model}' is not available in the Ollama API.")
 
-        self.client = OpenAI(base_url=f"{api_url}/v1", api_key="ollama")
+        self.client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)
 
     def list_models(self) -> List[str]:
         """
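The change above stops hardcoding the placeholder key: `api_key` is now a constructor parameter that still defaults to `"ollama"` (which a local Ollama server ignores), so existing callers are unaffected. A minimal usage sketch, assuming the class exported from this module is named `OllamaA2AClient` (the name is not visible in the diff) and using a hypothetical gateway URL and token:

```python
from python_a2a.client.llm.ollama import OllamaA2AClient  # class name assumed

# Default behaviour is unchanged: the "ollama" placeholder key is sent to a
# local server, which ignores it. The constructor validates `model` against
# the server's model list, so a reachable server is required.
local = OllamaA2AClient(api_url="http://localhost:11434", model="llama3")

# New: pass a real token when the OpenAI-compatible endpoint sits behind an
# authenticating proxy or gateway (URL and key are hypothetical).
remote = OllamaA2AClient(
    api_url="https://ollama.internal.example.com",
    model="llama3",
    api_key="sk-gateway-token",
)
```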
7 changes: 4 additions & 3 deletions python_a2a/client/llm/openai.py
@@ -57,6 +57,7 @@ def __init__(
         self.temperature = temperature
         self.system_prompt = system_prompt or "You are a helpful assistant."
         self.functions = functions
+        self.function_role = "function"  # Default role for function responses
         self.tools = self._convert_functions_to_tools() if functions else None
 
         # Initialize OpenAI client only if the API key is provided
@@ -122,7 +123,7 @@ def send_message(self, message: Message) -> Message:
             # Format function response in OpenAI's expected format
             openai_messages.append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -185,7 +186,7 @@ def send_message(self, message: Message) -> Message:
         elif message.content.type == "function_response":
             self._conversation_histories[conversation_id].append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -336,7 +337,7 @@ def send_conversation(self, conversation: Conversation) -> Conversation:
                 # Format function response for OpenAI
                 openai_messages.append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": msg.content.name,
                         "content": json.dumps(msg.content.response),
                     }
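This file takes the complementary half of the change: the `"function"` role string, previously repeated at three call sites, is hoisted into a single `self.function_role` attribute, so a backend that expects a different role (the Ollama classes set `"tool"`) overrides one attribute instead of patching every message-formatting path. A distilled sketch of the pattern, with invented class names standing in for the library's:

```python
import json


class OpenAILikeClient:
    """Sketch only: stands in for the OpenAI client touched by this PR."""

    def __init__(self) -> None:
        self.function_role = "function"  # default role for function responses

    def format_function_response(self, name: str, response: object) -> dict:
        # Call sites read the attribute instead of a hardcoded literal.
        return {
            "role": self.function_role,
            "name": name,
            "content": json.dumps(response),
        }


class OllamaLikeClient(OpenAILikeClient):
    def __init__(self) -> None:
        super().__init__()
        self.function_role = "tool"  # Ollama's OpenAI-compatible endpoint wants "tool"


print(OpenAILikeClient().format_function_response("add", {"sum": 3})["role"])  # function
print(OllamaLikeClient().format_function_response("add", {"sum": 3})["role"])  # tool
```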
6 changes: 4 additions & 2 deletions python_a2a/server/llm/ollama.py
@@ -29,6 +29,7 @@ def __init__(
         self,
         api_url: str,
         model: str,
+        api_key: str = "ollama",
         temperature: float = 0.7,
         system_prompt: Optional[str] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
@@ -62,6 +63,7 @@ def __init__(
 
         # Initialize OpenAI compatible client
         self.__api_url = api_url
+        self.function_role = "tool"  # Ollama uses "tool" as the function role
 
         try:
             self.__models = self.list_models()
@@ -75,8 +77,8 @@ def __init__(
 
         # Create an async client for streaming
         if AsyncOpenAI is not None:
-            self.client = OpenAI(base_url=f"{api_url}/v1", api_key="ollama")
-            self.async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key="ollama")
+            self.client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)
+            self.async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key=api_key)
         else:
             self.async_client = None
 
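On the server side the same `api_key` now feeds both the synchronous client and the `AsyncOpenAI` client used for streaming, so one token covers both paths. A standalone sketch of what the constructor wires up, calling the `openai` package directly; the URL, key, and model name are placeholders:

```python
import asyncio

from openai import AsyncOpenAI, OpenAI

api_url = "http://localhost:11434"
api_key = "ollama"  # swap in a real token behind an authenticating proxy

# Mirrors the constructor: both clients share the same base URL and key.
client = OpenAI(base_url=f"{api_url}/v1", api_key=api_key)
async_client = AsyncOpenAI(base_url=f"{api_url}/v1", api_key=api_key)


async def stream_reply() -> None:
    stream = await async_client.chat.completions.create(
        model="llama3",
        messages=[{"role": "user", "content": "Say hi"}],
        stream=True,
    )
    async for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            print(delta, end="", flush=True)


asyncio.run(stream_reply())
```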
7 changes: 4 additions & 3 deletions python_a2a/server/llm/openai.py
@@ -67,6 +67,7 @@ def __init__(
         self.temperature = temperature
         self.system_prompt = system_prompt or "You are a helpful AI assistant."
         self.functions = functions
+        self.function_role = "function"  # Default role for function responses
         self.tools = self._convert_functions_to_tools() if functions else None
 
         # Handle support for Ollama setup
@@ -136,7 +137,7 @@ def handle_message(self, message: Message) -> Message:
             # This is critical for function calling to work properly
             openai_messages.append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -193,7 +194,7 @@ def handle_message(self, message: Message) -> Message:
         elif message.content.type == "function_response":
             self._conversation_state[conversation_id].append(
                 {
-                    "role": "function",
+                    "role": self.function_role,
                     "name": message.content.name,
                     "content": json.dumps(message.content.response),
                 }
@@ -464,7 +465,7 @@ def handle_conversation(self, conversation: Conversation) -> Conversation:
                 # Format function response for OpenAI
                 self._conversation_state[conversation_id].append(
                     {
-                        "role": "function",
+                        "role": self.function_role,
                         "name": msg.content.name,
                         "content": json.dumps(msg.content.response),
                     }
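For context on the two role strings this PR makes configurable: OpenAI's legacy function-calling API returns a function result as a `role: "function"` message carrying a `name`, while newer OpenAI-compatible servers, including Ollama's `/v1` endpoint, expect `role: "tool"` for the same purpose. A sketch of the two message shapes `self.function_role` now selects between (function name and payload are invented):

```python
import json

result = {"temp_c": 21}

# Default on the OpenAI backends: legacy function-role message.
function_msg = {
    "role": "function",  # self.function_role == "function"
    "name": "get_weather",
    "content": json.dumps(result),
}

# Ollama backends override function_role, producing a tool-role message.
tool_msg = {
    "role": "tool",  # self.function_role == "tool"
    "name": "get_weather",
    "content": json.dumps(result),
}
```

Note that OpenAI's current tools API pairs `role: "tool"` with a `tool_call_id` rather than a `name`; the diff keeps the `name` field in both cases, which appears to rely on Ollama's compatibility layer being permissive about that field.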