diff --git a/python/packages/core/agent_framework/ollama/__init__.py b/python/packages/core/agent_framework/ollama/__init__.py
new file mode 100644
index 0000000000..eae73853c2
--- /dev/null
+++ b/python/packages/core/agent_framework/ollama/__init__.py
@@ -0,0 +1,23 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+import importlib
+from typing import Any
+
+IMPORT_PATH = "agent_framework_ollama"
+PACKAGE_NAME = "agent-framework-ollama"
+_IMPORTS = ["__version__", "OllamaChatClient", "OllamaSettings"]
+
+
+def __getattr__(name: str) -> Any:
+    if name in _IMPORTS:
+        try:
+            return getattr(importlib.import_module(IMPORT_PATH), name)
+        except ModuleNotFoundError as exc:
+            raise ModuleNotFoundError(
+                f"The '{PACKAGE_NAME}' package is not installed, please install it with `pip install {PACKAGE_NAME}`."
+            ) from exc
+    raise AttributeError(f"Module {IMPORT_PATH} has no attribute {name}.")
+
+
+def __dir__() -> list[str]:
+    return _IMPORTS
diff --git a/python/packages/core/agent_framework/ollama/__init__.pyi b/python/packages/core/agent_framework/ollama/__init__.pyi
new file mode 100644
index 0000000000..3a1e7824d6
--- /dev/null
+++ b/python/packages/core/agent_framework/ollama/__init__.pyi
@@ -0,0 +1,13 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+from agent_framework_ollama import (
+    OllamaChatClient,
+    OllamaSettings,
+    __version__,
+)
+
+__all__ = [
+    "OllamaChatClient",
+    "OllamaSettings",
+    "__version__",
+]
diff --git a/python/packages/core/pyproject.toml b/python/packages/core/pyproject.toml
index 8eec13e8e6..17430d4e65 100644
--- a/python/packages/core/pyproject.toml
+++ b/python/packages/core/pyproject.toml
@@ -52,6 +52,7 @@ all = [
     "agent-framework-devui",
     "agent-framework-lab",
     "agent-framework-mem0",
+    "agent-framework-ollama",
     "agent-framework-purview",
     "agent-framework-redis",
 ]
diff --git a/python/packages/ollama/getting_started/README.md b/python/packages/ollama/getting_started/README.md
deleted file mode 100644
index bf159c5475..0000000000
--- a/python/packages/ollama/getting_started/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# Ollama Examples
-
-This folder contains examples demonstrating how to use Ollama models with the Agent Framework.
-
-## Prerequisites
-
-1. **Install Ollama**: Download and install Ollama from [ollama.com](https://ollama.com/)
-2. **Start Ollama**: Ensure Ollama is running on your local machine
-3. **Pull a model**: Run `ollama pull mistral` (or any other model you prefer)
-   - For function calling examples, use models that support tool calling like `mistral` or `qwen2.5`
-   - For reasoning examples, use models that support reasoning like `qwen2.5:8b`
-   - For Multimodality you can use models like `gemma3:4b`
-
-> **Note**: Not all models support all features. Function calling and reasoning capabilities depend on the specific model you're using.
-
-## Examples
-
-| File | Description |
-|------|-------------|
-| [`ollama_agent_basic.py`](ollama_agent_basic.py) | Demonstrates basic Ollama agent usage with the native Ollama Chat Client. Shows both streaming and non-streaming responses with tool calling capabilities. |
-| [`ollama_agent_reasoning.py`](ollama_agent_reasoning.py) | Demonstrates Ollama agent with reasoning capabilities using the native Ollama Chat Client. Shows how to enable thinking/reasoning mode. |
-| [`ollama_chat_client.py`](ollama_chat_client.py) | Ollama Chat Client with native Ollama Chat Client |
-| [`ollama_chat_multimodal.py`](ollama_chat_multimodal.py) | Ollama Chat with multimodal native Ollama Chat Client |
-
-## Configuration
-
-The examples use environment variables for configuration. Set the appropriate variables based on which example you're running:
-
-### For Native Ollama Examples (`ollama_agent_basic.py`, `ollama_agent_reasoning.py`)
-
-Set the following environment variables:
-
-- `OLLAMA_HOST`: The base URL for your Ollama server (optional, defaults to `http://localhost:11434`)
-  - Example: `export OLLAMA_HOST="http://localhost:11434"`
-
-- `OLLAMA_CHAT_MODEL_ID`: The model name to use
-  - Example: `export OLLAMA_CHAT_MODEL_ID="qwen2.5:8b"`
-  - Must be a model you have pulled with Ollama
\ No newline at end of file
diff --git a/python/samples/README.md b/python/samples/README.md
index 9c87ddd67b..7d291f119e 100644
--- a/python/samples/README.md
+++ b/python/samples/README.md
@@ -99,11 +99,15 @@ This directory contains samples demonstrating the capabilities of Microsoft Agen
 ### Ollama
 
+The recommended way to use Ollama is via the native `OllamaChatClient` from the `agent-framework-ollama` package.
+
 | File | Description |
 |------|-------------|
-| [`getting_started/agents/ollama/ollama_with_openai_chat_client.py`](./getting_started/agents/ollama/ollama_with_openai_chat_client.py) | Ollama with OpenAI Chat Client Example |
-| [`packages/ollama/getting_started/ollama_agent_basic.py`](../packages/ollama/getting_started/ollama_agent_basic.py) | (Experimental) Ollama Agent with native Ollama Chat Client |
-| [`packages/ollama/getting_started/ollama_agent_reasoning.py`](../packages/ollama/getting_started/ollama_agent_reasoning.py) | (Experimental) Ollama Reasoning Agent with native Ollama Chat Client |
+| [`getting_started/agents/ollama/ollama_agent_basic.py`](./getting_started/agents/ollama/ollama_agent_basic.py) | Basic Ollama Agent with native Ollama Chat Client |
+| [`getting_started/agents/ollama/ollama_agent_reasoning.py`](./getting_started/agents/ollama/ollama_agent_reasoning.py) | Ollama Agent with reasoning capabilities |
+| [`getting_started/agents/ollama/ollama_chat_client.py`](./getting_started/agents/ollama/ollama_chat_client.py) | Direct usage of Ollama Chat Client |
+| [`getting_started/agents/ollama/ollama_chat_multimodal.py`](./getting_started/agents/ollama/ollama_chat_multimodal.py) | Ollama Chat Client with multimodal (image) input |
+| [`getting_started/agents/ollama/ollama_with_openai_chat_client.py`](./getting_started/agents/ollama/ollama_with_openai_chat_client.py) | Alternative: Ollama via OpenAI Chat Client |
 
 ### OpenAI
 
@@ -149,7 +153,6 @@ This directory contains samples demonstrating the capabilities of Microsoft Agen
 | [`getting_started/chat_client/openai_assistants_client.py`](./getting_started/chat_client/openai_assistants_client.py) | OpenAI Assistants Client Direct Usage Example |
 | [`getting_started/chat_client/openai_chat_client.py`](./getting_started/chat_client/openai_chat_client.py) | OpenAI Chat Client Direct Usage Example |
 | [`getting_started/chat_client/openai_responses_client.py`](./getting_started/chat_client/openai_responses_client.py) | OpenAI Responses Client Direct Usage Example |
-| [`packages/ollama/getting_started/ollama_chat_client.py`](../packages/ollama/getting_started/ollama_chat_client.py) | (Experimental) Ollama Chat Client with native Ollama Chat Client |
 
 ## Context Providers
 
@@ -225,7 +228,6 @@ This directory contains samples demonstrating the capabilities of Microsoft Agen
 | [`getting_started/multimodal_input/azure_chat_multimodal.py`](./getting_started/multimodal_input/azure_chat_multimodal.py) | Azure OpenAI Chat with multimodal (image) input example |
 | [`getting_started/multimodal_input/azure_responses_multimodal.py`](./getting_started/multimodal_input/azure_responses_multimodal.py) | Azure OpenAI Responses with multimodal (image) input example |
 | [`getting_started/multimodal_input/openai_chat_multimodal.py`](./getting_started/multimodal_input/openai_chat_multimodal.py) | OpenAI Chat with multimodal (image) input example |
-| [`packages/ollama/getting_started/ollama_chat_multimodal.py`](../packages/ollama/getting_started/ollama_chat_multimodal.py) | (Experimental) Ollama Chat with multimodal native Ollama Chat Client |
 
 ## Azure Functions
 
diff --git a/python/samples/getting_started/agents/ollama/README.md b/python/samples/getting_started/agents/ollama/README.md
index e6d0878c74..ac4b2cb3d0 100644
--- a/python/samples/getting_started/agents/ollama/README.md
+++ b/python/samples/getting_started/agents/ollama/README.md
@@ -8,20 +8,41 @@ This folder contains examples demonstrating how to use Ollama models with the Ag
 2. **Start Ollama**: Ensure Ollama is running on your local machine
 3. **Pull a model**: Run `ollama pull mistral` (or any other model you prefer)
    - For function calling examples, use models that support tool calling like `mistral` or `qwen2.5`
-   - For reasoning examples, use models that support reasoning like `qwen2.5:8b`
+   - For reasoning examples, use models that support reasoning like `qwen3:8b`
+   - For multimodal examples, use models like `gemma3:4b`
 
-> **Note**: Not all models support all features. Function calling and reasoning capabilities depend on the specific model you're using.
+> **Note**: Not all models support all features. Function calling, reasoning, and multimodal capabilities depend on the specific model you're using.
+
+## Recommended Approach
+
+The recommended way to use Ollama with Agent Framework is via the native `OllamaChatClient` from the `agent-framework-ollama` package. This provides full support for Ollama-specific features like reasoning mode.
+
+Alternatively, you can use the `OpenAIChatClient` configured to point to your local Ollama server, which may be useful if you're already familiar with the OpenAI client interface.
 
 ## Examples
 
 | File | Description |
 |------|-------------|
-| [`ollama_with_openai_chat_client.py`](ollama_with_openai_chat_client.py) | Demonstrates how to configure OpenAI Chat Client to use local Ollama models. Shows both streaming and non-streaming responses with tool calling capabilities. |
+| [`ollama_agent_basic.py`](ollama_agent_basic.py) | Basic Ollama agent with tool calling using the native Ollama Chat Client. Shows both streaming and non-streaming responses. |
+| [`ollama_agent_reasoning.py`](ollama_agent_reasoning.py) | Ollama agent with reasoning capabilities using the native Ollama Chat Client. Shows how to enable thinking/reasoning mode. |
+| [`ollama_chat_client.py`](ollama_chat_client.py) | Direct usage of the native Ollama Chat Client with tool calling. |
+| [`ollama_chat_multimodal.py`](ollama_chat_multimodal.py) | Ollama Chat Client with multimodal (image) input capabilities. |
+| [`ollama_with_openai_chat_client.py`](ollama_with_openai_chat_client.py) | Alternative approach using the OpenAI Chat Client configured to use local Ollama models. |
 
 ## Configuration
 
 The examples use environment variables for configuration. Set the appropriate variables based on which example you're running:
 
+### For Native Ollama Examples
+
+Set the following environment variables:
+
+- `OLLAMA_HOST`: The base URL for your Ollama server (optional, defaults to `http://localhost:11434`)
+  - Example: `export OLLAMA_HOST="http://localhost:11434"`
+
+- `OLLAMA_CHAT_MODEL_ID`: The model name to use
+  - Example: `export OLLAMA_CHAT_MODEL_ID="qwen2.5:8b"`
+  - Must be a model you have pulled with Ollama
+
 ### For OpenAI Client with Ollama (`ollama_with_openai_chat_client.py`)
 
diff --git a/python/packages/ollama/getting_started/ollama_agent_basic.py b/python/samples/getting_started/agents/ollama/ollama_agent_basic.py
similarity index 97%
rename from python/packages/ollama/getting_started/ollama_agent_basic.py
rename to python/samples/getting_started/agents/ollama/ollama_agent_basic.py
index 3769410332..4d2a69b56b 100644
--- a/python/packages/ollama/getting_started/ollama_agent_basic.py
+++ b/python/samples/getting_started/agents/ollama/ollama_agent_basic.py
@@ -3,7 +3,7 @@
 import asyncio
 from datetime import datetime
 
-from agent_framework_ollama import OllamaChatClient
+from agent_framework.ollama import OllamaChatClient
 
 """
 Ollama Agent Basic Example
diff --git a/python/packages/ollama/getting_started/ollama_agent_reasoning.py b/python/samples/getting_started/agents/ollama/ollama_agent_reasoning.py
similarity index 96%
rename from python/packages/ollama/getting_started/ollama_agent_reasoning.py
rename to python/samples/getting_started/agents/ollama/ollama_agent_reasoning.py
index a76492b42b..e123ca04b5 100644
--- a/python/packages/ollama/getting_started/ollama_agent_reasoning.py
+++ b/python/samples/getting_started/agents/ollama/ollama_agent_reasoning.py
@@ -3,8 +3,7 @@
 import asyncio
 
 from agent_framework import TextReasoningContent
-
-from agent_framework_ollama import OllamaChatClient
+from agent_framework.ollama import OllamaChatClient
 
 """
 Ollama Agent Reasoning Example
diff --git a/python/packages/ollama/getting_started/ollama_chat_client.py b/python/samples/getting_started/agents/ollama/ollama_chat_client.py
similarity index 63%
rename from python/packages/ollama/getting_started/ollama_chat_client.py
rename to python/samples/getting_started/agents/ollama/ollama_chat_client.py
index 5cde122ecd..5d7197d8f5 100644
--- a/python/packages/ollama/getting_started/ollama_chat_client.py
+++ b/python/samples/getting_started/agents/ollama/ollama_chat_client.py
@@ -3,12 +3,19 @@
 import asyncio
 from datetime import datetime
 
-from agent_framework_ollama import OllamaChatClient
+from agent_framework.ollama import OllamaChatClient
 
-# Ensure to install Ollama and have a model running locally before running the sample
-# Not all Models support function calling, to test function calling try llama3.2
-# Set the model to use via the OLLAMA_CHAT_MODEL_ID environment variable or modify the code below.
-# https://ollama.com/
+"""
+Ollama Chat Client Example
+
+This sample demonstrates using the native Ollama Chat Client directly.
+
+Ensure Ollama is installed and a model is running locally before running the sample.
+Not all models support function calling; to test function calling, try llama3.2.
+Set the model to use via the OLLAMA_CHAT_MODEL_ID environment variable or modify the code below.
+https://ollama.com/
+
+"""
 
 
 def get_time():
diff --git a/python/packages/ollama/getting_started/ollama_chat_multimodal.py b/python/samples/getting_started/agents/ollama/ollama_chat_multimodal.py
similarity index 96%
rename from python/packages/ollama/getting_started/ollama_chat_multimodal.py
rename to python/samples/getting_started/agents/ollama/ollama_chat_multimodal.py
index e0670d785d..724cecbe72 100644
--- a/python/packages/ollama/getting_started/ollama_chat_multimodal.py
+++ b/python/samples/getting_started/agents/ollama/ollama_chat_multimodal.py
@@ -3,8 +3,7 @@
 import asyncio
 
 from agent_framework import ChatMessage, DataContent, Role, TextContent
-
-from agent_framework_ollama import OllamaChatClient
+from agent_framework.ollama import OllamaChatClient
 
 """
 Ollama Agent Multimodal Example
diff --git a/python/uv.lock b/python/uv.lock
index 27cdef8ac7..6a791a34db 100644
--- a/python/uv.lock
+++ b/python/uv.lock
@@ -335,6 +335,7 @@ all = [
     { name = "agent-framework-devui", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
     { name = "agent-framework-lab", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
     { name = "agent-framework-mem0", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
+    { name = "agent-framework-ollama", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
     { name = "agent-framework-purview", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
     { name = "agent-framework-redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
 ]
@@ -353,6 +354,7 @@ requires-dist = [
     { name = "agent-framework-devui", marker = "extra == 'all'", editable = "packages/devui" },
     { name = "agent-framework-lab", marker = "extra == 'all'", editable = "packages/lab" },
     { name = "agent-framework-mem0", marker = "extra == 'all'", editable = "packages/mem0" },
+    { name = "agent-framework-ollama", marker = "extra == 'all'", editable = "packages/ollama" },
     { name = "agent-framework-purview", marker = "extra == 'all'", editable = "packages/purview" },
     { name = "agent-framework-redis", marker = "extra == 'all'", editable = "packages/redis" },
     { name = "azure-identity", specifier = ">=1,<2" },
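
Taken together, the shim and the relocated samples mean user code imports from `agent_framework.ollama` rather than from `agent_framework_ollama` directly. Below is a minimal usage sketch of that path, assuming the `agent-framework-ollama` package is installed, an Ollama server is reachable (via `OLLAMA_HOST`, defaulting to `http://localhost:11434`), and `OLLAMA_CHAT_MODEL_ID` names a pulled, tool-capable model; the `get_weather` tool and the agent wiring are illustrative, not taken from the diff.

```python
# Minimal usage sketch for the namespace introduced by this change.
# Assumptions: agent-framework-ollama is installed, an Ollama server is
# running (OLLAMA_HOST, default http://localhost:11434), and
# OLLAMA_CHAT_MODEL_ID names a pulled, tool-capable model (e.g. mistral).
import asyncio

# Resolved lazily by agent_framework/ollama/__init__.py: succeeds when
# agent-framework-ollama is installed, otherwise raises ModuleNotFoundError
# carrying the `pip install agent-framework-ollama` hint from the shim.
from agent_framework.ollama import OllamaChatClient


def get_weather(city: str) -> str:
    """Illustrative tool; any plain function can be offered to the agent."""
    return f"It is sunny in {city}."


async def main() -> None:
    # Host and model are read from OLLAMA_HOST / OLLAMA_CHAT_MODEL_ID when
    # not passed explicitly, per the sample README above.
    agent = OllamaChatClient().create_agent(
        name="WeatherAgent",
        instructions="Answer weather questions using the provided tool.",
        tools=[get_weather],
    )
    response = await agent.run("What's the weather in Amsterdam?")
    print(response.text)


if __name__ == "__main__":
    asyncio.run(main())
```

If `agent-framework-ollama` is not installed, the `__getattr__` shim raises a `ModuleNotFoundError` with the install hint rather than an opaque import error, which is the main user-facing behavior this change adds to the core package.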