From 79df455fa47ebd306fe9199aa4df3edf8f9d736e Mon Sep 17 00:00:00 2001
From: majiayu000 <1835304752@qq.com>
Date: Sun, 28 Dec 2025 00:30:48 +0800
Subject: [PATCH] autogen-ext: improve import error messages for model clients
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add user-friendly ImportError messages when optional dependencies are
not installed for Ollama, Anthropic, and OpenAI model clients.

This follows the pattern already used by LlamaCppChatCompletionClient,
providing clear guidance on how to install the required dependencies
(e.g., "pip install autogen-ext[ollama]").

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
Signed-off-by: majiayu000 <1835304752@qq.com>
---
 .../autogen_ext/models/anthropic/__init__.py  | 18 +++++++++++++-----
 .../src/autogen_ext/models/ollama/__init__.py |  8 +++++++-
 .../src/autogen_ext/models/openai/__init__.py | 19 +++++++++++++------
 3 files changed, 33 insertions(+), 12 deletions(-)

diff --git a/python/packages/autogen-ext/src/autogen_ext/models/anthropic/__init__.py b/python/packages/autogen-ext/src/autogen_ext/models/anthropic/__init__.py
index f31e7b1c0b72..c49421d37aec 100644
--- a/python/packages/autogen-ext/src/autogen_ext/models/anthropic/__init__.py
+++ b/python/packages/autogen-ext/src/autogen_ext/models/anthropic/__init__.py
@@ -1,8 +1,16 @@
-from ._anthropic_client import (
-    AnthropicBedrockChatCompletionClient,
-    AnthropicChatCompletionClient,
-    BaseAnthropicChatCompletionClient,
-)
+try:
+    from ._anthropic_client import (
+        AnthropicBedrockChatCompletionClient,
+        AnthropicChatCompletionClient,
+        BaseAnthropicChatCompletionClient,
+    )
+except ImportError as e:
+    raise ImportError(
+        "Dependencies for Anthropic not found. "
+        "Please install the anthropic package: "
+        "pip install autogen-ext[anthropic]"
+    ) from e
+
 from .config import (
     AnthropicBedrockClientConfiguration,
     AnthropicBedrockClientConfigurationConfigModel,
diff --git a/python/packages/autogen-ext/src/autogen_ext/models/ollama/__init__.py b/python/packages/autogen-ext/src/autogen_ext/models/ollama/__init__.py
index 1cfcb60cd128..6c55f7e9204c 100644
--- a/python/packages/autogen-ext/src/autogen_ext/models/ollama/__init__.py
+++ b/python/packages/autogen-ext/src/autogen_ext/models/ollama/__init__.py
@@ -1,4 +1,10 @@
-from ._ollama_client import OllamaChatCompletionClient
+try:
+    from ._ollama_client import OllamaChatCompletionClient
+except ImportError as e:
+    raise ImportError(
+        "Dependencies for Ollama not found. " "Please install the ollama package: " "pip install autogen-ext[ollama]"
+    ) from e
+
 from .config import (
     BaseOllamaClientConfigurationConfigModel,
     CreateArgumentsConfigModel,
diff --git a/python/packages/autogen-ext/src/autogen_ext/models/openai/__init__.py b/python/packages/autogen-ext/src/autogen_ext/models/openai/__init__.py
index 2241f663af26..eab9cf1e05f5 100644
--- a/python/packages/autogen-ext/src/autogen_ext/models/openai/__init__.py
+++ b/python/packages/autogen-ext/src/autogen_ext/models/openai/__init__.py
@@ -1,10 +1,17 @@
 from . import _message_transform
-from ._openai_client import (
-    AZURE_OPENAI_USER_AGENT,
-    AzureOpenAIChatCompletionClient,
-    BaseOpenAIChatCompletionClient,
-    OpenAIChatCompletionClient,
-)
+
+try:
+    from ._openai_client import (
+        AZURE_OPENAI_USER_AGENT,
+        AzureOpenAIChatCompletionClient,
+        BaseOpenAIChatCompletionClient,
+        OpenAIChatCompletionClient,
+    )
+except ImportError as e:
+    raise ImportError(
+        "Dependencies for OpenAI not found. " "Please install the openai package: " "pip install autogen-ext[openai]"
+    ) from e
+
 from .config import (
     AzureOpenAIClientConfigurationConfigModel,
     BaseOpenAIClientConfigurationConfigModel,