Skip to content

Commit c469fc0

Browse files
Copilot and ekzhu authored
Fix OllamaChatCompletionClient load_component() error by adding to WELL_KNOWN_PROVIDERS (#7030)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: ekzhu <320302+ekzhu@users.noreply.github.com> Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
1 parent e0e39e4 commit c469fc0

File tree

2 files changed

+44
-0
lines changed

2 files changed

+44
-0
lines changed

python/packages/autogen-core/src/autogen_core/_component_config.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ def _type_to_provider_str(t: type) -> str:
4949
"AzureOpenAIChatCompletionClient": "autogen_ext.models.openai.AzureOpenAIChatCompletionClient",
5050
"openai_chat_completion_client": "autogen_ext.models.openai.OpenAIChatCompletionClient",
5151
"OpenAIChatCompletionClient": "autogen_ext.models.openai.OpenAIChatCompletionClient",
52+
"OllamaChatCompletionClient": "autogen_ext.models.ollama.OllamaChatCompletionClient",
5253
}
5354

5455

python/packages/autogen-ext/tests/models/test_ollama_chat_completion_client.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1313,3 +1313,46 @@ async def _mock_chat(*args: Any, **kwargs: Any) -> ChatResponse:
13131313
assert len(create_result.content) > 0
13141314
assert isinstance(create_result.content[0], FunctionCall)
13151315
assert create_result.content[0].name == add_tool.name
1316+
1317+
1318+
def test_ollama_load_component() -> None:
    """Verify the base ``ChatCompletionClient.load_component()`` resolves the
    ``OllamaChatCompletionClient`` well-known provider name (regression for #7030)."""
    from autogen_core.models import ChatCompletionClient

    # Exact component config reported in the issue.
    component_config = {
        "provider": "OllamaChatCompletionClient",
        "config": {
            "model": "qwen3",
            "host": "http://1.2.3.4:30130",
        },
    }

    # Previously raised because the provider short name was unknown.
    loaded = ChatCompletionClient.load_component(component_config)

    # The generic loader must hand back a concrete Ollama client ...
    assert isinstance(loaded, OllamaChatCompletionClient)
    # ... configured with the model from the config.
    assert loaded._model_name == "qwen3"  # type: ignore[reportPrivateUsage]

    # The remaining create args must also reflect the supplied config.
    resolved_args = loaded.get_create_args()
    assert resolved_args["model"] == "qwen3"  # type: ignore[reportPrivateUsage]
1343+
def test_ollama_load_component_via_class() -> None:
    """Verify ``load_component()`` works when invoked on ``OllamaChatCompletionClient`` itself."""
    component_config = {
        "provider": "OllamaChatCompletionClient",
        "config": {
            "model": "llama3.2",
            "host": "http://localhost:11434",
        },
    }

    # Load through the concrete class rather than the abstract base.
    loaded = OllamaChatCompletionClient.load_component(component_config)

    # Type and configured model must both round-trip from the config.
    assert isinstance(loaded, OllamaChatCompletionClient)
    assert loaded._model_name == "llama3.2"  # type: ignore[reportPrivateUsage]

0 commit comments

Comments
 (0)