Skip to content

Commit 2635296

Browse files
authored
LCORE-173: Fixed Unit Tests (#83)
* Fixed Unit Test * Added Sqlite (Required to pass unit tests) * Removed Environment Variables * Fixed Ruff
1 parent 61dae19 commit 2635296

File tree

4 files changed

+27
-10
lines changed

4 files changed

+27
-10
lines changed

pdm.lock

Lines changed: 15 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -30,6 +30,7 @@ dev = [
3030
"mypy>=1.16.0",
3131
"types-PyYAML>=6.0.2",
3232
"ruff>=0.11.13",
33+
"aiosqlite",
3334
]
3435

3536
[tool.pytest.ini_options]
Lines changed: 6 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,6 @@
1+
version: '2'
2+
image_name: llamastack-minimal-stack
3+
container_image: null
4+
5+
apis: []
6+
providers: {}

tests/unit/test_client.py

Lines changed: 5 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -1,45 +1,41 @@
11
"""Unit tests for functions defined in src/client.py."""
22

3-
import os
43
import pytest
5-
from unittest.mock import patch
64

75
from client import get_llama_stack_client
86
from models.config import LLamaStackConfiguration
97

108

119
# [tisnik] Need to resolve dependencies on CI to be able to run this tests
12-
@patch.dict(os.environ, {"INFERENCE_MODEL": "llama3.2:3b-instruct-fp16"})
13-
def _test_get_llama_stack_library_client() -> None:
10+
def test_get_llama_stack_library_client() -> None:
1411
cfg = LLamaStackConfiguration(
1512
url=None,
1613
api_key=None,
1714
use_as_library_client=True,
18-
library_client_config_path="ollama",
15+
library_client_config_path="./tests/configuration/minimal-stack.yaml",
1916
)
17+
2018
client = get_llama_stack_client(cfg)
2119
assert client is not None
2220

2321

24-
@patch.dict(os.environ, {"INFERENCE_MODEL": "llama3.2:3b-instruct-fp16"})
2522
def test_get_llama_stack_remote_client() -> None:
2623
cfg = LLamaStackConfiguration(
2724
url="http://localhost:8321",
2825
api_key=None,
2926
use_as_library_client=False,
30-
library_client_config_path="ollama",
27+
library_client_config_path="./tests/configuration/minimal-stack.yaml",
3128
)
3229
client = get_llama_stack_client(cfg)
3330
assert client is not None
3431

3532

36-
@patch.dict(os.environ, {"INFERENCE_MODEL": "llama3.2:3b-instruct-fp16"})
3733
def test_get_llama_stack_wrong_configuration() -> None:
3834
cfg = LLamaStackConfiguration(
3935
url=None,
4036
api_key=None,
4137
use_as_library_client=True,
42-
library_client_config_path="ollama",
38+
library_client_config_path="./tests/configuration/minimal-stack.yaml",
4339
)
4440
cfg.library_client_config_path = None
4541
with pytest.raises(

0 commit comments

Comments
 (0)