Skip to content

Commit b68b78b

Browse files
authored
Merge pull request #266 from tisnik/lcore-324-add-check-for-config-file
LCORE-324: add check for config file `run.yaml` existence
2 parents 46c7fa2 + 91c956f commit b68b78b

File tree

4 files changed

+162
-11
lines changed

4 files changed

+162
-11
lines changed

src/models/config.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Model with service configuration."""
22

3+
from pathlib import Path
34
from typing import Optional
45

56
from pydantic import BaseModel, model_validator, FilePath, AnyHttpUrl, PositiveInt
@@ -82,6 +83,10 @@ def check_llama_stack_model(self) -> Self:
8283
raise ValueError(
8384
"LLama stack library client mode is enabled but a configuration file path is not specified" # noqa: C0301
8485
)
86+
# the configuration file must exist and be a regular readable file
87+
checks.file_check(
88+
Path(self.library_client_config_path), "Llama Stack configuration file"
89+
)
8590
return self
8691

8792

tests/configuration/run.yaml

Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
version: '2'
2+
image_name: minimal-viable-llama-stack-configuration
3+
4+
apis:
5+
- agents
6+
- datasetio
7+
- eval
8+
- inference
9+
- post_training
10+
- safety
11+
- scoring
12+
- telemetry
13+
- tool_runtime
14+
- vector_io
15+
benchmarks: []
16+
container_image: null
17+
datasets: []
18+
external_providers_dir: null
19+
inference_store:
20+
db_path: .llama/distributions/ollama/inference_store.db
21+
type: sqlite
22+
logging: null
23+
metadata_store:
24+
db_path: .llama/distributions/ollama/registry.db
25+
namespace: null
26+
type: sqlite
27+
providers:
28+
agents:
29+
- config:
30+
persistence_store:
31+
db_path: .llama/distributions/ollama/agents_store.db
32+
namespace: null
33+
type: sqlite
34+
responses_store:
35+
db_path: .llama/distributions/ollama/responses_store.db
36+
type: sqlite
37+
provider_id: meta-reference
38+
provider_type: inline::meta-reference
39+
datasetio:
40+
- config:
41+
kvstore:
42+
db_path: .llama/distributions/ollama/huggingface_datasetio.db
43+
namespace: null
44+
type: sqlite
45+
provider_id: huggingface
46+
provider_type: remote::huggingface
47+
- config:
48+
kvstore:
49+
db_path: .llama/distributions/ollama/localfs_datasetio.db
50+
namespace: null
51+
type: sqlite
52+
provider_id: localfs
53+
provider_type: inline::localfs
54+
eval:
55+
- config:
56+
kvstore:
57+
db_path: .llama/distributions/ollama/meta_reference_eval.db
58+
namespace: null
59+
type: sqlite
60+
provider_id: meta-reference
61+
provider_type: inline::meta-reference
62+
inference:
63+
- provider_id: openai
64+
provider_type: remote::openai
65+
config:
66+
api_key: ${env.OPENAI_API_KEY}
67+
post_training:
68+
- config:
69+
checkpoint_format: huggingface
70+
device: cpu
71+
distributed_backend: null
72+
provider_id: huggingface
73+
provider_type: inline::huggingface
74+
safety:
75+
- config:
76+
excluded_categories: []
77+
provider_id: llama-guard
78+
provider_type: inline::llama-guard
79+
scoring:
80+
- config: {}
81+
provider_id: basic
82+
provider_type: inline::basic
83+
- config: {}
84+
provider_id: llm-as-judge
85+
provider_type: inline::llm-as-judge
86+
- config:
87+
openai_api_key: '********'
88+
provider_id: braintrust
89+
provider_type: inline::braintrust
90+
telemetry:
91+
- config:
92+
service_name: ''
93+
sinks: sqlite
94+
sqlite_db_path: .llama/distributions/ollama/trace_store.db
95+
provider_id: meta-reference
96+
provider_type: inline::meta-reference
97+
tool_runtime:
98+
- provider_id: model-context-protocol
99+
provider_type: remote::model-context-protocol
100+
config: {}
101+
vector_io:
102+
- config:
103+
kvstore:
104+
db_path: .llama/distributions/ollama/faiss_store.db
105+
namespace: null
106+
type: sqlite
107+
provider_id: faiss
108+
provider_type: inline::faiss
109+
scoring_fns: []
110+
server:
111+
auth: null
112+
host: null
113+
port: 8321
114+
quota: null
115+
tls_cafile: null
116+
tls_certfile: null
117+
tls_keyfile: null
118+
shields: []
119+
vector_dbs: []
120+
121+
models:
122+
- model_id: gpt-4-turbo
123+
provider_id: openai
124+
model_type: llm
125+
provider_model_id: gpt-4-turbo

tests/unit/models/test_config.py

Lines changed: 31 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@
2525
DataCollectorConfiguration,
2626
)
2727

28+
from utils.checks import InvalidConfigurationError
29+
2830

2931
def test_service_configuration_constructor() -> None:
3032
"""Test the ServiceConfiguration constructor."""
@@ -58,7 +60,8 @@ def test_service_configuration_workers_value() -> None:
5860
def test_llama_stack_configuration_constructor() -> None:
5961
"""Test the LLamaStackConfiguration constructor."""
6062
llama_stack_configuration = LlamaStackConfiguration(
61-
use_as_library_client=True, library_client_config_path="foo"
63+
use_as_library_client=True,
64+
library_client_config_path="tests/configuration/run.yaml",
6265
)
6366
assert llama_stack_configuration is not None
6467

@@ -76,6 +79,18 @@ def test_llama_stack_configuration_constructor() -> None:
7679
assert llama_stack_configuration is not None
7780

7881

82+
def test_llama_stack_configuration_no_run_yaml() -> None:
83+
"""Test the LLamaStackConfiguration constructor when run.yaml file is not a file."""
84+
with pytest.raises(
85+
InvalidConfigurationError,
86+
match="Llama Stack configuration file 'not a file' is not a file",
87+
):
88+
LlamaStackConfiguration(
89+
use_as_library_client=True,
90+
library_client_config_path="not a file",
91+
)
92+
93+
7994
def test_llama_stack_wrong_configuration_constructor_no_url() -> None:
8095
"""Test the LLamaStackConfiguration constructor."""
8196
with pytest.raises(
@@ -298,7 +313,8 @@ def test_configuration_empty_mcp_servers() -> None:
298313
name="test_name",
299314
service=ServiceConfiguration(),
300315
llama_stack=LlamaStackConfiguration(
301-
use_as_library_client=True, library_client_config_path="foo"
316+
use_as_library_client=True,
317+
library_client_config_path="tests/configuration/run.yaml",
302318
),
303319
user_data_collection=UserDataCollection(
304320
feedback_disabled=True, feedback_storage=None
@@ -319,7 +335,8 @@ def test_configuration_single_mcp_server() -> None:
319335
name="test_name",
320336
service=ServiceConfiguration(),
321337
llama_stack=LlamaStackConfiguration(
322-
use_as_library_client=True, library_client_config_path="foo"
338+
use_as_library_client=True,
339+
library_client_config_path="tests/configuration/run.yaml",
323340
),
324341
user_data_collection=UserDataCollection(
325342
feedback_disabled=True, feedback_storage=None
@@ -346,7 +363,8 @@ def test_configuration_multiple_mcp_servers() -> None:
346363
name="test_name",
347364
service=ServiceConfiguration(),
348365
llama_stack=LlamaStackConfiguration(
349-
use_as_library_client=True, library_client_config_path="foo"
366+
use_as_library_client=True,
367+
library_client_config_path="tests/configuration/run.yaml",
350368
),
351369
user_data_collection=UserDataCollection(
352370
feedback_disabled=True, feedback_storage=None
@@ -368,7 +386,8 @@ def test_dump_configuration(tmp_path) -> None:
368386
name="test_name",
369387
service=ServiceConfiguration(),
370388
llama_stack=LlamaStackConfiguration(
371-
use_as_library_client=True, library_client_config_path="foo"
389+
use_as_library_client=True,
390+
library_client_config_path="tests/configuration/run.yaml",
372391
),
373392
user_data_collection=UserDataCollection(
374393
feedback_disabled=True, feedback_storage=None
@@ -413,7 +432,7 @@ def test_dump_configuration(tmp_path) -> None:
413432
"url": None,
414433
"api_key": None,
415434
"use_as_library_client": True,
416-
"library_client_config_path": "foo",
435+
"library_client_config_path": "tests/configuration/run.yaml",
417436
},
418437
"user_data_collection": {
419438
"feedback_disabled": True,
@@ -450,7 +469,8 @@ def test_dump_configuration_with_one_mcp_server(tmp_path) -> None:
450469
name="test_name",
451470
service=ServiceConfiguration(),
452471
llama_stack=LlamaStackConfiguration(
453-
use_as_library_client=True, library_client_config_path="foo"
472+
use_as_library_client=True,
473+
library_client_config_path="tests/configuration/run.yaml",
454474
),
455475
user_data_collection=UserDataCollection(
456476
feedback_disabled=True, feedback_storage=None
@@ -490,7 +510,7 @@ def test_dump_configuration_with_one_mcp_server(tmp_path) -> None:
490510
"url": None,
491511
"api_key": None,
492512
"use_as_library_client": True,
493-
"library_client_config_path": "foo",
513+
"library_client_config_path": "tests/configuration/run.yaml",
494514
},
495515
"user_data_collection": {
496516
"feedback_disabled": True,
@@ -535,7 +555,8 @@ def test_dump_configuration_with_more_mcp_servers(tmp_path) -> None:
535555
name="test_name",
536556
service=ServiceConfiguration(),
537557
llama_stack=LlamaStackConfiguration(
538-
use_as_library_client=True, library_client_config_path="foo"
558+
use_as_library_client=True,
559+
library_client_config_path="tests/configuration/run.yaml",
539560
),
540561
user_data_collection=UserDataCollection(
541562
feedback_disabled=True, feedback_storage=None
@@ -581,7 +602,7 @@ def test_dump_configuration_with_more_mcp_servers(tmp_path) -> None:
581602
"url": None,
582603
"api_key": None,
583604
"use_as_library_client": True,
584-
"library_client_config_path": "foo",
605+
"library_client_config_path": "tests/configuration/run.yaml",
585606
},
586607
"user_data_collection": {
587608
"feedback_disabled": True,

tests/unit/utils/test_common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -269,7 +269,7 @@ async def test_register_mcp_servers_async_with_library_client(mocker):
269269
service=ServiceConfiguration(),
270270
llama_stack=LlamaStackConfiguration(
271271
use_as_library_client=True,
272-
library_client_config_path="/path/to/config.yaml",
272+
library_client_config_path="tests/configuration/run.yaml",
273273
),
274274
user_data_collection=UserDataCollection(feedback_disabled=True),
275275
mcp_servers=[mcp_server],

0 commit comments

Comments
 (0)