Skip to content

Commit d89f7a3

Browse files
authored
Merge pull request #456 from tisnik/use-proper-llama-stack-name
Use proper Llama Stack name in sources
2 parents ac278b5 + 3fe5a26 commit d89f7a3

File tree

7 files changed

+15
-15
lines changed

7 files changed

+15
-15
lines changed

src/app/endpoints/query.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ async def query_endpoint_handler(
177177
# log Llama Stack configuration, but without sensitive information
178178
llama_stack_config = configuration.llama_stack_configuration.model_copy()
179179
llama_stack_config.api_key = "********"
180-
logger.info("LLama stack config: %s", llama_stack_config)
180+
logger.info("Llama stack config: %s", llama_stack_config)
181181

182182
user_id, _, token = auth
183183

src/app/endpoints/streaming_query.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -134,15 +134,15 @@ def stream_end_event(metadata_map: dict) -> str:
134134
def stream_build_event(chunk: Any, chunk_id: int, metadata_map: dict) -> Iterator[str]:
135135
"""Build a streaming event from a chunk response.
136136
137-
This function processes chunks from the LLama Stack streaming response and formats
137+
This function processes chunks from the Llama Stack streaming response and formats
138138
them into Server-Sent Events (SSE) format for the client. It handles two main
139139
event types:
140140
141141
1. step_progress: Contains text deltas from the model inference process
142142
2. step_complete: Contains information about completed tool execution steps
143143
144144
Args:
145-
chunk: The streaming chunk from LLama Stack containing event data
145+
chunk: The streaming chunk from Llama Stack containing event data
146146
chunk_id: The current chunk ID counter (gets incremented for each token)
147147
148148
Returns:
@@ -544,7 +544,7 @@ async def streaming_query_endpoint_handler( # pylint: disable=too-many-locals
544544
# log Llama Stack configuration, but without sensitive information
545545
llama_stack_config = configuration.llama_stack_configuration.model_copy()
546546
llama_stack_config.api_key = "********"
547-
logger.info("LLama stack config: %s", llama_stack_config)
547+
logger.info("Llama stack config: %s", llama_stack_config)
548548

549549
user_id, _user_name, token = auth
550550

src/client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""LLama Stack client retrieval class."""
1+
"""Llama Stack client retrieval class."""
22

33
import logging
44

src/models/config.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -179,19 +179,19 @@ def check_llama_stack_model(self) -> Self:
179179
if self.url is None:
180180
if self.use_as_library_client is None:
181181
raise ValueError(
182-
"LLama stack URL is not specified and library client mode is not specified"
182+
"Llama stack URL is not specified and library client mode is not specified"
183183
)
184184
if self.use_as_library_client is False:
185185
raise ValueError(
186-
"LLama stack URL is not specified and library client mode is not enabled"
186+
"Llama stack URL is not specified and library client mode is not enabled"
187187
)
188188
if self.use_as_library_client is None:
189189
self.use_as_library_client = False
190190
if self.use_as_library_client:
191191
if self.library_client_config_path is None:
192192
# pylint: disable=line-too-long
193193
raise ValueError(
194-
"LLama stack library client mode is enabled but a configuration file path is not specified" # noqa: E501
194+
"Llama stack library client mode is enabled but a configuration file path is not specified" # noqa: E501
195195
)
196196
# the configuration file must exists and be regular readable file
197197
checks.file_check(

tests/unit/app/endpoints/test_models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ async def test_models_endpoint_handler_improper_llama_stack_configuration(mocker
8484
with pytest.raises(HTTPException) as e:
8585
await models_endpoint_handler(request=request, auth=auth)
8686
assert e.value.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
87-
assert e.detail["response"] == "LLama stack is not configured"
87+
assert e.detail["response"] == "Llama stack is not configured"
8888

8989

9090
@pytest.mark.asyncio

tests/unit/app/endpoints/test_streaming_query.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ async def _test_streaming_query_endpoint_handler(mocker, store_transcript=False)
182182
mocker.Mock(identifier="model2", model_type="llm", provider_id="provider2"),
183183
]
184184

185-
# Construct the streaming response from LLama Stack.
185+
# Construct the streaming response from Llama Stack.
186186
# We cannot use 'mock' as 'hasattr(mock, "xxx")' adds the missing
187187
# attribute and therefore makes checks to see whether it is missing fail.
188188
mock_streaming_response = mocker.AsyncMock()

tests/unit/models/test_config.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -114,23 +114,23 @@ def test_llama_stack_wrong_configuration_constructor_no_url() -> None:
114114
"""
115115
with pytest.raises(
116116
ValueError,
117-
match="LLama stack URL is not specified and library client mode is not specified",
117+
match="Llama stack URL is not specified and library client mode is not specified",
118118
):
119119
LlamaStackConfiguration()
120120

121121

122122
def test_llama_stack_wrong_configuration_constructor_library_mode_off() -> None:
123-
"""Test the LLamaStackConfiguration constructor."""
123+
"""Test the LlamaStackConfiguration constructor."""
124124
with pytest.raises(
125125
ValueError,
126-
match="LLama stack URL is not specified and library client mode is not enabled",
126+
match="Llama stack URL is not specified and library client mode is not enabled",
127127
):
128128
LlamaStackConfiguration(use_as_library_client=False)
129129

130130

131131
def test_llama_stack_wrong_configuration_no_config_file() -> None:
132-
"""Test the LLamaStackConfiguration constructor."""
133-
m = "LLama stack library client mode is enabled but a configuration file path is not specified"
132+
"""Test the LlamaStackConfiguration constructor."""
133+
m = "Llama stack library client mode is enabled but a configuration file path is not specified"
134134
with pytest.raises(ValueError, match=m):
135135
LlamaStackConfiguration(use_as_library_client=True)
136136

0 commit comments

Comments (0)