Commit c10d224: Update Llama Stack to 0.2.22

Parent: 0d4c9c8

File tree: 4 files changed (+689, −702 lines)


pyproject.toml

Lines changed: 14 additions & 11 deletions
```diff
@@ -20,6 +20,9 @@ line-length = 100
 [tool.mypy]
 disable_error_code = ["union-attr", "return-value", "arg-type", "import-untyped"]
 ignore_missing_imports = true
+mypy_path = ["src"]
+namespace_packages = true
+explicit_package_bases = true
 
 [tool.pdm]
 distribution = true
@@ -34,19 +37,19 @@ version = "0.1.0"
 description = "RAG content for OpenShift LightSpeed."
 authors = []
 dependencies = [
-    "PyYAML==6.0.2",
+    "PyYAML>=6.0.2",
     "huggingface_hub>=0.33.4",
-    "llama-index==0.12.51",
-    "llama-index-vector-stores-faiss==0.3.0",
-    "llama-index-embeddings-huggingface==0.4.0",
-    "llama-index-readers-file==0.4.11",
-    "faiss-cpu==1.11.0.post1",
+    "llama-index>=0.12.51",
+    "llama-index-vector-stores-faiss>=0.3.0",
+    "llama-index-embeddings-huggingface>=0.4.0",
+    "llama-index-readers-file>=0.4.11",
+    "faiss-cpu>=1.11.0.post1",
     "llama-index-vector-stores-postgres>=0.5.4",
-    "torch==2.7.1",
-    "llama-stack==0.2.16",
-    "llama-stack-client==0.2.16",
-    "aiosqlite==0.21.0",
-    "sqlite-vec==0.1.6",
+    "torch>=2.7.1",
+    "llama-stack==0.2.22",
+    "llama-stack-client==0.2.22",
+    "aiosqlite>=0.21.0",
+    "sqlite-vec>=0.1.6",
     "tomlkit",
 ]
 requires-python = "==3.12.*"
```
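
The dependency changes loosen most pins to lower bounds while holding llama-stack and llama-stack-client to exactly 0.2.22. As an illustrative aside that is not part of the commit, a resolved environment can be checked against those pins from package metadata:

```python
# Illustrative check, not part of this commit: read the installed versions
# of the packages pinned in pyproject.toml from package metadata.
from importlib.metadata import PackageNotFoundError, version

for pkg in ("llama-stack", "llama-stack-client", "llama-index", "faiss-cpu"):
    try:
        print(f"{pkg}: {version(pkg)}")
    except PackageNotFoundError:
        print(f"{pkg}: not installed")
```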

scripts/query_rag.py

Lines changed: 41 additions & 21 deletions
```diff
@@ -12,6 +12,7 @@
 import yaml
 from llama_index.core import Settings, load_index_from_storage
 from llama_index.core.llms.utils import resolve_llm
+from llama_index.core.schema import NodeWithScore, TextNode
 from llama_index.core.storage.storage_context import StorageContext
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 from llama_index.vector_stores.faiss import FaissVectorStore
@@ -35,20 +36,34 @@ def _llama_index_query(args: argparse.Namespace) -> None:
 
     if args.node is not None:
         node = storage_context.docstore.get_node(args.node)
-        result = {
-            "query": args.query,
-            "type": "single_node",
-            "node_id": args.node,
-            "node": {
-                "id": node.node_id,
-                "text": node.text,
-                "metadata": node.metadata if hasattr(node, "metadata") else {},
-            },
-        }
-        if args.json:
-            print(json.dumps(result, indent=2))
+        if isinstance(node, TextNode):
+            result = {
+                "query": args.query,
+                "type": "single_node",
+                "node_id": args.node,
+                "node": {
+                    "id": node.node_id,
+                    "text": node.text,
+                    "metadata": node.metadata if hasattr(node, "metadata") else {},
+                },
+            }
+            if args.json:
+                print(json.dumps(result, indent=2))
+            else:
+                print(node)
         else:
-            print(node)
+            logging.warning(
+                f"Node {args.node} is not a TextNode, type: {type(node).__name__}"
+            )
+            if args.json:
+                result = {
+                    "query": args.query,
+                    "type": "single_node",
+                    "node_id": args.node,
+                    "error": f"Node is not a TextNode (type: {type(node).__name__})",
+                }
+                print(json.dumps(result, indent=2))
+            exit(1)
     else:
         retriever = vector_index.as_retriever(similarity_top_k=args.top_k)
         nodes = retriever.retrieve(args.query)
@@ -88,13 +103,18 @@ def _llama_index_query(args: argparse.Namespace) -> None:
             "nodes": [],
         }
         for node in nodes:  # type: ignore
-            node_data = {
-                "id": node.node_id,
-                "score": node.score,
-                "text": node.text,
-                "metadata": node.metadata if hasattr(node, "metadata") else {},
-            }
-            result["nodes"].append(node_data)
+            if isinstance(node, NodeWithScore):
+                node_data = {
+                    "id": node.node_id,
+                    "score": node.score,
+                    "text": node.text,
+                    "metadata": node.metadata if hasattr(node, "metadata") else {},
+                }
+                result["nodes"].append(node_data)
+            else:
+                logging.debug(
+                    f"Skipping node of type {type(node).__name__}, expected NodeWithScore"
+                )
 
         if args.json:
             print(json.dumps(result, indent=2))
@@ -134,7 +154,7 @@ def _llama_stack_query(args: argparse.Namespace) -> None:
     yaml.safe_dump(cfg, open(cfg_file, "w", encoding="utf-8"))
 
     stack_lib = importlib.import_module("llama_stack")
-    client = stack_lib.distribution.library_client.LlamaStackAsLibraryClient(cfg_file)
+    client = stack_lib.core.library_client.LlamaStackAsLibraryClient(cfg_file)
     client.initialize()
 
     # No need to register the DB as it's defined in llama-stack.yaml
```
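
The added isinstance guards narrow what comes back from the docstore and the retriever before text, score, or metadata are read. A minimal sketch of that narrowing pattern, assuming llama-index is installed; the sample node below is made up for illustration:

```python
# Sketch of the narrowing pattern used in the diff above; the node contents
# are hypothetical and exist only for this example.
import logging

from llama_index.core.schema import NodeWithScore, TextNode

node = TextNode(text="some chunk of documentation", id_="node-1")
scored = NodeWithScore(node=node, score=0.87)

for item in (node, scored, "not-a-node"):
    if isinstance(item, NodeWithScore):
        # Retriever result: safe to read the score and the wrapped node's text.
        print(item.node_id, item.score, item.text)
    elif isinstance(item, TextNode):
        # Plain docstore node: has text and metadata but no retrieval score.
        print(item.node_id, item.text)
    else:
        logging.debug("Skipping %s, expected a llama-index node", type(item).__name__)
```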

src/lightspeed_rag_content/document_processor.py

Lines changed: 4 additions & 6 deletions
```diff
@@ -280,12 +280,10 @@ def __init__(self, config: _Config):
         # Not using importlib to help with typechecking
         import llama_stack  # pylint: disable=C0415
 
-        self.document_class = llama_stack.apis.tools.rag_tool.RAGDocument  # type: ignore
-        self.client_class = (
-            llama_stack.distribution.library_client.LlamaStackAsLibraryClient  # type: ignore
-        )
+        self.document_class = llama_stack.apis.tools.rag_tool.RAGDocument
+        self.client_class = llama_stack.core.library_client.LlamaStackAsLibraryClient
         self.documents: list[
-            dict[str, Any] | llama_stack.apis.tools.rag_tool.RAGDocument  # type: ignore
+            dict[str, Any] | llama_stack.apis.tools.rag_tool.RAGDocument
         ] = []
 
     def write_yaml_config(self, index_id: str, filename: str, db_file: str) -> None:
@@ -313,7 +311,7 @@ def _start_llama_stack(self, cfg_file: str) -> Any:
         """Start llama-stack as a library and return the client.
 
         Return type is really
-        llama_stack.distribution.library_client.LlamaStackAsLibraryClient
+        llama_stack.core.library_client.LlamaStackAsLibraryClient
 
         But we do dynamic import, so we don't have it for static typechecking
         """
```
