Skip to content

Commit 256c5d7

Update Llama Stack to 0.2.22
1 parent 0d4c9c8 commit 256c5d7

File tree

4 files changed: +684, -699 lines changed


pyproject.toml

Lines changed: 14 additions & 11 deletions
@@ -20,6 +20,9 @@ line-length = 100
 [tool.mypy]
 disable_error_code = ["union-attr", "return-value", "arg-type", "import-untyped"]
 ignore_missing_imports = true
+mypy_path = ["src"]
+namespace_packages = true
+explicit_package_bases = true
 
 [tool.pdm]
 distribution = true
@@ -34,19 +37,19 @@ version = "0.1.0"
 description = "RAG content for OpenShift LightSpeed."
 authors = []
 dependencies = [
-    "PyYAML==6.0.2",
+    "PyYAML>=6.0.2",
     "huggingface_hub>=0.33.4",
-    "llama-index==0.12.51",
-    "llama-index-vector-stores-faiss==0.3.0",
-    "llama-index-embeddings-huggingface==0.4.0",
-    "llama-index-readers-file==0.4.11",
-    "faiss-cpu==1.11.0.post1",
+    "llama-index>=0.12.51",
+    "llama-index-vector-stores-faiss>=0.3.0",
+    "llama-index-embeddings-huggingface>=0.4.0",
+    "llama-index-readers-file>=0.4.11",
+    "faiss-cpu>=1.11.0.post1",
     "llama-index-vector-stores-postgres>=0.5.4",
-    "torch==2.7.1",
-    "llama-stack==0.2.16",
-    "llama-stack-client==0.2.16",
-    "aiosqlite==0.21.0",
-    "sqlite-vec==0.1.6",
+    "torch>=2.7.1",
+    "llama-stack==0.2.22",
+    "llama-stack-client==0.2.22",
+    "aiosqlite>=0.21.0",
+    "sqlite-vec>=0.1.6",
     "tomlkit",
 ]
 requires-python = "==3.12.*"
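
A quick way to sanity-check the dependency bump locally. This is a minimal sketch, not part of the commit; it only assumes the distribution names and the 0.2.22 pin that appear in the dependency list above.

    # Illustrative only: confirm the environment resolved the new exact pins.
    from importlib.metadata import version

    EXPECTED = "0.2.22"
    for dist in ("llama-stack", "llama-stack-client"):
        installed = version(dist)
        status = "OK" if installed == EXPECTED else f"MISMATCH (expected {EXPECTED})"
        print(f"{dist}: {installed} {status}")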

scripts/query_rag.py

Lines changed: 37 additions & 21 deletions
@@ -11,6 +11,7 @@
 
 import yaml
 from llama_index.core import Settings, load_index_from_storage
+from llama_index.core.schema import NodeWithScore, TextNode
 from llama_index.core.llms.utils import resolve_llm
 from llama_index.core.storage.storage_context import StorageContext
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
@@ -35,20 +36,32 @@ def _llama_index_query(args: argparse.Namespace) -> None:
 
     if args.node is not None:
         node = storage_context.docstore.get_node(args.node)
-        result = {
-            "query": args.query,
-            "type": "single_node",
-            "node_id": args.node,
-            "node": {
-                "id": node.node_id,
-                "text": node.text,
-                "metadata": node.metadata if hasattr(node, "metadata") else {},
-            },
-        }
-        if args.json:
-            print(json.dumps(result, indent=2))
+        if isinstance(node, TextNode):
+            result = {
+                "query": args.query,
+                "type": "single_node",
+                "node_id": args.node,
+                "node": {
+                    "id": node.node_id,
+                    "text": node.text,
+                    "metadata": node.metadata if hasattr(node, "metadata") else {},
+                },
+            }
+            if args.json:
+                print(json.dumps(result, indent=2))
+            else:
+                print(node)
         else:
-            print(node)
+            logging.warning(f"Node {args.node} is not a TextNode, type: {type(node).__name__}")
+            if args.json:
+                result = {
+                    "query": args.query,
+                    "type": "single_node",
+                    "node_id": args.node,
+                    "error": f"Node is not a TextNode (type: {type(node).__name__})",
+                }
+                print(json.dumps(result, indent=2))
+            exit(1)
     else:
         retriever = vector_index.as_retriever(similarity_top_k=args.top_k)
         nodes = retriever.retrieve(args.query)
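
Background on the new isinstance() guard: storage_context.docstore.get_node() returns a generic node object, and only TextNode (and its subclasses) is guaranteed to carry .text, so the script narrows the type before building its JSON result and exits with an error otherwise. Below is a minimal standalone sketch of the same narrowing pattern, using a hypothetical helper name; it is not code from this repository.

    from llama_index.core.schema import BaseNode, TextNode

    def node_text_or_none(node: BaseNode) -> str | None:
        """Return the node's text only when it really is a TextNode."""
        if isinstance(node, TextNode):
            return node.text
        return None

    print(node_text_or_none(TextNode(text="hello", id_="n1")))  # -> hello
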
@@ -88,13 +101,16 @@ def _llama_index_query(args: argparse.Namespace) -> None:
             "nodes": [],
         }
         for node in nodes:  # type: ignore
-            node_data = {
-                "id": node.node_id,
-                "score": node.score,
-                "text": node.text,
-                "metadata": node.metadata if hasattr(node, "metadata") else {},
-            }
-            result["nodes"].append(node_data)
+            if isinstance(node, NodeWithScore):
+                node_data = {
+                    "id": node.node_id,
+                    "score": node.score,
+                    "text": node.text,
+                    "metadata": node.metadata if hasattr(node, "metadata") else {},
+                }
+                result["nodes"].append(node_data)
+            else:
+                logging.debug(f"Skipping node of type {type(node).__name__}, expected NodeWithScore")
 
         if args.json:
             print(json.dumps(result, indent=2))
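
The retrieval loop gets the same treatment: results coming back from the retriever are only serialized when they are NodeWithScore instances. For reference, a small sketch of what that wrapper holds (standalone illustration, not taken from the script):

    from llama_index.core.schema import NodeWithScore, TextNode

    # NodeWithScore pairs a retrieved node with its similarity score; the
    # fields read above (node_id, score, text, metadata) are exposed on it.
    hit = NodeWithScore(node=TextNode(text="some chunk", id_="n1"), score=0.87)
    print(hit.node_id, hit.score)  # id of the wrapped node and its score
    print(hit.text)                # delegates to the wrapped TextNode's text
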
@@ -134,7 +150,7 @@ def _llama_stack_query(args: argparse.Namespace) -> None:
     yaml.safe_dump(cfg, open(cfg_file, "w", encoding="utf-8"))
 
     stack_lib = importlib.import_module("llama_stack")
-    client = stack_lib.distribution.library_client.LlamaStackAsLibraryClient(cfg_file)
+    client = stack_lib.core.library_client.LlamaStackAsLibraryClient(cfg_file)
     client.initialize()
 
     # No need to register the DB as it's defined in llama-stack.yaml
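
With the 0.2.22 pin, the library client is imported from llama_stack.core rather than llama_stack.distribution. The script targets the new path only; for code that has to tolerate both layouts, a hedged compatibility sketch (not needed by, or part of, this commit):

    try:
        # Module path used by llama-stack 0.2.22, as in the diff above.
        from llama_stack.core.library_client import LlamaStackAsLibraryClient
    except ImportError:
        # Older releases such as the previously pinned 0.2.16 keep it here.
        from llama_stack.distribution.library_client import LlamaStackAsLibraryClient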

src/lightspeed_rag_content/document_processor.py

Lines changed: 3 additions & 3 deletions
@@ -280,12 +280,12 @@ def __init__(self, config: _Config):
         # Not using importlib to help with typechecking
         import llama_stack  # pylint: disable=C0415
 
-        self.document_class = llama_stack.apis.tools.rag_tool.RAGDocument  # type: ignore
+        self.document_class = llama_stack.apis.tools.rag_tool.RAGDocument
         self.client_class = (
-            llama_stack.distribution.library_client.LlamaStackAsLibraryClient  # type: ignore
+            llama_stack.core.library_client.LlamaStackAsLibraryClient
         )
         self.documents: list[
-            dict[str, Any] | llama_stack.apis.tools.rag_tool.RAGDocument  # type: ignore
+            dict[str, Any] | llama_stack.apis.tools.rag_tool.RAGDocument
         ] = []
 
     def write_yaml_config(self, index_id: str, filename: str, db_file: str) -> None:
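
This file already imports llama_stack directly (its own comment notes this is to help type checking), and with the new module layout the attribute accesses apparently pass mypy without the previous "# type: ignore" markers. For contrast, a small illustrative sketch of why a dynamic importlib import stays opaque to the type checker; this is not project code.

    import importlib

    # mypy only sees ModuleType here, so attribute access on it is unchecked.
    dynamic = importlib.import_module("llama_stack")

    # A direct import lets mypy resolve the package's own annotations instead.
    import llama_stack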

0 commit comments