feat: update rag example to use parameters (#122)
arjunattam authored Apr 12, 2024
1 parent 3970f9c commit 91112b7
Showing 2 changed files with 19 additions and 5 deletions.
17 changes: 16 additions & 1 deletion examples/rag/empiricalrc.json
@@ -2,9 +2,24 @@
  "$schema": "https://assets.empirical.run/config/schema/v0.4.0.json",
  "runs": [
    {
      "name": "rag script run",
      "type": "py-script",
      "path": "rag.py",
      "parameters": {
        "model": "gpt-3.5-turbo"
      },
      "scorers": [
        {
          "type": "py-script",
          "path": "score.py"
        }
      ]
    },
    {
      "type": "py-script",
      "path": "rag.py",
      "parameters": {
        "model": "gpt-4-turbo-preview"
      },
      "scorers": [
        {
          "type": "py-script",
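
Both run entries above point at the same script (rag.py) and differ only in their "parameters" block. As a minimal sketch, not part of the commit, here is one way to list which model each run would use, assuming the repository layout above; the "parameters"/"model" keys come from the diff, while the json usage and the printed format are illustrative:

import json

with open("examples/rag/empiricalrc.json") as f:
    config = json.load(f)

for run in config["runs"]:
    # "parameters" is the key added in this commit; the empty-dict fallback
    # here is just defensive, for runs that define no parameters
    model = run.get("parameters", {}).get("model", "(unset)")
    print(f'{run["type"]} {run["path"]} -> model={model}')

# Expected output for the two runs shown above:
#   py-script rag.py -> model=gpt-3.5-turbo
#   py-script rag.py -> model=gpt-4-turbo-preview
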
7 changes: 3 additions & 4 deletions examples/rag/rag.py
@@ -1,10 +1,7 @@
import nest_asyncio
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.llms.openai import OpenAI as LlamaOpenAI
from llama_index.core.node_parser import SentenceSplitter

nest_asyncio.apply()


def execute(inputs, parameters):
    # load documents
@@ -21,7 +18,9 @@ def build_query_engine(llm):
        query_engine = vector_index.as_query_engine(similarity_top_k=2, llm=llm)
        return query_engine

    query_engine = build_query_engine(llm=LlamaOpenAI(model="gpt-3.5-turbo"))
    query_engine = build_query_engine(
        llm=LlamaOpenAI(model=parameters.get("model", "gpt-3.5-turbo"))
    )
    response = query_engine.query(question)
    output = response.response
    contexts = [c.node.get_content() for c in response.source_nodes]
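
The script-side change mirrors the config: each run's "parameters" object is passed into execute, and parameters.get("model", "gpt-3.5-turbo") keeps gpt-3.5-turbo as the fallback when a run sets no model. A minimal sketch of that lookup in isolation; the direct calls and the return shape are assumptions for illustration, not the actual rag.py behavior:

def execute(inputs, parameters):
    # same fallback as in the diff above; the rest of rag.py is stubbed out
    model = parameters.get("model", "gpt-3.5-turbo")
    return {"model": model}

print(execute({}, {"model": "gpt-4-turbo-preview"}))  # {'model': 'gpt-4-turbo-preview'}
print(execute({}, {}))                                # {'model': 'gpt-3.5-turbo'} (fallback)
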
