fix: handle RouterQueryEngine with string query (run-llama#1181)
kieransimkin authored and himself65 committed Sep 11, 2024
1 parent 2dcad52 commit 4810364
Showing 7 changed files with 151 additions and 5 deletions.
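
For context: `RouterQueryEngine` routes a query through an LLM selector (`LLMSingleSelector` by default), and the selector previously read `query.query` unconditionally, which broke when the query reached it as a plain string. The sketch below is illustrative only and is not code from this commit; the documents, descriptions, and variable names are placeholders, and it assumes the usual `RouterQueryEngine.fromDefaults` setup with an OpenAI key configured.

// Illustrative sketch, not part of this commit: querying a RouterQueryEngine,
// which delegates the routing decision to LLMSingleSelector under the hood.
import {
  Document,
  RouterQueryEngine,
  SummaryIndex,
  VectorStoreIndex,
} from "llamaindex";

const documents = [new Document({ text: "Some example text about math and search." })];
const vectorIndex = await VectorStoreIndex.fromDocuments(documents);
const summaryIndex = await SummaryIndex.fromDocuments(documents);

const queryEngine = RouterQueryEngine.fromDefaults({
  queryEngineTools: [
    {
      queryEngine: vectorIndex.asQueryEngine(),
      description: "Useful for specific questions about the document.",
    },
    {
      queryEngine: summaryIndex.asQueryEngine(),
      description: "Useful for summarizing the document.",
    },
  ],
});

// The query string ends up at the selector; before this fix, a bare string there
// caused the selector to read `.query` off of it and fail.
const response = await queryEngine.query({ query: "Summarize the document." });
console.log(response.toString());

The new e2e test below exercises the same code path directly against `LLMSingleSelector` with three query shapes: a bare string, `{ query: string }`, and `{ query: [...] }` with text parts.
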
5 changes: 5 additions & 0 deletions .changeset/few-otters-tie.md
@@ -0,0 +1,5 @@
---
"llamaindex": patch
---

fix: handle `RouterQueryEngine` with string query
5 changes: 5 additions & 0 deletions .changeset/little-bats-boil.md
@@ -0,0 +1,5 @@
---
"@llamaindex/cloud": patch
---

fix: bump version
67 changes: 67 additions & 0 deletions packages/llamaindex/e2e/node/issue.e2e.ts
@@ -0,0 +1,67 @@
import { LLMSingleSelector, Settings } from "llamaindex";
import assert from "node:assert";
import { test } from "node:test";
import { mockLLMEvent } from "./utils.js";

await test("#1177", async (t) => {
  await mockLLMEvent(t, "#1177");
  await t.test(async () => {
    const selector = new LLMSingleSelector({
      llm: Settings.llm,
    });
    {
      const result = await selector.select(
        [
          {
            description: "Math calculation",
          },
          {
            description: "Search from google",
          },
        ],
        "calculate 2 + 2",
      );
      assert.equal(result.selections.length, 1);
      assert.equal(result.selections.at(0)!.index, 0);
    }
    {
      const result = await selector.select(
        [
          {
            description: "Math calculation",
          },
          {
            description: "Search from google",
          },
        ],
        {
          query: "calculate 2 + 2",
        },
      );
      assert.equal(result.selections.length, 1);
      assert.equal(result.selections.at(0)!.index, 0);
    }
    {
      const result = await selector.select(
        [
          {
            description: "Math calculation",
          },
          {
            description: "Search from google",
          },
        ],
        {
          query: [
            {
              type: "text",
              text: "calculate 2 + 2",
            },
          ],
        },
      );
      assert.equal(result.selections.length, 1);
      assert.equal(result.selections.at(0)!.index, 0);
    }
  });
});
67 changes: 67 additions & 0 deletions packages/llamaindex/e2e/node/snapshot/#1177.snap
@@ -0,0 +1,67 @@
{
  "llmEventStart": [
    {
      "id": "PRESERVE_0",
      "messages": [
        {
          "content": "Some choices are given below. It is provided in a numbered list (1 to 42), where each item in the list corresponds to a summary.\n---------------------\n(1) Math calculation(2) Search from google\n---------------------\nUsing only the choices above and not prior knowledge, return the choice that is most relevant to the question: 'calculate 2 + 2'\n\n\nThe output should be ONLY JSON formatted as a JSON instance.\n\nHere is an example:\n[\n {\n \"choice\": 1,\n \"reason\": \"<insert reason for choice>\"\n },\n ...\n]\n",
          "role": "user"
        }
      ]
    },
    {
      "id": "PRESERVE_1",
      "messages": [
        {
          "content": "Some choices are given below. It is provided in a numbered list (1 to 42), where each item in the list corresponds to a summary.\n---------------------\n(1) Math calculation(2) Search from google\n---------------------\nUsing only the choices above and not prior knowledge, return the choice that is most relevant to the question: 'calculate 2 + 2'\n\n\nThe output should be ONLY JSON formatted as a JSON instance.\n\nHere is an example:\n[\n {\n \"choice\": 1,\n \"reason\": \"<insert reason for choice>\"\n },\n ...\n]\n",
          "role": "user"
        }
      ]
    },
    {
      "id": "PRESERVE_2",
      "messages": [
        {
          "content": "Some choices are given below. It is provided in a numbered list (1 to 42), where each item in the list corresponds to a summary.\n---------------------\n(1) Math calculation(2) Search from google\n---------------------\nUsing only the choices above and not prior knowledge, return the choice that is most relevant to the question: 'calculate 2 + 2'\n\n\nThe output should be ONLY JSON formatted as a JSON instance.\n\nHere is an example:\n[\n {\n \"choice\": 1,\n \"reason\": \"<insert reason for choice>\"\n },\n ...\n]\n",
          "role": "user"
        }
      ]
    }
  ],
  "llmEventEnd": [
    {
      "id": "PRESERVE_0",
      "response": {
        "raw": null,
        "message": {
          "content": "[\n {\n \"choice\": 1,\n \"reason\": \"The question 'calculate 2 + 2' is directly asking for a math calculation, which corresponds to choice 1.\"\n }\n]",
          "role": "assistant",
          "options": {}
        }
      }
    },
    {
      "id": "PRESERVE_1",
      "response": {
        "raw": null,
        "message": {
          "content": "[\n {\n \"choice\": 1,\n \"reason\": \"The question 'calculate 2 + 2' is asking for a mathematical calculation, which directly corresponds to choice 1: Math calculation.\"\n }\n]",
          "role": "assistant",
          "options": {}
        }
      }
    },
    {
      "id": "PRESERVE_2",
      "response": {
        "raw": null,
        "message": {
          "content": "[\n {\n \"choice\": 1,\n \"reason\": \"The question 'calculate 2 + 2' is asking for a mathematical calculation, which directly corresponds to choice 1: Math calculation.\"\n }\n]",
          "role": "assistant",
          "options": {}
        }
      }
    }
  ],
  "llmEventStream": []
}
6 changes: 4 additions & 2 deletions packages/llamaindex/src/Settings.ts
@@ -59,10 +59,12 @@ class GlobalSettings implements Config {
   }
 
   get llm(): LLM {
-    if (CoreSettings.llm === null) {
+    // fixme: we might need check internal error instead of try-catch here
+    try {
+      CoreSettings.llm;
+    } catch (error) {
       CoreSettings.llm = new OpenAI();
     }
 
     return CoreSettings.llm;
   }

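The getter above changed because of how the core settings report an unset LLM: judging by the new try/catch (and the fixme), accessing `CoreSettings.llm` throws when nothing is configured rather than returning `null`, so the old `=== null` check never installed the `OpenAI` default. A standalone sketch of that pattern, with hypothetical names (`coreLlm`, `requireLlm`, `llmWithDefault`) rather than the real `@llamaindex/core` API:

// Hypothetical stand-ins to illustrate the pattern in the diff above: a getter
// that throws when nothing is configured, wrapped in a try/catch that installs
// a lazily constructed default.
interface MinimalLLM {
  complete(prompt: string): Promise<string>;
}

let coreLlm: MinimalLLM | undefined;

function requireLlm(): MinimalLLM {
  if (coreLlm === undefined) {
    // "Not configured" is signalled by throwing, so a `=== null` check never fires.
    throw new Error("LLM is not configured");
  }
  return coreLlm;
}

function llmWithDefault(makeDefault: () => MinimalLLM): MinimalLLM {
  try {
    return requireLlm();
  } catch {
    coreLlm = makeDefault(); // fall back to a default created on first use
    return coreLlm;
  }
}

As the fixme notes, catching any error is pragmatic but loose; checking for the specific "not configured" error would be the stricter follow-up.
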
4 changes: 2 additions & 2 deletions packages/llamaindex/src/outputParsers/selectors.ts
@@ -12,8 +12,8 @@ const formatStr = `The output should be ONLY JSON formatted as a JSON instance.
 Here is an example:
 [
   {
-    choice: 1,
-    reason: "<insert reason for choice>"
+    "choice": 1,
+    "reason": "<insert reason for choice>"
   },
   ...
 ]
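
The only change to the prompt is double-quoting the keys in the example so the sample output is itself valid JSON; models tend to mirror the exact shape they are shown, and the selector's answer is parsed strictly afterwards. A quick illustration, using plain `JSON.parse` as a stand-in for whatever parsing the output parser actually does:

// Why the quoting matters: strict JSON rejects unquoted keys, so an example the
// model copies verbatim should itself parse.
const oldShape = `[{ choice: 1, reason: "math" }]`;
const newShape = `[{ "choice": 1, "reason": "math" }]`;

try {
  JSON.parse(oldShape);
} catch (err) {
  console.error("unquoted keys are not valid JSON:", (err as Error).message);
}

console.log(JSON.parse(newShape)); // [ { choice: 1, reason: 'math' } ]
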
2 changes: 1 addition & 1 deletion packages/llamaindex/src/selectors/llmSelectors.ts
@@ -159,7 +159,7 @@ export class LLMSingleSelector extends BaseSelector {
     const prompt = this.prompt.format({
       numChoices: `${choicesText.length}`,
       context: choicesText,
-      query: extractText(query.query),
+      query: extractText(query),
     });
 
     const formattedPrompt = this.outputParser.format(prompt);
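
This one-line change is the core of the fix: `select` receives the query either as a bare string or as a bundle with a `query` field, so reading `query.query` unconditionally produced `undefined` for plain strings, while handing the whole value to `extractText` lets the helper normalize both. A rough sketch of that normalization, using a hypothetical `toQueryText` rather than the real `extractText`:

// Hypothetical helper mirroring what the selector now relies on: accept the
// three query shapes exercised by the e2e test above and reduce them to text.
type TextDetail = { type: "text"; text: string };
type ImageDetail = { type: "image_url"; image_url: { url: string } };
type QueryLike = string | { query: string | (TextDetail | ImageDetail)[] };

function toQueryText(query: QueryLike): string {
  if (typeof query === "string") return query; // "calculate 2 + 2"
  if (typeof query.query === "string") return query.query; // { query: "calculate 2 + 2" }
  return query.query // { query: [{ type: "text", text: "calculate 2 + 2" }] }
    .filter((part): part is TextDetail => part.type === "text")
    .map((part) => part.text)
    .join("\n");
}

All three shapes reduce to the same prompt text, which is exactly what the snapshot above records: the selector sends an identical prompt for each call and picks choice 1 ("Math calculation") every time.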