diff --git a/.changeset/odd-dogs-buy.md b/.changeset/odd-dogs-buy.md new file mode 100644 index 000000000..54d7fee33 --- /dev/null +++ b/.changeset/odd-dogs-buy.md @@ -0,0 +1,5 @@ +--- +"create-llama": patch +--- + +chore: bump llamaindex diff --git a/e2e/shared/llamaindexserver_template.spec.ts b/e2e/shared/llamaindexserver_template.spec.ts index 119600724..35e1e4ec3 100644 --- a/e2e/shared/llamaindexserver_template.spec.ts +++ b/e2e/shared/llamaindexserver_template.spec.ts @@ -65,7 +65,9 @@ for (const useCase of templateUseCases) { templateFramework === "express", ); await page.goto(`http://localhost:${port}`); - await expect(page.getByText("Built by LlamaIndex")).toBeVisible(); + await expect(page.getByText("Built by LlamaIndex")).toBeVisible({ + timeout: 5 * 60 * 1000, + }); }); test("Frontend should be able to submit a message and receive the start of a streamed response", async ({ diff --git a/templates/types/llamaindexserver/nextjs/package.json b/templates/types/llamaindexserver/nextjs/package.json index d3b18dbe3..c3492ec44 100644 --- a/templates/types/llamaindexserver/nextjs/package.json +++ b/templates/types/llamaindexserver/nextjs/package.json @@ -9,10 +9,10 @@ "dependencies": { "@llamaindex/openai": "0.2.0", "@llamaindex/readers": "^2.0.0", - "@llamaindex/server": "0.1.3", + "@llamaindex/server": "0.1.4", "@llamaindex/tools": "0.0.4", "dotenv": "^16.4.7", - "llamaindex": "0.9.17", + "llamaindex": "0.10.1", "zod": "^3.23.8" }, "devDependencies": {