diff --git a/.changeset/gorgeous-squids-run.md b/.changeset/gorgeous-squids-run.md
new file mode 100644
index 000000000..ba9304780
--- /dev/null
+++ b/.changeset/gorgeous-squids-run.md
@@ -0,0 +1,6 @@
+---
+"create-llama": patch
+"@llamaindex/server": minor
+---
+
+refactor: llamacloud configs
diff --git a/packages/create-llama/helpers/python.ts b/packages/create-llama/helpers/python.ts
index b25c350da..c266374f6 100644
--- a/packages/create-llama/helpers/python.ts
+++ b/packages/create-llama/helpers/python.ts
@@ -439,9 +439,17 @@ const installLlamaIndexServerTemplate = async ({
   });
 
   // copy ts server to ui folder
-  await copy("**", uiDir, {
+  const tsProxyDir = path.join(templatesDir, "components", "ts-proxy");
+  await copy("package.json", uiDir, {
     parents: true,
-    cwd: path.join(templatesDir, "components", "ts-proxy"),
+    cwd: tsProxyDir,
+  });
+  const serverFileLocation = useLlamaParse
+    ? path.join(tsProxyDir, "llamacloud")
+    : path.join(tsProxyDir);
+  await copy("index.ts", uiDir, {
+    parents: true,
+    cwd: serverFileLocation,
   });
 
   // Copy custom UI components to ui/components folder
diff --git a/packages/create-llama/helpers/typescript.ts b/packages/create-llama/helpers/typescript.ts
index e32d2890f..2f4944bc2 100644
--- a/packages/create-llama/helpers/typescript.ts
+++ b/packages/create-llama/helpers/typescript.ts
@@ -133,6 +133,18 @@ export const installTSTemplate = async ({
       modelConfig,
       dataSources,
     });
+
+    if (vectorDb === "llamacloud") {
+      // replace index.ts with llamacloud/index.ts
+      await fs.rm(path.join(root, "src", "index.ts"));
+      await copy("index.ts", path.join(root, "src"), {
+        parents: true,
+        cwd: path.join(root, "src", "llamacloud"),
+      });
+    }
+
+    // remove llamacloud folder
+    await fs.rm(path.join(root, "src", "llamacloud"), { recursive: true });
   } else {
     throw new Error(`Template ${template} not supported`);
   }
diff --git a/packages/create-llama/templates/components/ts-proxy/llamacloud/index.ts b/packages/create-llama/templates/components/ts-proxy/llamacloud/index.ts
new file mode 100644
index 000000000..5efb46373
--- /dev/null
+++ b/packages/create-llama/templates/components/ts-proxy/llamacloud/index.ts
@@ -0,0 +1,12 @@
+import { LlamaIndexServer } from "@llamaindex/server";
+
+new LlamaIndexServer({
+  uiConfig: {
+    componentsDir: "components",
+    layoutDir: "layout",
+    llamaDeploy: { deployment: "chat", workflow: "workflow" },
+  },
+  llamaCloud: {
+    outputDir: "output/llamacloud",
+  },
+}).start();
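Note: for projects without LlamaParse, `installLlamaIndexServerTemplate` copies the `index.ts` at the root of `ts-proxy` instead. That file is not part of this diff; a hypothetical sketch of what it presumably looks like (the LlamaCloud variant above minus the `llamaCloud` block):

```ts
// Hypothetical: templates/components/ts-proxy/index.ts (not shown in this diff),
// assumed to match the LlamaCloud variant without the llamaCloud block.
import { LlamaIndexServer } from "@llamaindex/server";

new LlamaIndexServer({
  uiConfig: {
    componentsDir: "components",
    layoutDir: "layout",
    llamaDeploy: { deployment: "chat", workflow: "workflow" },
  },
}).start();
```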
diff --git a/packages/create-llama/templates/components/use-cases/python/agentic_rag/README-template.md b/packages/create-llama/templates/components/use-cases/python/agentic_rag/README-template.md
index 211d8f543..c529eab6d 100644
--- a/packages/create-llama/templates/components/use-cases/python/agentic_rag/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/python/agentic_rag/README-template.md
@@ -93,9 +93,16 @@ The following are the available options:
 - `starterQuestions`: Predefined questions for chat interface
 - `componentsDir`: Directory for custom event components
 - `layoutDir`: Directory for custom layout components
-- `llamaCloudIndexSelector`: Enable LlamaCloud integration
 - `llamaDeploy`: The LlamaDeploy configuration (deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)
 
+## LlamaCloud Integration
+
+You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.
+
+The following are the available options:
+
+- `outputDir`: The directory for LlamaCloud output
+
 ## Learn More
 
 - [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
diff --git a/packages/create-llama/templates/components/use-cases/python/code_generator/README-template.md b/packages/create-llama/templates/components/use-cases/python/code_generator/README-template.md
index 725fbe871..7e237833c 100644
--- a/packages/create-llama/templates/components/use-cases/python/code_generator/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/python/code_generator/README-template.md
@@ -86,9 +86,16 @@ The following are the available options:
 - `starterQuestions`: Predefined questions for chat interface
 - `componentsDir`: Directory for custom event components
 - `layoutDir`: Directory for custom layout components
-- `llamaCloudIndexSelector`: Enable LlamaCloud integration
 - `llamaDeploy`: The LlamaDeploy configuration (deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)
 
+## LlamaCloud Integration
+
+You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.
+
+The following are the available options:
+
+- `outputDir`: The directory for LlamaCloud output
+
 ## Learn More
 
 - [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
diff --git a/packages/create-llama/templates/components/use-cases/python/deep_research/README-template.md b/packages/create-llama/templates/components/use-cases/python/deep_research/README-template.md
index 47065d5e1..7d183791a 100644
--- a/packages/create-llama/templates/components/use-cases/python/deep_research/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/python/deep_research/README-template.md
@@ -93,9 +93,16 @@ The following are the available options:
 - `starterQuestions`: Predefined questions for chat interface
 - `componentsDir`: Directory for custom event components
 - `layoutDir`: Directory for custom layout components
-- `llamaCloudIndexSelector`: Enable LlamaCloud integration
 - `llamaDeploy`: The LlamaDeploy configuration (deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)
 
+## LlamaCloud Integration
+
+You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.
+
+The following are the available options:
+
+- `outputDir`: The directory for LlamaCloud output
+
 ## Learn More
 
 - [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
diff --git a/packages/create-llama/templates/components/use-cases/python/document_generator/README-template.md b/packages/create-llama/templates/components/use-cases/python/document_generator/README-template.md
index 5d90432e6..3c1c2a292 100644
--- a/packages/create-llama/templates/components/use-cases/python/document_generator/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/python/document_generator/README-template.md
@@ -85,11 +85,18 @@ The following are the available options:
 - `starterQuestions`: Predefined questions for chat interface
 - `componentsDir`: Directory for custom event components
 - `layoutDir`: Directory for custom layout components
-- `llamaCloudIndexSelector`: Enable LlamaCloud integration
 - `llamaDeploy`: The LlamaDeploy configuration (deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)
 
 To customize the UI, you can start by modifying the [./ui/components/ui_event.jsx](./ui/components/ui_event.jsx) file.
 
+## LlamaCloud Integration
+
+You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.
+
+The following are the available options:
+
+- `outputDir`: The directory for LlamaCloud output
+
 ## Learn More
 
 - [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
diff --git a/packages/create-llama/templates/types/llamaindexserver/nextjs/src/llamacloud/index.ts b/packages/create-llama/templates/types/llamaindexserver/nextjs/src/llamacloud/index.ts
new file mode 100644
index 000000000..aa76a1479
--- /dev/null
+++ b/packages/create-llama/templates/types/llamaindexserver/nextjs/src/llamacloud/index.ts
@@ -0,0 +1,17 @@
+import { LlamaIndexServer } from "@llamaindex/server";
+import "dotenv/config";
+import { initSettings } from "./app/settings";
+import { workflowFactory } from "./app/workflow";
+
+initSettings();
+
+new LlamaIndexServer({
+  workflow: workflowFactory,
+  uiConfig: {
+    componentsDir: "components",
+    devMode: true,
+  },
+  llamaCloud: {
+    outputDir: "output/llamacloud",
+  },
+}).start();
diff --git a/packages/server/CLAUDE.md b/packages/server/CLAUDE.md
index 72e7207e2..74b19157e 100644
--- a/packages/server/CLAUDE.md
+++ b/packages/server/CLAUDE.md
@@ -128,9 +128,11 @@ Server configuration through `LlamaIndexServerOptions`:
 - `uiConfig.starterQuestions`: Predefined questions for chat interface
 - `uiConfig.componentsDir`: Directory for custom event components
 - `uiConfig.layoutDir`: Directory for custom layout components
-- `uiConfig.llamaCloudIndexSelector`: Enable LlamaCloud integration
 - `uiConfig.devMode`: Enable live code editing
 - `suggestNextQuestions`: Auto-suggest follow-up questions
+- `llamaCloud`: An object to configure the LlamaCloud integration, containing the following properties:
+  - `outputDir`: The directory for LlamaCloud output
+  - `indexSelector`: Whether to show the LlamaCloud index selector in the chat UI
 
 ## Dependencies
diff --git a/packages/server/README.md b/packages/server/README.md
index 1264d79af..c2e8993d8 100644
--- a/packages/server/README.md
+++ b/packages/server/README.md
@@ -64,9 +64,11 @@ The `LlamaIndexServer` accepts the following configuration options:
 - `enableFileUpload`: Whether to enable file upload in the chat UI (default: `false`). See [Upload file example](./examples/private-file/README.md) for more details.
 - `componentsDir`: The directory for custom UI components rendering events emitted by the workflow. The default is undefined, which does not render custom UI components.
 - `layoutDir`: The directory for custom layout sections. The default value is `layout`. See [Custom Layout](#custom-layout) for more details.
-- `llamaCloudIndexSelector`: Whether to show the LlamaCloud index selector in the chat UI (requires `LLAMA_CLOUD_API_KEY` to be set in the environment variables) (default: `false`)
 - `dev_mode`: When enabled, you can update workflow code in the UI and see the changes immediately. It's currently in beta and only supports updating workflow code at `app/src/workflow.ts`. Please start the server in dev mode (`npm run dev`) to use this reload feature.
 - `suggestNextQuestions`: Whether to suggest next questions after the assistant's response (default: `true`). You can change the prompt for the next questions by setting the `NEXT_QUESTION_PROMPT` environment variable.
+- `llamaCloud`: An object to configure the LlamaCloud integration, containing the following properties:
+  - `outputDir`: The directory for LlamaCloud output
+  - `indexSelector`: Whether to show the LlamaCloud index selector in the chat UI
 
 LlamaIndexServer accepts all the configuration options from the Next.js Custom Server, such as `port`, `hostname`, `dev`, etc. See all Next.js Custom Server options [here](https://nextjs.org/docs/app/building-your-application/configuring/custom-server).
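For consumers of `@llamaindex/server`, the docs above amount to a small migration: the `uiConfig.llamaCloudIndexSelector` boolean moves into a top-level `llamaCloud` object. A minimal sketch of the new usage (values illustrative):

```ts
import { LlamaIndexServer } from "@llamaindex/server";

new LlamaIndexServer({
  // before: uiConfig: { llamaCloudIndexSelector: true }
  llamaCloud: {
    outputDir: "output/llamacloud",
    // Shows the index selector in the chat UI. Per server.ts further down,
    // this now throws at startup when LLAMA_CLOUD_API_KEY is missing.
    indexSelector: true,
  },
}).start();
```

As `server.ts` below enforces, setting `indexSelector` without `LLAMA_CLOUD_API_KEY` now fails fast at construction instead of silently hiding the selector.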
diff --git a/packages/server/next/app/api/files/[...slug]/route.ts b/packages/server/next/app/api/files/[...slug]/route.ts
index 7044d0c38..0627efec3 100644
--- a/packages/server/next/app/api/files/[...slug]/route.ts
+++ b/packages/server/next/app/api/files/[...slug]/route.ts
@@ -8,7 +8,8 @@ export async function GET(
   request: NextRequest,
   { params }: { params: Promise<{ slug: string[] }> },
 ) {
-  const isUsingLlamaCloud = !!process.env.LLAMA_CLOUD_API_KEY;
+  const { searchParams } = request.nextUrl;
+  const isUsingLlamaCloud = searchParams.get("useLlamaCloud") === "true";
   const filePath = (await params).slug.join("/");
 
   if (!filePath.startsWith("output") && !filePath.startsWith("data")) {
diff --git a/packages/server/src/handlers/chat.ts b/packages/server/src/handlers/chat.ts
index c907c3a58..9cbdaa3fe 100644
--- a/packages/server/src/handlers/chat.ts
+++ b/packages/server/src/handlers/chat.ts
@@ -21,6 +21,7 @@ export const handleChat = async (
   res: ServerResponse,
   workflowFactory: WorkflowFactory,
   suggestNextQuestions: boolean,
+  llamaCloudOutputDir?: string,
 ) => {
   const abortController = new AbortController();
   res.on("close", () => abortController.abort("Connection closed"));
@@ -53,7 +54,10 @@
     },
   });
 
-  const stream = processWorkflowStream(context.stream).until(
+  const stream = processWorkflowStream(
+    context.stream,
+    llamaCloudOutputDir,
+  ).until(
    (event) => abortController.signal.aborted || stopAgentEvent.include(event),
   );
 
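The route handler above now decides per request whether to resolve files against the LlamaCloud output directory, based on a query flag rather than the mere presence of `LLAMA_CLOUD_API_KEY`. The custom server (next file) appends that flag automatically when a `llamaCloud` config exists; an illustrative request, with placeholder path segments:

```ts
// Illustrative only: fetching a generated file from the file server.
// When the server is configured with `llamaCloud`, server.ts rewrites the
// query so the route handler sees useLlamaCloud=true.
const res = await fetch(
  "/api/files/output/llamacloud/pipeline-id$file-name.pdf?useLlamaCloud=true",
);
```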
diff --git a/packages/server/src/server.ts b/packages/server/src/server.ts
index 27a66b4d4..34b919915 100644
--- a/packages/server/src/server.ts
+++ b/packages/server/src/server.ts
@@ -7,7 +7,11 @@ import path from "path";
 import { parse } from "url";
 import { promisify } from "util";
 import { handleChat } from "./handlers/chat";
-import type { LlamaDeployConfig, LlamaIndexServerOptions } from "./types";
+import type {
+  LlamaCloudConfig,
+  LlamaDeployConfig,
+  LlamaIndexServerOptions,
+} from "./types";
 
 const nextDir = path.join(__dirname, "..", "server");
 const configFile = path.join(__dirname, "..", "server", "public", "config.js");
@@ -25,6 +29,7 @@ export class LlamaIndexServer {
   llamaDeploy?: LlamaDeployConfig | undefined;
   serverUrl: string;
   fileServer: string;
+  llamaCloud?: LlamaCloudConfig | undefined;
 
   constructor(options: LlamaIndexServerOptions) {
     const { workflow, suggestNextQuestions, ...nextAppOptions } = options;
@@ -38,8 +43,16 @@
     this.llamaDeploy = options.uiConfig?.llamaDeploy;
     this.serverUrl = options.uiConfig?.serverUrl || ""; // use current host if not set
 
-    const isUsingLlamaCloud = !!getEnv("LLAMA_CLOUD_API_KEY");
-    const defaultFileServer = isUsingLlamaCloud ? "output/llamacloud" : "data";
+    this.llamaCloud = options.llamaCloud;
+    if (this.llamaCloud?.indexSelector && !getEnv("LLAMA_CLOUD_API_KEY")) {
+      throw new Error(
+        "LlamaCloud API key is required. Please set `LLAMA_CLOUD_API_KEY` in environment variables",
+      );
+    }
+
+    const defaultFileServer = this.llamaCloud
+      ? this.llamaCloud.outputDir
+      : "data";
     this.fileServer = options.fileServer ?? defaultFileServer;
 
     if (this.llamaDeploy) {
@@ -48,8 +61,8 @@
         "LlamaDeploy requires deployment and workflow to be set",
       );
     }
-    const { devMode, llamaCloudIndexSelector, enableFileUpload } =
-      options.uiConfig ?? {};
+    const { devMode, enableFileUpload } = options.uiConfig ?? {};
+    const llamaCloudIndexSelector = this.llamaCloud?.indexSelector;
 
     if (devMode || llamaCloudIndexSelector || enableFileUpload) {
       throw new Error(
@@ -103,7 +116,7 @@ export default {
     const starterQuestions = uiConfig?.starterQuestions ?? [];
 
     const llamaCloudApi =
-      uiConfig?.llamaCloudIndexSelector && getEnv("LLAMA_CLOUD_API_KEY")
+      this.llamaCloud?.indexSelector && getEnv("LLAMA_CLOUD_API_KEY")
         ? `${basePath}/api/chat/config/llamacloud`
         : undefined;
     const componentsApi = this.componentsDir
@@ -166,6 +179,7 @@
           res,
           this.workflowFactory,
          this.suggestNextQuestions,
+          this.llamaCloud?.outputDir,
         );
       }
 
@@ -181,6 +195,14 @@ export default {
       query.layoutDir = this.layoutDir;
     }
 
+      if (
+        pathname?.includes("/api/files") &&
+        req.method === "GET" &&
+        this.llamaCloud
+      ) {
+        query.useLlamaCloud = "true";
+      }
+
       const handle = this.app.getRequestHandler();
       handle(req, res, { ...parsedUrl, query });
     });
diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts
index f5b68cd4a..92f17b41c 100644
--- a/packages/server/src/types.ts
+++ b/packages/server/src/types.ts
@@ -21,16 +21,21 @@ export type UIConfig = {
   starterQuestions?: string[];
   componentsDir?: string;
   layoutDir?: string;
-  llamaCloudIndexSelector?: boolean;
   devMode?: boolean;
   enableFileUpload?: boolean;
   llamaDeploy?: LlamaDeployConfig;
   serverUrl?: string;
 };
 
+export type LlamaCloudConfig = {
+  outputDir: string;
+  indexSelector?: boolean;
+};
+
 export type LlamaIndexServerOptions = NextAppOptions & {
   workflow?: WorkflowFactory;
   uiConfig?: UIConfig;
   fileServer?: string;
   suggestNextQuestions?: boolean;
+  llamaCloud?: LlamaCloudConfig;
 };
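A quick usage sketch of the new `LlamaCloudConfig` type. Assumption: the type is re-exported from the package root; if not, import it from the server's types module. Note from `server.ts` above that `outputDir` also becomes the default `fileServer` directory in place of `data`:

```ts
// Assumption: LlamaCloudConfig is re-exported by @llamaindex/server.
import type { LlamaCloudConfig } from "@llamaindex/server";

const llamaCloud: LlamaCloudConfig = {
  outputDir: "output/llamacloud", // required: also the default fileServer directory
  indexSelector: true, // optional: requires LLAMA_CLOUD_API_KEY at startup
};
```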
diff --git a/packages/server/src/utils/events.ts b/packages/server/src/utils/events.ts
index c8e8439e3..fffe0a2b5 100644
--- a/packages/server/src/utils/events.ts
+++ b/packages/server/src/utils/events.ts
@@ -41,11 +41,14 @@ export type AgentRunEventData = {
 };
 
 export const agentRunEvent = workflowEvent<AgentRunEventData>();
 
-export function toSourceEventNode(node: NodeWithScore) {
+export function toSourceEventNode(
+  node: NodeWithScore,
+  llamaCloudOutputDir: string = "output/llamacloud",
+) {
   const { file_name, pipeline_id } = node.node.metadata;
   const filePath = pipeline_id
-    ? `output/llamacloud/${pipeline_id}$${file_name}`
+    ? `${llamaCloudOutputDir}/${pipeline_id}$${file_name}`
     : `data/${file_name}`;
 
   return {
@@ -59,9 +62,12 @@
   };
 }
 
-export function toSourceEvent(sourceNodes: NodeWithScore[] = []) {
+export function toSourceEvent(
+  sourceNodes: NodeWithScore[] = [],
+  llamaCloudOutputDir: string = "output/llamacloud",
+) {
   const nodes: SourceEventNode[] = sourceNodes.map((node) =>
-    toSourceEventNode(node),
+    toSourceEventNode(node, llamaCloudOutputDir),
   );
   return sourceEvent.with({
     data: { nodes },
diff --git a/packages/server/src/utils/workflow.ts b/packages/server/src/utils/workflow.ts
index 439f16729..1c04669df 100644
--- a/packages/server/src/utils/workflow.ts
+++ b/packages/server/src/utils/workflow.ts
@@ -64,6 +64,7 @@ export async function runWorkflow({
 
 export function processWorkflowStream(
   stream: WorkflowStream<WorkflowEventData<unknown>>,
+  llamaCloudOutputDir?: string,
 ) {
   return stream.pipeThrough(
     new TransformStream<WorkflowEventData<unknown>, WorkflowEventData<unknown>>(
@@ -90,7 +91,10 @@
         ) {
           const sourceNodes =
             rawOutput.sourceNodes as unknown as NodeWithScore[];
-          transformedEvent = toSourceEvent(sourceNodes);
+          transformedEvent = toSourceEvent(
+            sourceNodes,
+            llamaCloudOutputDir,
+          );
         }
       }
       // Handle artifact events, transform to agentStreamEvent
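Net effect of the `events.ts` and `workflow.ts` changes: the LlamaCloud output directory is threaded from `LlamaIndexServer` down to source-node path construction instead of being hard-coded. An illustrative call (metadata values and the import path are made up):

```ts
import type { NodeWithScore } from "llamaindex";
import { toSourceEventNode } from "./events"; // path assumed relative to src/utils

// Fabricated metadata for illustration only; nodes ingested through a
// LlamaCloud pipeline carry pipeline_id metadata.
const node = {
  node: { metadata: { file_name: "report.pdf", pipeline_id: "abc123" } },
} as unknown as NodeWithScore;

// Default: filePath = "output/llamacloud/abc123$report.pdf"
toSourceEventNode(node);

// With llamaCloud.outputDir = "custom/llamacloud-output":
// filePath = "custom/llamacloud-output/abc123$report.pdf"
toSourceEventNode(node, "custom/llamacloud-output");
```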