Merged
6 changes: 6 additions & 0 deletions .changeset/gorgeous-squids-run.md
@@ -0,0 +1,6 @@
---
"create-llama": patch
"@llamaindex/server": minor
---

refactor: llamacloud configs
12 changes: 10 additions & 2 deletions packages/create-llama/helpers/python.ts
@@ -439,9 +439,17 @@ const installLlamaIndexServerTemplate = async ({
});

// copy ts server to ui folder
await copy("**", uiDir, {
const tsProxyDir = path.join(templatesDir, "components", "ts-proxy");
await copy("package.json", uiDir, {
parents: true,
cwd: path.join(templatesDir, "components", "ts-proxy"),
cwd: tsProxyDir,
});
const serverFileLocation = useLlamaParse
? path.join(tsProxyDir, "llamacloud")
: path.join(tsProxyDir);
await copy("index.ts", uiDir, {
parents: true,
cwd: serverFileLocation,
});

// Copy custom UI components to ui/components folder
12 changes: 12 additions & 0 deletions packages/create-llama/helpers/typescript.ts
@@ -133,6 +133,18 @@ export const installTSTemplate = async ({
modelConfig,
dataSources,
});

if (vectorDb === "llamacloud") {
// replace index.ts with llamacloud/index.ts
await fs.rm(path.join(root, "src", "index.ts"));
await copy("index.ts", path.join(root, "src"), {
parents: true,
cwd: path.join(root, "src", "llamacloud"),
});
}

// remove llamacloud folder
await fs.rm(path.join(root, "src", "llamacloud"), { recursive: true });
} else {
throw new Error(`Template ${template} not supported`);
}
@@ -0,0 +1,12 @@
import { LlamaIndexServer } from "@llamaindex/server";

new LlamaIndexServer({
uiConfig: {
componentsDir: "components",
layoutDir: "layout",
llamaDeploy: { deployment: "chat", workflow: "workflow" },
},
llamaCloud: {
outputDir: "output/llamacloud",
},
}).start();
@@ -93,9 +93,16 @@ The following are the available options:
- `starterQuestions`: Predefined questions for chat interface
- `componentsDir`: Directory for custom event components
- `layoutDir`: Directory for custom layout components
- `llamaCloudIndexSelector`: Enable LlamaCloud integration
- `llamaDeploy`: The LlamaDeploy configuration (the deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)

## LlamaCloud Integration

You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.

The following are the available options:

- `outputDir`: The directory for LlamaCloud output
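
For example, a minimal `ui/index.ts` enabling the integration might look like the following sketch (the directory value is illustrative; point `outputDir` wherever downloaded LlamaCloud files should be served from):

```ts
import { LlamaIndexServer } from "@llamaindex/server";

new LlamaIndexServer({
  uiConfig: {
    componentsDir: "components",
    layoutDir: "layout",
    llamaDeploy: { deployment: "chat", workflow: "workflow" },
  },
  llamaCloud: {
    // Directory where LlamaCloud output files are written and served from
    outputDir: "output/llamacloud",
  },
}).start();
```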

## Learn More

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
@@ -86,9 +86,16 @@ The following are the available options:
- `starterQuestions`: Predefined questions for chat interface
- `componentsDir`: Directory for custom event components
- `layoutDir`: Directory for custom layout components
- `llamaCloudIndexSelector`: Enable LlamaCloud integration
- `llamaDeploy`: The LlamaDeploy configuration (the deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)

## LlamaCloud Integration

You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.

The following are the available options:

- `outputDir`: The directory for LlamaCloud output

## Learn More

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
@@ -93,9 +93,16 @@ The following are the available options:
- `starterQuestions`: Predefined questions for chat interface
- `componentsDir`: Directory for custom event components
- `layoutDir`: Directory for custom layout components
- `llamaCloudIndexSelector`: Enable LlamaCloud integration
- `llamaDeploy`: The LlamaDeploy configuration (the deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)

## LlamaCloud Integration

You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.

The following are the available options:

- `outputDir`: The directory for LlamaCloud output

## Learn More

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
@@ -85,11 +85,18 @@ The following are the available options:
- `starterQuestions`: Predefined questions for chat interface
- `componentsDir`: Directory for custom event components
- `layoutDir`: Directory for custom layout components
- `llamaCloudIndexSelector`: Enable LlamaCloud integration
- `llamaDeploy`: The LlamaDeploy configuration (the deployment name and workflow name defined in the [llama_deploy.yml](llama_deploy.yml) file)

To customize the UI, you can start by modifying the [./ui/components/ui_event.jsx](./ui/components/ui_event.jsx) file.

## LlamaCloud Integration

You can enable LlamaCloud integration by setting the `llamaCloud` option in the [ui/index.ts](ui/index.ts) file.

The following are the available options:

- `outputDir`: The directory for LlamaCloud output

## Learn More

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
@@ -0,0 +1,17 @@
import { LlamaIndexServer } from "@llamaindex/server";
import "dotenv/config";
import { initSettings } from "./app/settings";
import { workflowFactory } from "./app/workflow";

initSettings();

new LlamaIndexServer({
workflow: workflowFactory,
uiConfig: {
componentsDir: "components",
devMode: true,
},
llamaCloud: {
outputDir: "output/llamacloud",
},
}).start();
4 changes: 3 additions & 1 deletion packages/server/CLAUDE.md
@@ -128,9 +128,11 @@ Server configuration through `LlamaIndexServerOptions`:
- `uiConfig.starterQuestions`: Predefined questions for chat interface
- `uiConfig.componentsDir`: Directory for custom event components
- `uiConfig.layoutDir`: Directory for custom layout components
- `uiConfig.llamaCloudIndexSelector`: Enable LlamaCloud integration
- `uiConfig.devMode`: Enable live code editing
- `suggestNextQuestions`: Auto-suggest follow-up questions
- `llamaCloud`: An object to configure the LlamaCloud integration containing the following properties:
- `outputDir`: The directory for LlamaCloud output
- `indexSelector`: Whether to show the LlamaCloud index selector in the chat UI
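
A minimal sketch of the new option, mirroring the template `index.ts` added in this PR (the workflow factory import path is illustrative):

```ts
import { LlamaIndexServer } from "@llamaindex/server";
import { workflowFactory } from "./app/workflow"; // illustrative import path

new LlamaIndexServer({
  workflow: workflowFactory,
  llamaCloud: {
    // Directory where LlamaCloud output files live
    outputDir: "output/llamacloud",
  },
}).start();
```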

## Dependencies

4 changes: 3 additions & 1 deletion packages/server/README.md
@@ -64,9 +64,11 @@ The `LlamaIndexServer` accepts the following configuration options:
- `enableFileUpload`: Whether to enable file upload in the chat UI (default: `false`). See [Upload file example](./examples/private-file/README.md) for more details.
- `componentsDir`: The directory for custom UI components rendering events emitted by the workflow. The default is undefined, which does not render custom UI components.
- `layoutDir`: The directory for custom layout sections. The default value is `layout`. See [Custom Layout](#custom-layout) for more details.
- `llamaCloudIndexSelector`: Whether to show the LlamaCloud index selector in the chat UI (requires `LLAMA_CLOUD_API_KEY` to be set in the environment variables) (default: `false`)
- `devMode`: When enabled, you can update workflow code in the UI and see the changes immediately. It's currently in beta and only supports updating workflow code at `app/src/workflow.ts`. Start the server in dev mode (`npm run dev`) to use this live-reload feature.
- `suggestNextQuestions`: Whether to suggest next questions after the assistant's response (default: `true`). You can change the prompt for the next questions by setting the `NEXT_QUESTION_PROMPT` environment variable.
- `llamaCloud`: An object to configure the LlamaCloud integration containing the following properties:
- `outputDir`: The directory for LlamaCloud output
- `indexSelector`: Whether to show the LlamaCloud index selector in the chat UI
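
A minimal sketch of the new `llamaCloud` option, assuming a workflow factory is defined elsewhere (note that setting `indexSelector` without `LLAMA_CLOUD_API_KEY` in the environment causes the server to throw on startup):

```ts
import { LlamaIndexServer } from "@llamaindex/server";
import { workflowFactory } from "./app/workflow"; // illustrative import path

new LlamaIndexServer({
  workflow: workflowFactory,
  llamaCloud: {
    outputDir: "output/llamacloud", // where LlamaCloud output files are served from
    indexSelector: true, // requires LLAMA_CLOUD_API_KEY to be set
  },
}).start();
```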

LlamaIndexServer accepts all the configuration options from the Next.js Custom Server, such as `port`, `hostname`, `dev`, etc.
See all Next.js Custom Server options [here](https://nextjs.org/docs/app/building-your-application/configuring/custom-server).
3 changes: 2 additions & 1 deletion packages/server/next/app/api/files/[...slug]/route.ts
@@ -8,7 +8,8 @@ export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ slug: string[] }> },
) {
const isUsingLlamaCloud = !!process.env.LLAMA_CLOUD_API_KEY;
const { searchParams } = request.nextUrl;
const isUsingLlamaCloud = searchParams.get("useLlamaCloud") === "true";
const filePath = (await params).slug.join("/");

if (!filePath.startsWith("output") && !filePath.startsWith("data")) {
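
With this change, LlamaCloud resolution for the files route is driven by a query parameter rather than by the presence of `LLAMA_CLOUD_API_KEY`; the server injects `useLlamaCloud=true` itself whenever a `llamaCloud` config is set (see `server.ts` below). A hypothetical direct request, with an illustrative file path:

```ts
// The segments after /api/files/ form the `slug`; paths must start with
// "output" or "data". The pipeline id and file name here are made up.
const res = await fetch(
  "/api/files/output/llamacloud/pipeline-123$report.pdf?useLlamaCloud=true",
);
if (!res.ok) throw new Error(`File request failed: ${res.status}`);
const file = await res.blob();
```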
6 changes: 5 additions & 1 deletion packages/server/src/handlers/chat.ts
@@ -21,6 +21,7 @@ export const handleChat = async (
res: ServerResponse,
workflowFactory: WorkflowFactory,
suggestNextQuestions: boolean,
llamaCloudOutputDir?: string,
) => {
const abortController = new AbortController();
res.on("close", () => abortController.abort("Connection closed"));
@@ -53,7 +54,10 @@
},
});

const stream = processWorkflowStream(context.stream).until(
const stream = processWorkflowStream(
context.stream,
llamaCloudOutputDir,
).until(
(event) =>
abortController.signal.aborted || stopAgentEvent.include(event),
);
34 changes: 28 additions & 6 deletions packages/server/src/server.ts
@@ -7,7 +7,11 @@ import path from "path";
import { parse } from "url";
import { promisify } from "util";
import { handleChat } from "./handlers/chat";
import type { LlamaDeployConfig, LlamaIndexServerOptions } from "./types";
import type {
LlamaCloudConfig,
LlamaDeployConfig,
LlamaIndexServerOptions,
} from "./types";

const nextDir = path.join(__dirname, "..", "server");
const configFile = path.join(__dirname, "..", "server", "public", "config.js");
@@ -25,6 +29,7 @@ export class LlamaIndexServer {
llamaDeploy?: LlamaDeployConfig | undefined;
serverUrl: string;
fileServer: string;
llamaCloud?: LlamaCloudConfig | undefined;

constructor(options: LlamaIndexServerOptions) {
const { workflow, suggestNextQuestions, ...nextAppOptions } = options;
@@ -38,8 +43,16 @@
this.llamaDeploy = options.uiConfig?.llamaDeploy;
this.serverUrl = options.uiConfig?.serverUrl || ""; // use current host if not set

const isUsingLlamaCloud = !!getEnv("LLAMA_CLOUD_API_KEY");
const defaultFileServer = isUsingLlamaCloud ? "output/llamacloud" : "data";
this.llamaCloud = options.llamaCloud;
if (this.llamaCloud?.indexSelector && !getEnv("LLAMA_CLOUD_API_KEY")) {
throw new Error(
"LlamaCloud API key is required. Please set `LLAMA_CLOUD_API_KEY` in environment variables",
);
}

const defaultFileServer = this.llamaCloud
? this.llamaCloud.outputDir
: "data";
this.fileServer = options.fileServer ?? defaultFileServer;

if (this.llamaDeploy) {
@@ -48,8 +61,8 @@
"LlamaDeploy requires deployment and workflow to be set",
);
}
const { devMode, llamaCloudIndexSelector, enableFileUpload } =
options.uiConfig ?? {};
const { devMode, enableFileUpload } = options.uiConfig ?? {};
const llamaCloudIndexSelector = this.llamaCloud?.indexSelector;

if (devMode || llamaCloudIndexSelector || enableFileUpload) {
throw new Error(
@@ -103,7 +116,7 @@

const starterQuestions = uiConfig?.starterQuestions ?? [];
const llamaCloudApi =
uiConfig?.llamaCloudIndexSelector && getEnv("LLAMA_CLOUD_API_KEY")
this.llamaCloud?.indexSelector && getEnv("LLAMA_CLOUD_API_KEY")
? `${basePath}/api/chat/config/llamacloud`
: undefined;
const componentsApi = this.componentsDir
@@ -166,6 +179,7 @@
res,
this.workflowFactory,
this.suggestNextQuestions,
this.llamaCloud?.outputDir,
);
}

@@ -181,6 +195,14 @@
query.layoutDir = this.layoutDir;
}

if (
pathname?.includes("/api/files") &&
req.method === "GET" &&
this.llamaCloud
) {
query.useLlamaCloud = "true";
}

const handle = this.app.getRequestHandler();
handle(req, res, { ...parsedUrl, query });
});
7 changes: 6 additions & 1 deletion packages/server/src/types.ts
@@ -21,16 +21,21 @@ export type UIConfig = {
starterQuestions?: string[];
componentsDir?: string;
layoutDir?: string;
llamaCloudIndexSelector?: boolean;
devMode?: boolean;
enableFileUpload?: boolean;
llamaDeploy?: LlamaDeployConfig;
serverUrl?: string;
};

export type LlamaCloudConfig = {
outputDir: string;
indexSelector?: boolean;
};

export type LlamaIndexServerOptions = NextAppOptions & {
workflow?: WorkflowFactory;
uiConfig?: UIConfig;
fileServer?: string;
suggestNextQuestions?: boolean;
llamaCloud?: LlamaCloudConfig;
};
14 changes: 10 additions & 4 deletions packages/server/src/utils/events.ts
@@ -41,11 +41,14 @@ export type AgentRunEventData = {
};
export const agentRunEvent = workflowEvent<AgentRunEventData>();

export function toSourceEventNode(node: NodeWithScore<Metadata>) {
export function toSourceEventNode(
node: NodeWithScore<Metadata>,
llamaCloudOutputDir: string = "output/llamacloud",
) {
const { file_name, pipeline_id } = node.node.metadata;

const filePath = pipeline_id
? `output/llamacloud/${pipeline_id}$${file_name}`
? `${llamaCloudOutputDir}/${pipeline_id}$${file_name}`
: `data/${file_name}`;

return {
@@ -59,9 +62,12 @@
};
}

export function toSourceEvent(sourceNodes: NodeWithScore<Metadata>[] = []) {
export function toSourceEvent(
sourceNodes: NodeWithScore<Metadata>[] = [],
llamaCloudOutputDir: string = "output/llamacloud",
) {
const nodes: SourceEventNode[] = sourceNodes.map((node) =>
toSourceEventNode(node),
toSourceEventNode(node, llamaCloudOutputDir),
);
return sourceEvent.with({
data: { nodes },
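
To illustrate the new parameter (a sketch with made-up metadata; the import path is illustrative): a node carrying a `pipeline_id` now resolves against the configured LlamaCloud output directory, while local nodes still resolve under `data/`.

```ts
import type { Metadata, NodeWithScore } from "llamaindex";
import { toSourceEventNode } from "./events";

// Hypothetical node as a LlamaCloud retriever might return it.
const node = {
  node: { metadata: { file_name: "report.pdf", pipeline_id: "pipeline-123" } },
  score: 0.9,
} as unknown as NodeWithScore<Metadata>;

// Default output dir -> filePath "output/llamacloud/pipeline-123$report.pdf"
toSourceEventNode(node);

// Custom output dir -> filePath "my-output/pipeline-123$report.pdf"
toSourceEventNode(node, "my-output");
```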
6 changes: 5 additions & 1 deletion packages/server/src/utils/workflow.ts
@@ -64,6 +64,7 @@ export async function runWorkflow({

export function processWorkflowStream(
stream: WorkflowStream<WorkflowEventData<unknown>>,
llamaCloudOutputDir?: string,
) {
return stream.pipeThrough(
new TransformStream<WorkflowEventData<unknown>, WorkflowEventData<unknown>>(
@@ -90,7 +91,10 @@
) {
const sourceNodes =
rawOutput.sourceNodes as unknown as NodeWithScore<Metadata>[];
transformedEvent = toSourceEvent(sourceNodes);
transformedEvent = toSourceEvent(
sourceNodes,
llamaCloudOutputDir,
);
}
}
// Handle artifact events, transform to agentStreamEvent