Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/wise-ways-knock.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"@llamaindex/server": patch
---

feat: support file server for python llamadeploy
48 changes: 47 additions & 1 deletion packages/server/next/app/api/files/[...slug]/route.ts
Original file line number Diff line number Diff line change
@@ -1,24 +1,70 @@
import fs from "fs";
import { LLamaCloudFileService } from "llamaindex";
import { NextRequest, NextResponse } from "next/server";
import { promisify } from "util";
import { downloadFile } from "../helpers";

/**
 * Serve a local file from the `output/` or `data/` folders.
 * When LlamaCloud is configured (LLAMA_CLOUD_API_KEY set) and the file is not
 * cached locally, it is first downloaded from LlamaCloud using the
 * "<pipeline_id>$<file_name>" encoding of the last path segment.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ slug: string[] }> },
) {
  const isUsingLlamaCloud = !!process.env.LLAMA_CLOUD_API_KEY;
  const filePath = (await params).slug.join("/");

  // Decode BEFORE validating so encoded traversal sequences
  // (e.g. "%2e%2e%2f") cannot bypass the permission check below.
  const decodedFilePath = decodeURIComponent(filePath);

  // Only files under the "output" and "data" folders may be served, and the
  // path must not contain ".." segments that would escape those folders.
  if (
    (!decodedFilePath.startsWith("output") &&
      !decodedFilePath.startsWith("data")) ||
    decodedFilePath.split("/").includes("..")
  ) {
    return NextResponse.json({ error: "No permission" }, { status: 400 });
  }

  // Single existence helper: avoids redeclaring `fileExists` in this scope
  // and re-checks after a potential download.
  const fileExists = () => promisify(fs.exists)(decodedFilePath);

  // If using LlamaCloud and the file is not cached locally, download it.
  if (isUsingLlamaCloud && !(await fileExists())) {
    const { pipeline_id, file_name } =
      getLlamaCloudPipelineIdAndFileName(decodedFilePath);

    if (pipeline_id && file_name) {
      // get the file url from llama cloud
      const downloadUrl = await LLamaCloudFileService.getFileUrl(
        pipeline_id,
        file_name,
      );
      if (!downloadUrl) {
        return NextResponse.json(
          {
            error: `Cannot create LlamaCloud download url for pipeline_id=${pipeline_id}, file_name=${file_name}`,
          },
          { status: 404 },
        );
      }

      // download the LlamaCloud file to local
      await downloadFile(downloadUrl, decodedFilePath);
      console.log("File downloaded successfully to: ", decodedFilePath);
    }
  }

  if (await fileExists()) {
    const fileBuffer = await promisify(fs.readFile)(decodedFilePath);
    return new NextResponse(fileBuffer);
  }
  return NextResponse.json({ error: "File not found" }, { status: 404 });
}

/**
 * Extract the LlamaCloud pipeline id and original file name from a local file
 * path whose last segment is encoded as "<pipeline_id>$<file_name>".
 * The first "$" is the delimiter; later "$" characters belong to the file name.
 * Returns empty strings for both fields when the path is not in that scheme.
 */
function getLlamaCloudPipelineIdAndFileName(filePath: string) {
  // The last path segment carries the encoded pair.
  const segments = filePath.split("/");
  const lastSegment = segments[segments.length - 1] ?? "";

  // Split on every "$" and re-join the tail, so only the first "$" delimits.
  const [head, ...tail] = lastSegment.split("$");
  if (tail.length === 0) {
    // No "$" present: not a LlamaCloud-encoded file name.
    return { pipeline_id: "", file_name: "" };
  }

  return { pipeline_id: head ?? "", file_name: tail.join("$") };
}
35 changes: 35 additions & 0 deletions packages/server/next/app/api/files/helpers.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import crypto from "node:crypto";
import fs from "node:fs";
import https from "node:https";
import path from "node:path";

import { type ServerFile } from "@llamaindex/server";
Expand Down Expand Up @@ -55,3 +56,37 @@ async function saveFile(filepath: string, content: string | Buffer) {
function sanitizeFileName(fileName: string) {
return fileName.replace(/[^a-zA-Z0-9_-]/g, "_");
}
/**
 * Download the file at `urlToDownload` over HTTPS and write it to
 * `downloadedPath`, creating parent directories as needed.
 * On any failure (non-200 status, request error, or write error) the write
 * stream is closed, the partially written file is removed, and the promise
 * rejects with the original error.
 *
 * NOTE(review): HTTP redirects are not followed; assumes the URL resolves
 * directly to the file content — confirm against LlamaCloud's download URLs.
 */
export async function downloadFile(
  urlToDownload: string,
  downloadedPath: string,
): Promise<void> {
  return new Promise((resolve, reject) => {
    const dir = path.dirname(downloadedPath);
    fs.mkdirSync(dir, { recursive: true });
    const file = fs.createWriteStream(downloadedPath);

    // Close the stream and remove the partial file, then reject with `err`.
    const fail = (err: Error) => {
      file.close();
      fs.unlink(downloadedPath, () => reject(err));
    };

    https
      .get(urlToDownload, (response) => {
        if (response.statusCode !== 200) {
          // Drain the response so the underlying socket is released.
          response.resume();
          fail(
            new Error(`Failed to download file: Status ${response.statusCode}`),
          );
          return;
        }

        response.pipe(file);

        file.on("finish", () => {
          file.close();
          resolve();
        });

        file.on("error", fail);
      })
      .on("error", fail);
  });
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ export default function ChatSection() {
});

const useChatWorkflowHandler = useChatWorkflow({
fileServerUrl: getConfig("FILE_SERVER_URL"),
deployment,
workflow,
onError: handleError,
Expand Down
8 changes: 6 additions & 2 deletions packages/server/next/app/components/ui/chat/chat-starter.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,13 @@ import { getConfig } from "../lib/utils";

export function ChatStarter({ className }: { className?: string }) {
const { append, messages, requestData } = useChatUI();
const starterQuestionsFromConfig = getConfig("STARTER_QUESTIONS");

const starterQuestions =
getConfig("STARTER_QUESTIONS") ??
JSON.parse(process.env.NEXT_PUBLIC_STARTER_QUESTIONS || "[]");
Array.isArray(starterQuestionsFromConfig) &&
starterQuestionsFromConfig?.length > 0
? starterQuestionsFromConfig
: JSON.parse(process.env.NEXT_PUBLIC_STARTER_QUESTIONS || "[]");

if (starterQuestions.length === 0 || messages.length > 0) return null;
return (
Expand Down
2 changes: 1 addition & 1 deletion packages/server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@
"@babel/traverse": "^7.27.0",
"@babel/types": "^7.27.0",
"@hookform/resolvers": "^5.0.1",
"@llamaindex/chat-ui": "0.5.12",
"@llamaindex/chat-ui": "0.5.16",
"@radix-ui/react-accordion": "^1.2.3",
"@radix-ui/react-alert-dialog": "^1.1.7",
"@radix-ui/react-aspect-ratio": "^1.1.3",
Expand Down
27 changes: 22 additions & 5 deletions packages/server/src/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ import type { LlamaDeployConfig, LlamaIndexServerOptions } from "./types";
const nextDir = path.join(__dirname, "..", "server");
const configFile = path.join(__dirname, "..", "server", "public", "config.js");
const nextConfigFile = path.join(nextDir, "next.config.ts");
const layoutFile = path.join(nextDir, "app", "layout.tsx");
const constantsFile = path.join(nextDir, "app", "constants.ts");
const dev = process.env.NODE_ENV !== "production";

Expand All @@ -24,6 +23,8 @@ export class LlamaIndexServer {
layoutDir: string;
suggestNextQuestions: boolean;
llamaDeploy?: LlamaDeployConfig | undefined;
serverUrl: string;
fileServer: string;

constructor(options: LlamaIndexServerOptions) {
const { workflow, suggestNextQuestions, ...nextAppOptions } = options;
Expand All @@ -33,17 +34,27 @@ export class LlamaIndexServer {
this.componentsDir = options.uiConfig?.componentsDir;
this.layoutDir = options.uiConfig?.layoutDir ?? "layout";
this.suggestNextQuestions = suggestNextQuestions ?? true;

this.llamaDeploy = options.uiConfig?.llamaDeploy;
this.serverUrl = options.uiConfig?.serverUrl || ""; // use current host if not set

const isUsingLlamaCloud = !!getEnv("LLAMA_CLOUD_API_KEY");
const defaultFileServer = isUsingLlamaCloud ? "output/llamacloud" : "data";
this.fileServer = options.fileServer ?? defaultFileServer;

if (this.llamaDeploy) {
if (!this.llamaDeploy.deployment || !this.llamaDeploy.workflow) {
throw new Error(
"LlamaDeploy requires deployment and workflow to be set",
);
}
if (options.uiConfig?.devMode) {
// workflow file is in llama-deploy src, so we should disable devmode
throw new Error("Devmode is not supported when enabling LlamaDeploy");
const { devMode, llamaCloudIndexSelector, enableFileUpload } =
options.uiConfig ?? {};

if (devMode || llamaCloudIndexSelector || enableFileUpload) {
throw new Error(
"`devMode`, `llamaCloudIndexSelector`, and `enableFileUpload` are not supported when enabling LlamaDeploy",
);
}
} else {
// if llamaDeploy is not set but workflowFactory is not defined, we should throw an error
Expand Down Expand Up @@ -103,6 +114,11 @@ export default {
const enableFileUpload = uiConfig?.enableFileUpload ?? false;
const uploadApi = enableFileUpload ? `${basePath}/api/files` : undefined;

// construct file server url for LlamaDeploy
// eg. for Non-LlamaCloud: localhost:3000/deployments/chat/ui/api/files/data
// eg. for LlamaCloud: localhost:3000/deployments/chat/ui/api/files/output/llamacloud
const fileServerUrl = `${this.serverUrl}${basePath}/api/files/${this.fileServer}`;

// content in javascript format
const content = `
window.LLAMAINDEX = {
Expand All @@ -115,7 +131,8 @@ export default {
SUGGEST_NEXT_QUESTIONS: ${JSON.stringify(this.suggestNextQuestions)},
UPLOAD_API: ${JSON.stringify(uploadApi)},
DEPLOYMENT: ${JSON.stringify(this.llamaDeploy?.deployment)},
WORKFLOW: ${JSON.stringify(this.llamaDeploy?.workflow)}
WORKFLOW: ${JSON.stringify(this.llamaDeploy?.workflow)},
FILE_SERVER_URL: ${JSON.stringify(fileServerUrl)}
}
`;
fs.writeFileSync(configFile, content);
Expand Down
2 changes: 2 additions & 0 deletions packages/server/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,12 @@ export type UIConfig = {
devMode?: boolean;
enableFileUpload?: boolean;
llamaDeploy?: LlamaDeployConfig;
serverUrl?: string;
};

/** Options accepted by the LlamaIndexServer constructor. */
export type LlamaIndexServerOptions = NextAppOptions & {
  // Workflow factory for handling chat requests; required unless
  // uiConfig.llamaDeploy is set (the server throws otherwise).
  workflow?: WorkflowFactory;
  // UI configuration (starter questions, llamaDeploy, serverUrl, ...).
  uiConfig?: UIConfig;
  // Directory served under `${basePath}/api/files/`; defaults to
  // "output/llamacloud" when LLAMA_CLOUD_API_KEY is set, else "data".
  fileServer?: string;
  // Whether to suggest follow-up questions after each response; defaults to true.
  suggestNextQuestions?: boolean;
};
10 changes: 5 additions & 5 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading