@@ -12,7 +12,6 @@ import type { LlamaDeployConfig, LlamaIndexServerOptions } from "./types";
1212const nextDir = path . join ( __dirname , ".." , "server" ) ;
1313const configFile = path . join ( __dirname , ".." , "server" , "public" , "config.js" ) ;
1414const nextConfigFile = path . join ( nextDir , "next.config.ts" ) ;
15- const layoutFile = path . join ( nextDir , "app" , "layout.tsx" ) ;
1615const constantsFile = path . join ( nextDir , "app" , "constants.ts" ) ;
1716const dev = process . env . NODE_ENV !== "production" ;
1817
@@ -24,6 +23,8 @@ export class LlamaIndexServer {
2423 layoutDir : string ;
2524 suggestNextQuestions : boolean ;
2625 llamaDeploy ?: LlamaDeployConfig | undefined ;
26+ serverUrl : string ;
27+ fileServer : string ;
2728
2829 constructor ( options : LlamaIndexServerOptions ) {
2930 const { workflow, suggestNextQuestions, ...nextAppOptions } = options ;
@@ -33,17 +34,27 @@ export class LlamaIndexServer {
3334 this . componentsDir = options . uiConfig ?. componentsDir ;
3435 this . layoutDir = options . uiConfig ?. layoutDir ?? "layout" ;
3536 this . suggestNextQuestions = suggestNextQuestions ?? true ;
37+
3638 this . llamaDeploy = options . uiConfig ?. llamaDeploy ;
39+ this . serverUrl = options . uiConfig ?. serverUrl || "" ; // use current host if not set
40+
41+ const isUsingLlamaCloud = ! ! getEnv ( "LLAMA_CLOUD_API_KEY" ) ;
42+ const defaultFileServer = isUsingLlamaCloud ? "output/llamacloud" : "data" ;
43+ this . fileServer = options . fileServer ?? defaultFileServer ;
3744
3845 if ( this . llamaDeploy ) {
3946 if ( ! this . llamaDeploy . deployment || ! this . llamaDeploy . workflow ) {
4047 throw new Error (
4148 "LlamaDeploy requires deployment and workflow to be set" ,
4249 ) ;
4350 }
44- if ( options . uiConfig ?. devMode ) {
45- // workflow file is in llama-deploy src, so we should disable devmode
46- throw new Error ( "Devmode is not supported when enabling LlamaDeploy" ) ;
51+ const { devMode, llamaCloudIndexSelector, enableFileUpload } =
52+ options . uiConfig ?? { } ;
53+
54+ if ( devMode || llamaCloudIndexSelector || enableFileUpload ) {
55+ throw new Error (
56+ "`devMode`, `llamaCloudIndexSelector`, and `enableFileUpload` are not supported when enabling LlamaDeploy" ,
57+ ) ;
4758 }
4859 } else {
4960 // if llamaDeploy is not set but workflowFactory is not defined, we should throw an error
@@ -103,6 +114,11 @@ export default {
103114 const enableFileUpload = uiConfig ?. enableFileUpload ?? false ;
104115 const uploadApi = enableFileUpload ? `${ basePath } /api/files` : undefined ;
105116
117+ // construct file server url for LlamaDeploy
118+ // e.g. for Non-LlamaCloud: localhost:3000/deployments/chat/ui/api/files/data
119+ // e.g. for LlamaCloud: localhost:3000/deployments/chat/ui/api/files/output/llamacloud
120+ const fileServerUrl = `${ this . serverUrl } ${ basePath } /api/files/${ this . fileServer } ` ;
121+
106122 // content in javascript format
107123 const content = `
108124 window.LLAMAINDEX = {
@@ -115,7 +131,8 @@ export default {
115131 SUGGEST_NEXT_QUESTIONS: ${ JSON . stringify ( this . suggestNextQuestions ) } ,
116132 UPLOAD_API: ${ JSON . stringify ( uploadApi ) } ,
117133 DEPLOYMENT: ${ JSON . stringify ( this . llamaDeploy ?. deployment ) } ,
118- WORKFLOW: ${ JSON . stringify ( this . llamaDeploy ?. workflow ) }
134+ WORKFLOW: ${ JSON . stringify ( this . llamaDeploy ?. workflow ) } ,
135+ FILE_SERVER_URL: ${ JSON . stringify ( fileServerUrl ) }
119136 }
120137 ` ;
121138 fs . writeFileSync ( configFile , content ) ;
0 commit comments