Commit

refactor: RunCreateLlamaOptions
thucpn committed Sep 25, 2024
1 parent 152d773 commit 8b1abe6
Showing 6 changed files with 120 additions and 100 deletions.
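The refactor replaces the long positional parameter list of runCreateLlama with a single options object, so every call site names its arguments. A minimal sketch of the pattern (the function and option names below are illustrative, not the real API):

```ts
// Before: positional parameters — callers must remember the order and
// pad unused optional slots with `undefined`.
function createAppPositional(cwd: string, template: string, port?: number): string {
  return `${template} in ${cwd} on ${port ?? 3000}`;
}
createAppPositional("/tmp/app", "streaming", undefined);

// After: one destructured options object — arguments are named at the
// call site and optional fields can simply be omitted.
type CreateAppOptions = { cwd: string; template: string; port?: number };
function createApp({ cwd, template, port }: CreateAppOptions): string {
  return `${template} in ${cwd} on ${port ?? 3000}`;
}
createApp({ cwd: "/tmp/app", template: "streaming" });
```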
18 changes: 9 additions & 9 deletions e2e/extractor_template.spec.ts
@@ -32,16 +32,16 @@ if (
     cwd = await createTestDir();
     frontendPort = Math.floor(Math.random() * 10000) + 10000;
     backendPort = frontendPort + 1;
-    const result = await runCreateLlama(
+    const result = await runCreateLlama({
       cwd,
-      "extractor",
-      "fastapi",
-      "--example-file",
-      "none",
-      frontendPort,
-      backendPort,
-      "runApp",
-    );
+      templateType: "extractor",
+      templateFramework: "fastapi",
+      dataSource: "--example-file",
+      vectorDb: "none",
+      port: frontendPort,
+      externalPort: backendPort,
+      postInstallAction: "runApp",
+    });
     name = result.projectName;
     appProcess = result.appProcess;
   });
8 changes: 4 additions & 4 deletions e2e/multiagent_template.spec.ts
@@ -36,18 +36,18 @@ test.describe(`Test multiagent template ${templateFramework} ${dataSource} ${tem
     port = Math.floor(Math.random() * 10000) + 10000;
     externalPort = port + 1;
     cwd = await createTestDir();
-    const result = await runCreateLlama(
+    const result = await runCreateLlama({
       cwd,
-      "multiagent",
+      templateType: "multiagent",
       templateFramework,
       dataSource,
       vectorDb,
       port,
       externalPort,
-      templatePostInstallAction,
+      postInstallAction: templatePostInstallAction,
       templateUI,
       appType,
-    );
+    });
     name = result.projectName;
     appProcess = result.appProcess;
   });
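Note that arguments such as templateFramework, dataSource, vectorDb, port, and externalPort pass through the refactor unchanged: the local variable names already match the option keys, so ES2015 shorthand property notation applies. A quick illustration (variable values hypothetical):

```ts
const templateFramework = "fastapi";
const dataSource = "--example-file";

// `{ templateFramework, dataSource }` is shorthand for
// `{ templateFramework: templateFramework, dataSource: dataSource }`.
const options = { templateFramework, dataSource };
console.log(options.templateFramework); // "fastapi"
```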
24 changes: 12 additions & 12 deletions e2e/resolve_python_dependencies.spec.ts
@@ -53,21 +53,21 @@ if (
     test(`options: ${optionDescription}`, async () => {
       const cwd = await createTestDir();

-      const result = await runCreateLlama(
+      const result = await runCreateLlama({
         cwd,
-        "streaming",
-        "fastapi",
+        templateType: "streaming",
+        templateFramework: "fastapi",
         dataSource,
         vectorDb,
-        3000, // port
-        8000, // externalPort
-        "none", // postInstallAction
-        undefined, // ui
-        "--no-frontend", // appType
-        undefined, // llamaCloudProjectName
-        undefined, // llamaCloudIndexName
-        tool,
-      );
+        port: 3000, // port
+        externalPort: 8000, // externalPort
+        postInstallAction: "none", // postInstallAction
+        templateUI: undefined, // ui
+        appType: "--no-frontend", // appType
+        llamaCloudProjectName: undefined, // llamaCloudProjectName
+        llamaCloudIndexName: undefined, // llamaCloudIndexName
+        tools: tool,
+      });
       const name = result.projectName;

       // Check if the app folder exists
113 changes: 58 additions & 55 deletions e2e/resolve_ts_dependencies.spec.ts
@@ -19,6 +19,7 @@ if (
   templateFramework == "nextjs" ||
   templateFramework == "express" // test is only relevant for TS projects
 ) {
+  const llamaParseOptions = [true, false];
   // vectorDBs combinations to test
   const vectorDbs: TemplateVectorDB[] = [
     "mongo",
@@ -33,67 +34,69 @@ if (
   ];

   test.describe("Test resolve TS dependencies", () => {
-    for (const vectorDb of vectorDbs) {
-      const optionDescription = `vectorDb: ${vectorDb}, dataSource: ${dataSource}`;
+    for (const llamaParseOpt of llamaParseOptions) {
+      for (const vectorDb of vectorDbs) {
+        const optionDescription = `vectorDb: ${vectorDb}, dataSource: ${dataSource}, llamaParse: ${llamaParseOpt}`;

-      test(`options: ${optionDescription}`, async () => {
-        const cwd = await createTestDir();
+        test(`options: ${optionDescription}`, async () => {
+          const cwd = await createTestDir();

-        const result = await runCreateLlama(
-          cwd,
-          "streaming",
-          templateFramework,
-          dataSource,
-          vectorDb,
-          3000, // port
-          8000, // externalPort
-          "none", // postInstallAction
-          undefined, // ui
-          templateFramework === "nextjs" ? "" : "--no-frontend", // appType
-          undefined, // llamaCloudProjectName
-          undefined, // llamaCloudIndexName
-          undefined, // tools
-          true, // useLlamaParse
-        );
-        const name = result.projectName;
+          const result = await runCreateLlama({
+            cwd: cwd,
+            templateType: "streaming",
+            templateFramework: templateFramework,
+            dataSource: dataSource,
+            vectorDb: vectorDb,
+            port: 3000,
+            externalPort: 8000,
+            postInstallAction: "none",
+            templateUI: undefined,
+            appType: templateFramework === "nextjs" ? "" : "--no-frontend",
+            llamaCloudProjectName: undefined,
+            llamaCloudIndexName: undefined,
+            tools: undefined,
+            useLlamaParse: llamaParseOpt,
+          });
+          const name = result.projectName;

-        // Check if the app folder exists
-        const appDir = path.join(cwd, name);
-        const dirExists = fs.existsSync(appDir);
-        expect(dirExists).toBeTruthy();
+          // Check if the app folder exists
+          const appDir = path.join(cwd, name);
+          const dirExists = fs.existsSync(appDir);
+          expect(dirExists).toBeTruthy();

-        // Install dependencies using pnpm
-        try {
-          const { stderr: installStderr } = await execAsync(
-            "pnpm install --prefer-offline",
-            {
-              cwd: appDir,
-            },
-          );
-          expect(installStderr).toBeFalsy();
-        } catch (error) {
-          console.error("Error installing dependencies:", error);
-          throw error;
-        }
+          // Install dependencies using pnpm
+          try {
+            const { stderr: installStderr } = await execAsync(
+              "pnpm install --prefer-offline",
+              {
+                cwd: appDir,
+              },
+            );
+            expect(installStderr).toBeFalsy();
+          } catch (error) {
+            console.error("Error installing dependencies:", error);
+            throw error;
+          }

-        // Run tsc type check and capture the output
-        try {
-          const { stdout, stderr } = await execAsync(
-            "pnpm exec tsc -b --diagnostics",
-            {
-              cwd: appDir,
-            },
-          );
-          // Check if there's any error output
-          expect(stderr).toBeFalsy();
+          // Run tsc type check and capture the output
+          try {
+            const { stdout, stderr } = await execAsync(
+              "pnpm exec tsc -b --diagnostics",
+              {
+                cwd: appDir,
+              },
+            );
+            // Check if there's any error output
+            expect(stderr).toBeFalsy();

-          // Log the stdout for debugging purposes
-          console.log("TypeScript type-check output:", stdout);
-        } catch (error) {
-          console.error("Error running tsc:", error);
-          throw error;
-        }
-      });
+            // Log the stdout for debugging purposes
+            console.log("TypeScript type-check output:", stdout);
+          } catch (error) {
+            console.error("Error running tsc:", error);
+            throw error;
+          }
+        });
+      }
     }
   });
 }
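The install and type-check steps above go through an execAsync helper that sits outside this diff; a typical definition (an assumption here, not shown in the commit) promisifies Node's child_process.exec:

```ts
import { exec } from "child_process";
import { promisify } from "util";

// Promisified exec: resolves with { stdout, stderr } when the command
// exits with code 0 and rejects otherwise — which is why the tests wrap
// it in try/catch and separately assert that stderr is empty.
const execAsync = promisify(exec);
```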
8 changes: 4 additions & 4 deletions e2e/streaming_template.spec.ts
@@ -39,20 +39,20 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp
     port = Math.floor(Math.random() * 10000) + 10000;
     externalPort = port + 1;
     cwd = await createTestDir();
-    const result = await runCreateLlama(
+    const result = await runCreateLlama({
       cwd,
-      "streaming",
+      templateType: "streaming",
       templateFramework,
       dataSource,
       vectorDb,
       port,
       externalPort,
-      templatePostInstallAction,
+      postInstallAction: templatePostInstallAction,
       templateUI,
       appType,
       llamaCloudProjectName,
       llamaCloudIndexName,
-    );
+    });
     name = result.projectName;
     appProcess = result.appProcess;
   });
49 changes: 33 additions & 16 deletions e2e/utils.ts
@@ -18,23 +18,40 @@ export type CreateLlamaResult = {
   appProcess: ChildProcess;
 };

+export type RunCreateLlamaOptions = {
+  cwd: string;
+  templateType: TemplateType;
+  templateFramework: TemplateFramework;
+  dataSource: string;
+  vectorDb: TemplateVectorDB;
+  port: number;
+  externalPort: number;
+  postInstallAction: TemplatePostInstallAction;
+  templateUI?: TemplateUI;
+  appType?: AppType;
+  llamaCloudProjectName?: string;
+  llamaCloudIndexName?: string;
+  tools?: string;
+  useLlamaParse?: boolean;
+};
+
 // eslint-disable-next-line max-params
-export async function runCreateLlama(
-  cwd: string,
-  templateType: TemplateType,
-  templateFramework: TemplateFramework,
-  dataSource: string,
-  vectorDb: TemplateVectorDB,
-  port: number,
-  externalPort: number,
-  postInstallAction: TemplatePostInstallAction,
-  templateUI?: TemplateUI,
-  appType?: AppType,
-  llamaCloudProjectName?: string,
-  llamaCloudIndexName?: string,
-  tools?: string,
-  useLlamaParse?: boolean,
-): Promise<CreateLlamaResult> {
+export async function runCreateLlama({
+  cwd,
+  templateType,
+  templateFramework,
+  dataSource,
+  vectorDb,
+  port,
+  externalPort,
+  postInstallAction,
+  templateUI,
+  appType,
+  llamaCloudProjectName,
+  llamaCloudIndexName,
+  tools,
+  useLlamaParse,
+}: RunCreateLlamaOptions): Promise<CreateLlamaResult> {
   if (!process.env.OPENAI_API_KEY || !process.env.LLAMA_CLOUD_API_KEY) {
     throw new Error(
       "Setting the OPENAI_API_KEY and LLAMA_CLOUD_API_KEY is mandatory to run tests",
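With RunCreateLlamaOptions in place, the required/optional split is explicit in the type, and callers can omit optional arguments instead of padding with positional undefineds. A usage sketch (the values are illustrative, assembled from the tests above; this is not a new call site in the commit):

```ts
const result = await runCreateLlama({
  cwd: "/tmp/e2e-test", // illustrative working directory
  templateType: "streaming",
  templateFramework: "fastapi",
  dataSource: "--example-file",
  vectorDb: "none",
  port: 3000,
  externalPort: 8000,
  postInstallAction: "none",
  // templateUI, appType, tools, useLlamaParse, etc. are optional and omitted
});
console.log(result.projectName);
```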
