Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add e2e testing for llamacloud datasource #181

Merged
17 commits merged
Jul 25, 2024
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ jobs:
run: pnpm run e2e
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
LLAMA_CLOUD_API_KEY: ${{ secrets.LLAMA_CLOUD_API_KEY }}
working-directory: .

- uses: actions/upload-artifact@v3
Expand Down
16 changes: 13 additions & 3 deletions e2e/basic.spec.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
/* eslint-disable turbo/no-undeclared-env-vars */
import { expect, test } from "@playwright/test";
import { ChildProcess } from "child_process";
import { randomUUID } from "crypto";
import fs from "fs";
import path from "path";
import type {
Expand All @@ -17,20 +18,27 @@ const templateFrameworks: TemplateFramework[] = [
"express",
"fastapi",
];
const dataSources: string[] = ["--no-files", "--example-file"];
const dataSources: string[] = ["--no-files", "--llamacloud"];
const templateUIs: TemplateUI[] = ["shadcn", "html"];
const templatePostInstallActions: TemplatePostInstallAction[] = [
"none",
"runApp",
];

const llamaCloudProjectName = "create-llama";
const llamaCloudIndexName = randomUUID();
marcusschiesser marked this conversation as resolved.
Show resolved Hide resolved

for (const templateType of templateTypes) {
for (const templateFramework of templateFrameworks) {
for (const dataSource of dataSources) {
for (const templateUI of templateUIs) {
for (const templatePostInstallAction of templatePostInstallActions) {
const appType: AppType =
templateFramework === "nextjs" ? "" : "--frontend";
const userMessage =
dataSource !== "--no-files"
? "Physical standard for letters"
: "Hello";
test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
let port: number;
let externalPort: number;
Expand All @@ -55,6 +63,8 @@ for (const templateType of templateTypes) {
port,
externalPort,
templatePostInstallAction,
llamaCloudProjectName,
llamaCloudIndexName,
);
name = result.projectName;
appProcess = result.appProcess;
Expand All @@ -75,7 +85,7 @@ for (const templateType of templateTypes) {
}) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await page.fill("form input", "hello");
await page.fill("form input", userMessage);
const [response] = await Promise.all([
page.waitForResponse(
(res) => {
Expand Down Expand Up @@ -106,7 +116,7 @@ for (const templateType of templateTypes) {
messages: [
{
role: "user",
content: "Hello",
content: userMessage,
},
],
},
Expand Down
13 changes: 8 additions & 5 deletions e2e/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,9 @@ export async function runCreateLlama(
port: number,
externalPort: number,
postInstallAction: TemplatePostInstallAction,
llamaCloudProjectName: string,
llamaCloudIndexName: string,
): Promise<CreateLlamaResult> {
if (!process.env.OPENAI_API_KEY) {
marcusschiesser marked this conversation as resolved.
Show resolved Hide resolved
throw new Error("Setting OPENAI_API_KEY is mandatory to run tests");
}
const name = [
templateType,
templateFramework,
Expand All @@ -95,8 +94,8 @@ export async function runCreateLlama(
templateUI,
"--vector-db",
vectorDb,
"--open-ai-key",
process.env.OPENAI_API_KEY,
process.env.OPENAI_API_KEY ? "--open-ai-key" : "",
process.env.OPENAI_API_KEY || "",
appType,
"--use-pnpm",
"--port",
Expand All @@ -110,12 +109,16 @@ export async function runCreateLlama(
"--no-llama-parse",
"--observability",
"none",
process.env.LLAMA_CLOUD_API_KEY ? "--llama-cloud-key" : "",
process.env.LLAMA_CLOUD_API_KEY || "",
].join(" ");
console.log(`running command '${command}' in ${cwd}`);
const appProcess = exec(command, {
cwd,
env: {
...process.env,
LLAMA_CLOUD_PROJECT_NAME: llamaCloudProjectName,
LLAMA_CLOUD_INDEX_NAME: llamaCloudIndexName,
},
});
appProcess.stderr?.on("data", (data) => {
Expand Down
51 changes: 36 additions & 15 deletions helpers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,31 @@ import {
} from "./types";
import { installTSTemplate } from "./typescript";

const checkForGenerateScript = (
marcusschiesser marked this conversation as resolved.
Show resolved Hide resolved
modelConfig: ModelConfig,
vectorDb?: TemplateVectorDB,
llamaCloudKey?: string,
useLlamaParse?: boolean,
) => {
const missingSettings = [];

if (!modelConfig.isConfigured()) {
missingSettings.push("your model provider API key");
}

const llamaCloudApiKey = llamaCloudKey ?? process.env["LLAMA_CLOUD_API_KEY"];
const isRequiredLlamaCloudKey = useLlamaParse || vectorDb === "llamacloud";
if (isRequiredLlamaCloudKey && !llamaCloudApiKey) {
missingSettings.push("your Llama Cloud key");
marcusschiesser marked this conversation as resolved.
Show resolved Hide resolved
}

if (vectorDb !== "none" && vectorDb !== "llamacloud") {
missingSettings.push("your Vector DB environment variables");
}

return missingSettings;
};

// eslint-disable-next-line max-params
async function generateContextData(
framework: TemplateFramework,
Expand All @@ -38,12 +63,15 @@ async function generateContextData(
? "poetry run generate"
: `${packageManager} run generate`,
)}`;
const modelConfigured = modelConfig.isConfigured();
const llamaCloudKeyConfigured = useLlamaParse
? llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
: true;
const hasVectorDb = vectorDb && vectorDb !== "none";
if (modelConfigured && llamaCloudKeyConfigured && !hasVectorDb) {

const missingSettings = checkForGenerateScript(
modelConfig,
vectorDb,
llamaCloudKey,
useLlamaParse,
);

if (!missingSettings.length) {
// If all the required environment variables are set, run the generate script
if (framework === "fastapi") {
if (isHavingPoetryLockFile()) {
Expand All @@ -63,15 +91,8 @@ async function generateContextData(
}
}

// generate the message of what to do to run the generate script manually
const settings = [];
if (!modelConfigured) settings.push("your model provider API key");
if (!llamaCloudKeyConfigured) settings.push("your Llama Cloud key");
if (hasVectorDb) settings.push("your Vector DB environment variables");
const settingsMessage =
settings.length > 0 ? `After setting ${settings.join(" and ")}, ` : "";
const generateMessage = `run ${runGenerate} to generate the context data.`;
console.log(`\n${settingsMessage}${generateMessage}\n\n`);
const settingsMessage = `After setting ${missingSettings.join(" and ")}, run ${runGenerate} to generate the context data.`;
console.log(`\n${settingsMessage}\n\n`);
}
}

Expand Down
12 changes: 10 additions & 2 deletions index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import prompts from "prompts";
import terminalLink from "terminal-link";
import checkForUpdate from "update-check";
import { createApp } from "./create-app";
import { getDataSources } from "./helpers/datasources";
import { EXAMPLE_FILE, getDataSources } from "./helpers/datasources";
import { getPkgManager } from "./helpers/get-pkg-manager";
import { isFolderEmpty } from "./helpers/is-folder-empty";
import { initializeGlobalAgent } from "./helpers/proxy";
Expand Down Expand Up @@ -194,8 +194,16 @@ if (process.argv.includes("--no-llama-parse")) {
program.askModels = process.argv.includes("--ask-models");
if (process.argv.includes("--no-files")) {
program.dataSources = [];
} else {
} else if (process.argv.includes("--example-file")) {
program.dataSources = getDataSources(program.files, program.exampleFile);
} else if (process.argv.includes("--llamacloud")) {
program.dataSources = [
{
type: "llamacloud",
config: {},
},
EXAMPLE_FILE,
];
}

const packageManager = !!program.useNpm
Expand Down
32 changes: 17 additions & 15 deletions questions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -671,21 +671,23 @@ export const askQuestions = async (

// Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
if (isUsingLlamaCloud || program.useLlamaParse) {
if (ciInfo.isCI) {
program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
} else {
// Ask for LlamaCloud API key
const { llamaCloudKey } = await prompts(
{
type: "text",
name: "llamaCloudKey",
message:
"Please provide your LlamaCloud API key (leave blank to skip):",
},
questionHandlers,
);
program.llamaCloudKey = preferences.llamaCloudKey =
llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
if (!program.llamaCloudKey) { // if already set, don't ask again
if (ciInfo.isCI) {
program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
} else {
// Ask for LlamaCloud API key
const { llamaCloudKey } = await prompts(
{
type: "text",
name: "llamaCloudKey",
message:
"Please provide your LlamaCloud API key (leave blank to skip):",
},
questionHandlers,
);
program.llamaCloudKey = preferences.llamaCloudKey =
llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
}
}
}

Expand Down
Loading