Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/fluffy-otters-refuse.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

Add artifact use case for Typescript template
5 changes: 5 additions & 0 deletions .changeset/yummy-moles-lick.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

Update typescript use cases to use the new workflow engine
4 changes: 2 additions & 2 deletions packages/create-llama/helpers/typescript.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ const installLlamaIndexServerTemplate = async ({
process.exit(1);
}

await copy("workflow.ts", path.join(root, "src", "app"), {
await copy("*.ts", path.join(root, "src", "app"), {
parents: true,
cwd: path.join(
templatesDir,
Expand Down Expand Up @@ -516,7 +516,7 @@ async function updatePackageJson({
if (backend) {
packageJson.dependencies = {
...packageJson.dependencies,
"@llamaindex/readers": "^2.0.0",
"@llamaindex/readers": "^3.0.0",
};

if (vectorDb && vectorDb in vectorDbDependencies) {
Expand Down
28 changes: 13 additions & 15 deletions packages/create-llama/questions/simple.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,21 +62,19 @@ export const askSimpleQuestions = async (

let useLlamaCloud = false;

if (appType !== "artifacts") {
const { language: newLanguage } = await prompts(
{
type: "select",
name: "language",
message: "What language do you want to use?",
choices: [
{ title: "Python (FastAPI)", value: "fastapi" },
{ title: "Typescript (NextJS)", value: "nextjs" },
],
},
questionHandlers,
);
language = newLanguage;
}
const { language: newLanguage } = await prompts(
{
type: "select",
name: "language",
message: "What language do you want to use?",
choices: [
{ title: "Python (FastAPI)", value: "fastapi" },
{ title: "Typescript (NextJS)", value: "nextjs" },
],
},
questionHandlers,
);
language = newLanguage;

const { useLlamaCloud: newUseLlamaCloud } = await prompts(
{
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import { Document, LLamaCloudFileService, VectorStoreIndex } from "llamaindex";
import { LlamaCloudIndex } from "llamaindex/cloud/LlamaCloudIndex";
import {
Document,
LLamaCloudFileService,
LlamaCloudIndex,
VectorStoreIndex,
} from "llamaindex";
import { DocumentFile } from "../streaming/annotations";
import { parseFile, storeFile } from "./helper";
import { runPipeline } from "./pipeline";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ dependencies = [
"python-dotenv>=1.0.0",
"pydantic<2.10",
"llama-index>=0.12.1",
"llama-parse>=0.6.21,<0.7.0",
"cachetools>=5.3.3",
"reflex>=0.6.2.post1",
]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ dependencies = [
"python-dotenv>=1.0.0",
"pydantic<2.10",
"llama-index>=0.12.1",
"llama-parse>=0.6.21,<0.7.0",
"cachetools>=5.3.3",
"reflex>=0.6.2.post1",
]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { LlamaCloudIndex } from "llamaindex/cloud/LlamaCloudIndex";
import { LlamaCloudIndex } from "llamaindex";

type LlamaCloudDataSourceParams = {
llamaCloudPipeline?: {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { LlamaCloudIndex } from "llamaindex/cloud/LlamaCloudIndex";
import { LlamaCloudIndex } from "llamaindex";

type LlamaCloudDataSourceParams = {
llamaCloudPipeline?: {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { agent } from "llamaindex";
import { agent } from "@llamaindex/workflow";
import { getIndex } from "./data";

export const workflowFactory = async (reqBody: any) => {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
This is a [LlamaIndex](https://www.llamaindex.ai/) project bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).

## Getting Started

First, install the dependencies:

```
npm install
```

Next, run the development server:

```
npm run dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the chat UI.

## Configure LLM and Embedding Model

You can configure [LLM model](https://ts.llamaindex.ai/docs/llamaindex/modules/llms) in the [settings file](src/app/settings.ts).

## Custom UI Components

We have a custom component located in `components/ui_event.jsx`. This is used to display the state of artifact workflows in UI. You can regenerate a new UI component from the workflow event schema by running the following command:

```
npm run generate:ui
```

## Use Case

We have prepared two artifact workflows:

- [Code Workflow](app/code_workflow.ts): To generate code and display it in the UI like Vercel's v0.
- [Document Workflow](app/document_workflow.ts): Generate and update a document like OpenAI's canvas.

Modify the factory method in [`workflow.ts`](app/workflow.ts) to decide which artifact workflow to use. Without any changes, the Code Workflow is used.

You can start by sending a request on the [chat UI](http://localhost:3000), or you can test the `/api/chat` endpoint with the following curl request:

```shell
curl --location 'localhost:3000/api/chat' \
--header 'Content-Type: application/json' \
--data '{ "messages": [{ "role": "user", "content": "Generate a simple calculator app" }] }'
```

## Learn More

To learn more about LlamaIndex, take a look at the following resources:

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
- [LlamaIndexTS Documentation](https://ts.llamaindex.ai/docs/llamaindex) - learn about LlamaIndex (Typescript features).
- [Workflows Introduction](https://ts.llamaindex.ai/docs/llamaindex/modules/workflows) - learn about LlamaIndexTS workflows.

You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
Loading
Loading