Skip to content

Commit 8947f8b

Browse files
committed
fix: llamaindex stream to node response
1 parent ab97e74 commit 8947f8b

File tree

1 file changed

+18
-3
lines changed

1 file changed

+18
-3
lines changed

templates/types/streaming/express/src/controllers/chat.controller.ts

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { LlamaIndexAdapter, Message, StreamData, streamToResponse } from "ai";
1+
import { LlamaIndexAdapter, Message, StreamData } from "ai";
22
import { Request, Response } from "express";
33
import { ChatMessage, Settings } from "llamaindex";
44
import { createChatEngine } from "./engine/chat";
@@ -59,8 +59,23 @@ export const chat = async (req: Request, res: Response) => {
5959
});
6060
};
6161

62-
const stream = LlamaIndexAdapter.toDataStream(response, { onCompletion });
63-
return streamToResponse(stream, res, {}, vercelStreamData);
62+
const streamResponse = LlamaIndexAdapter.toDataStreamResponse(response, {
63+
data: vercelStreamData,
64+
callbacks: { onCompletion },
65+
});
66+
// TODO: move to LlamaIndexAdapter
67+
const reader = streamResponse.body?.getReader();
68+
function read() {
69+
reader?.read().then(({ done, value }: { done: boolean; value?: any }) => {
70+
if (done) {
71+
res.end();
72+
return;
73+
}
74+
res.write(value);
75+
read();
76+
});
77+
}
78+
read();
6479
} catch (error) {
6580
console.error("[LlamaIndex]", error);
6681
return res.status(500).json({

0 commit comments

Comments (0)