From eed0b0415d4183b5303a3b2938ececedca62fc52 Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Fri, 23 Aug 2024 22:56:18 +0700
Subject: [PATCH] fix: use metadata mode LLM for generating context (#1133)

Co-authored-by: Marcus Schiesser
---
 .changeset/tough-cups-doubt.md                             | 5 +++++
 packages/llamaindex/src/engines/chat/ContextChatEngine.ts  | 3 ++-
 .../llamaindex/src/engines/chat/DefaultContextGenerator.ts | 7 ++++++-
 3 files changed, 13 insertions(+), 2 deletions(-)
 create mode 100644 .changeset/tough-cups-doubt.md

diff --git a/.changeset/tough-cups-doubt.md b/.changeset/tough-cups-doubt.md
new file mode 100644
index 0000000000..579b0f7624
--- /dev/null
+++ b/.changeset/tough-cups-doubt.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+fix: use LLM metadata mode for generating context of ContextChatEngine
diff --git a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
index 792c31f45e..ea1f043506 100644
--- a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
+++ b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
@@ -4,7 +4,7 @@ import type {
   MessageContent,
   MessageType,
 } from "@llamaindex/core/llms";
-import { EngineResponse } from "@llamaindex/core/schema";
+import { EngineResponse, MetadataMode } from "@llamaindex/core/schema";
 import {
   extractText,
   streamConverter,
@@ -53,6 +53,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
       contextSystemPrompt: init?.contextSystemPrompt,
       nodePostprocessors: init?.nodePostprocessors,
       contextRole: init?.contextRole,
+      metadataMode: MetadataMode.LLM,
     });
     this.systemPrompt = init.systemPrompt;
   }
diff --git a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
index 3983908004..2b5edba05b 100644
--- a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
+++ b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
@@ -1,5 +1,5 @@
 import type { MessageContent, MessageType } from "@llamaindex/core/llms";
-import { type NodeWithScore } from "@llamaindex/core/schema";
+import { MetadataMode, type NodeWithScore } from "@llamaindex/core/schema";
 import type { BaseNodePostprocessor } from "../../postprocessors/index.js";
 import type { ContextSystemPrompt } from "../../Prompt.js";
 import { defaultContextSystemPrompt } from "../../Prompt.js";
@@ -16,12 +16,14 @@ export class DefaultContextGenerator
   contextSystemPrompt: ContextSystemPrompt;
   nodePostprocessors: BaseNodePostprocessor[];
   contextRole: MessageType;
+  metadataMode?: MetadataMode;
 
   constructor(init: {
     retriever: BaseRetriever;
     contextSystemPrompt?: ContextSystemPrompt;
     nodePostprocessors?: BaseNodePostprocessor[];
    contextRole?: MessageType;
+    metadataMode?: MetadataMode;
   }) {
     super();
 
@@ -30,6 +32,7 @@ export class DefaultContextGenerator
       init?.contextSystemPrompt ?? defaultContextSystemPrompt;
     this.nodePostprocessors = init.nodePostprocessors || [];
     this.contextRole = init.contextRole ?? "system";
+    this.metadataMode = init.metadataMode ?? MetadataMode.NONE;
   }
 
   protected _getPrompts(): { contextSystemPrompt: ContextSystemPrompt } {
@@ -75,6 +78,8 @@ export class DefaultContextGenerator
     const content = await createMessageContent(
       this.contextSystemPrompt,
       nodes.map((r) => r.node),
+      undefined,
+      this.metadataMode,
     );
 
     return {
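
For reviewers, a minimal sketch of the behavior this patch changes, not part of the diff itself. It assumes the public `llamaindex` package exports (`ContextChatEngine`, `Document`, `VectorStoreIndex`) and a default LLM configured via the environment (e.g. `OPENAI_API_KEY`); the document text and metadata keys are illustrative. Before this patch, `DefaultContextGenerator` fell back to `MetadataMode.NONE`, so node metadata never reached the context prompt; `ContextChatEngine` now passes `MetadataMode.LLM`, meaning nodes are rendered roughly as `node.getContent(MetadataMode.LLM)`, which includes metadata except keys listed in `excludedLlmMetadataKeys`.

```ts
import { ContextChatEngine, Document, VectorStoreIndex } from "llamaindex";

async function main() {
  // "private" is excluded from LLM prompts but still available elsewhere
  // (e.g. for embeddings), via excludedLlmMetadataKeys.
  const doc = new Document({
    text: "LlamaIndex.TS is a data framework for LLM applications.",
    metadata: { fileName: "intro.md", private: "internal-only" },
    excludedLlmMetadataKeys: ["private"],
  });

  const index = await VectorStoreIndex.fromDocuments([doc]);

  // With this patch, the context generated from retrieved nodes now carries
  // metadata like fileName (but not "private") into the system prompt.
  const chatEngine = new ContextChatEngine({
    retriever: index.asRetriever(),
  });

  const response = await chatEngine.chat({
    message: "What is LlamaIndex.TS?",
  });
  console.log(response.response);
}

main().catch(console.error);
```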