diff --git a/.changeset/red-wasps-know.md b/.changeset/red-wasps-know.md
new file mode 100644
index 000000000000..926b5dbc1b4b
--- /dev/null
+++ b/.changeset/red-wasps-know.md
@@ -0,0 +1,5 @@
+---
+'ai': patch
+---
+
+feat(agent): add message metadata support when inferring UI messages
diff --git a/content/docs/07-reference/01-ai-sdk-core/16-tool-loop-agent.mdx b/content/docs/07-reference/01-ai-sdk-core/16-tool-loop-agent.mdx
index ce18b2d3769b..577da8c050cd 100644
--- a/content/docs/07-reference/01-ai-sdk-core/16-tool-loop-agent.mdx
+++ b/content/docs/07-reference/01-ai-sdk-core/16-tool-loop-agent.mdx
@@ -285,6 +285,8 @@ The `stream()` method returns a `StreamTextResult` object (see [`streamText`](/d
 
 Infers the UI message type for the given agent instance. Useful for type-safe UI and message exchanges.
 
+#### Basic Example
+
 ```ts
 import { ToolLoopAgent, InferAgentUIMessage } from 'ai';
 
@@ -296,6 +298,36 @@ const weatherAgent = new ToolLoopAgent({
 type WeatherAgentUIMessage = InferAgentUIMessage<typeof weatherAgent>;
 ```
 
+#### Example with Message Metadata
+
+You can provide a second type argument to customize the metadata type for each message. This is useful for tracking rich metadata returned by the agent, such as `createdAt`, token usage, or the finish reason.
+
+```ts
+import { ToolLoopAgent, InferAgentUIMessage } from 'ai';
+import { z } from 'zod';
+
+// Example schema for message metadata
+const exampleMetadataSchema = z.object({
+  createdAt: z.number().optional(),
+  model: z.string().optional(),
+  totalTokens: z.number().optional(),
+  finishReason: z.string().optional(),
+});
+type ExampleMetadata = z.infer<typeof exampleMetadataSchema>;
+
+// Define the agent as usual
+const metadataAgent = new ToolLoopAgent({
+  model: 'openai/gpt-4o',
+  // ...other options
+});
+
+// Type-safe UI message type with custom metadata
+type MetadataAgentUIMessage = InferAgentUIMessage<
+  typeof metadataAgent,
+  ExampleMetadata
+>;
+```
+
 ## Examples
 
 ### Basic Agent with Tools
diff --git a/examples/next-openai/app/api/use-chat-message-metadata/example-metadata-schema.ts b/examples/next-openai/agent/openai-metadata-agent.ts
similarity index 52%
rename from examples/next-openai/app/api/use-chat-message-metadata/example-metadata-schema.ts
rename to examples/next-openai/agent/openai-metadata-agent.ts
index 9821f9e2083d..5cb1a76268a5 100644
--- a/examples/next-openai/app/api/use-chat-message-metadata/example-metadata-schema.ts
+++ b/examples/next-openai/agent/openai-metadata-agent.ts
@@ -1,3 +1,6 @@
+import { openai } from '@ai-sdk/openai';
+import { ToolLoopAgent, InferAgentUIMessage } from 'ai';
+
 import { z } from 'zod';
 
 export const exampleMetadataSchema = z.object({
@@ -9,3 +12,12 @@
 });
 
 export type ExampleMetadata = z.infer<typeof exampleMetadataSchema>;
+
+export const openaiMetadataAgent = new ToolLoopAgent({
+  model: openai('gpt-4o'),
+});
+
+export type OpenAIMetadataMessage = InferAgentUIMessage<
+  typeof openaiMetadataAgent,
+  ExampleMetadata
+>;
diff --git a/examples/next-openai/app/api/use-chat-message-metadata/route.ts b/examples/next-openai/app/api/use-chat-message-metadata/route.ts
index 4be1623e0bcd..4c639dc79ff2 100644
--- a/examples/next-openai/app/api/use-chat-message-metadata/route.ts
+++ b/examples/next-openai/app/api/use-chat-message-metadata/route.ts
@@ -1,16 +1,15 @@
-import { openai } from '@ai-sdk/openai';
-import { convertToModelMessages, streamText, UIMessage } from 'ai';
-import { ExampleMetadata } from './example-metadata-schema';
+import { createAgentUIStreamResponse, UIMessage } from 'ai';
+import {
+  ExampleMetadata,
+  openaiMetadataAgent,
+} from '@/agent/openai-metadata-agent';
 
 export async function POST(req: Request) {
   const { messages }: { messages: UIMessage[] } = await req.json();
 
-  const result = streamText({
-    model: openai('gpt-4o'),
-    prompt: convertToModelMessages(messages),
-  });
-
-  return result.toUIMessageStreamResponse({
+  return createAgentUIStreamResponse({
+    agent: openaiMetadataAgent,
+    messages,
     messageMetadata: ({ part }): ExampleMetadata | undefined => {
       // send custom information to the client on start:
       if (part.type === 'start') {
diff --git a/examples/next-openai/app/use-chat-message-metadata/page.tsx b/examples/next-openai/app/use-chat-message-metadata/page.tsx
index f91fd1a4aac3..0870b37943a2 100644
--- a/examples/next-openai/app/use-chat-message-metadata/page.tsx
+++ b/examples/next-openai/app/use-chat-message-metadata/page.tsx
@@ -3,7 +3,7 @@
 import ChatInput from '@/components/chat-input';
 import { useChat } from '@ai-sdk/react';
 import { DefaultChatTransport, UIMessage } from 'ai';
-import { ExampleMetadata } from '../api/use-chat-message-metadata/example-metadata-schema';
+import { ExampleMetadata } from '@/agent/openai-metadata-agent';
 
 type MyMessage = UIMessage<ExampleMetadata>;
 
@@ -16,7 +16,7 @@ export default function Chat() {
   });
 
   return (
-
+
       {messages.map(message => (
         <div key={message.id}>
           {message.role === 'user' ? 'User: ' : 'AI: '}
@@ -46,7 +46,7 @@ export default function Chat() {
       {status === 'submitted' && <div>Loading...</div>}
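
The route handler hunk above is cut off inside the `messageMetadata` callback. A minimal sketch of a complete handler using the new `createAgentUIStreamResponse` call, assuming only the `'start'` branch visible in the hunk and the optional `createdAt` and `model` fields from the docs example; the rest of the body is illustrative, not the PR's actual code:

```ts
import { createAgentUIStreamResponse, UIMessage } from 'ai';
import {
  ExampleMetadata,
  openaiMetadataAgent,
} from '@/agent/openai-metadata-agent';

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  return createAgentUIStreamResponse({
    agent: openaiMetadataAgent,
    messages,
    messageMetadata: ({ part }): ExampleMetadata | undefined => {
      // Attach metadata once, when the assistant message starts streaming.
      if (part.type === 'start') {
        return { createdAt: Date.now(), model: 'gpt-4o' };
      }
      // No extra metadata for other stream parts in this sketch.
      return undefined;
    },
  });
}
```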
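On the client, the exported `OpenAIMetadataMessage` type can drive a fully typed `useChat` hook. A minimal sketch, assuming the default transport and a `totalTokens` metadata field as in the docs schema above; the component name and markup are illustrative, not the example page's actual JSX:

```tsx
'use client';

import { useChat } from '@ai-sdk/react';
import type { OpenAIMetadataMessage } from '@/agent/openai-metadata-agent';

export default function MetadataChat() {
  // message.metadata is typed as the inferred ExampleMetadata shape.
  const { messages } = useChat<OpenAIMetadataMessage>();

  return (
    <div>
      {messages.map(message => (
        <div key={message.id}>
          {message.role === 'user' ? 'User: ' : 'AI: '}
          {message.parts.map((part, index) =>
            part.type === 'text' ? <span key={index}>{part.text}</span> : null,
          )}
          {message.metadata?.totalTokens != null && (
            <span> ({message.metadata.totalTokens} tokens)</span>
          )}
        </div>
      ))}
    </div>
  );
}
```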