Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat (rsc): add streamUI onFinish callback #1920

Merged
merged 13 commits into from
Jun 19, 2024
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/violet-horses-accept.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'ai': patch
---

feat (rsc): add streamUI onFinish callback
71 changes: 71 additions & 0 deletions content/docs/07-reference/ai-sdk-rsc/01-stream-ui.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,77 @@ A helper function to create a streamable UI from LLM providers. This function is
},
],
},
{
name: 'onFinish',
type: '(result: OnFinishResult) => void',
isOptional: true,
description:
'Callback that is called when the LLM response and all request tool executions (for tools that have a `generate` function) are finished.',
properties: [
{
type: 'OnFinishResult',
parameters: [
{
name: 'usage',
type: 'TokenUsage',
description: 'The token usage of the generated text.',
properties: [
{
type: 'TokenUsage',
parameters: [
{
name: 'promptTokens',
type: 'number',
description: 'The total number of tokens in the prompt.',
},
{
name: 'completionTokens',
type: 'number',
description:
'The total number of tokens in the completion.',
},
{
name: 'totalTokens',
type: 'number',
description: 'The total number of tokens generated.',
},
],
},
],
},
{
name: 'value',
type: 'ReactNode',
description: 'The final ui node that was generated.',
},
{
name: 'warnings',
              type: 'CallWarning[] | undefined',
description:
'Warnings from the model provider (e.g. unsupported settings).',
},
{
name: 'rawResponse',
type: 'RawResponse',
description: 'Optional raw response data.',
properties: [
{
type: 'RawResponse',
parameters: [
{
                      name: 'headers',
optional: true,
type: 'Record<string, string>',
description: 'Response headers.',
},
],
},
],
},
],
},
],
},
]}
/>

Expand Down
131 changes: 131 additions & 0 deletions packages/core/rsc/stream-ui/__snapshots__/stream-ui.ui.test.tsx.snap
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html

exports[`rsc - streamUI() > should emit React Nodes with async render function 1`] = `
{
"children": {},
"props": {
"s": {
"curr": undefined,
"next": {
"curr": <div>
Weather
</div>,
},
"type": Symbol(ui.streamable.value),
},
},
"type": "InternalStreamableUIClient",
}
`;

exports[`rsc - streamUI() > should emit React Nodes with async streamUI function 1`] = `
{
"children": {},
"props": {
"s": {
"curr": undefined,
"next": {
"curr": <div>
Weather
</div>,
},
"type": Symbol(ui.streamable.value),
},
},
"type": "InternalStreamableUIClient",
}
`;

exports[`rsc - streamUI() > should emit React Nodes with generator render function 1`] = `
{
"children": {},
"props": {
"s": {
"curr": undefined,
"next": {
"curr": <div>
Loading...
</div>,
"next": {
"curr": <div>
Weather
</div>,
},
},
"type": Symbol(ui.streamable.value),
},
},
"type": "InternalStreamableUIClient",
}
`;

exports[`rsc - streamUI() > should emit React Nodes with generator streamUI function 1`] = `
{
"children": {},
"props": {
"s": {
"curr": undefined,
"next": {
"curr": <div>
Loading...
</div>,
"next": {
"curr": <div>
Weather
</div>,
},
},
"type": Symbol(ui.streamable.value),
},
},
"type": "InternalStreamableUIClient",
}
`;

exports[`rsc - streamUI() > should emit React Nodes with sync render function 1`] = `
{
"children": {},
"props": {
"s": {
"curr": undefined,
"next": {
"curr": <div>
Weather
</div>,
},
"type": Symbol(ui.streamable.value),
},
},
"type": "InternalStreamableUIClient",
}
`;

exports[`rsc - streamUI() > should emit React Nodes with sync streamUI function 1`] = `
{
"children": {},
"props": {
"s": {
"curr": undefined,
"next": {
"curr": <div>
Weather
</div>,
},
"type": Symbol(ui.streamable.value),
},
},
"type": "InternalStreamableUIClient",
}
`;

exports[`rsc - streamUI() onFinish callback > should contain final React node 1`] = `
<InternalStreamableUIClient
s={
{
"curr": undefined,
"next": Promise {},
"type": Symbol(ui.streamable.value),
}
}
/>
`;
47 changes: 44 additions & 3 deletions packages/core/rsc/stream-ui/stream-ui.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,14 @@ import { getValidatedPrompt } from '../../core/prompt/get-validated-prompt';
import { prepareCallSettings } from '../../core/prompt/prepare-call-settings';
import { prepareToolsAndToolChoice } from '../../core/prompt/prepare-tools-and-tool-choice';
import { Prompt } from '../../core/prompt/prompt';
import { CoreToolChoice } from '../../core/types';
import { CallWarning, CoreToolChoice, FinishReason } from '../../core/types';
import { retryWithExponentialBackoff } from '../../core/util/retry-with-exponential-backoff';
import { createStreamableUI } from '../streamable';
import { createResolvablePromise } from '../utils';
import {
TokenUsage,
calculateTokenUsage,
} from '../../core/generate-text/token-usage';

type Streamable = ReactNode | Promise<ReactNode>;

Expand Down Expand Up @@ -84,6 +88,7 @@ export async function streamUI<
abortSignal,
initial,
text,
onFinish,
...settings
}: CallSettings &
Prompt & {
Expand All @@ -100,12 +105,42 @@ export async function streamUI<
};

/**
The tool choice strategy. Default: 'auto'.
* The tool choice strategy. Default: 'auto'.
*/
toolChoice?: CoreToolChoice<TOOLS>;

text?: RenderText;
initial?: ReactNode;
    /**
     * Callback that is called when the LLM response and all request tool executions (for tools that have a `generate` function) are finished.
     */
onFinish?: (event: {
/**
* The reason why the generation finished.
*/
finishReason: FinishReason;
/**
* The token usage of the generated response.
*/
usage: TokenUsage;
/**
* The final ui node that was generated.
*/
value: ReactNode;
/**
* Warnings from the model provider (e.g. unsupported settings)
*/
warnings?: CallWarning[];
/**
* Optional raw response data.
*/
rawResponse?: {
/**
* Response headers.
*/
headers?: Record<string, string>;
};
}) => Promise<void> | void;
}): Promise<RenderResult> {
// TODO: Remove these errors after the experimental phase.
if (typeof model === 'string') {
Expand Down Expand Up @@ -311,7 +346,13 @@ The tool choice strategy. Default: 'auto'.
}

case 'finish': {
// Nothing to do here.
onFinish?.({
finishReason: value.finishReason,
usage: calculateTokenUsage(value.usage),
value: ui.value,
warnings: result.warnings,
rawResponse: result.rawResponse,
});
}
}
}
Expand Down
Loading
Loading