Skip to content

Commit e99d331

Browse files
Copilot and pranaygp committed
Add CallSettings properties to DurableAgent
Port the simpler CallSettings properties from the AI SDK Agent class (temperature, maxOutputTokens, topP, topK, presencePenalty, frequencyPenalty, stopSequences, seed) to bring DurableAgent closer to functional parity with the ToolLoopAgent/Agent classes.

Co-authored-by: pranaygp <1797812+pranaygp@users.noreply.github.com>
1 parent 628c0a0 commit e99d331

File tree

4 files changed

+300
-2
lines changed

4 files changed

+300
-2
lines changed

packages/ai/src/agent/do-stream-step.ts

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,14 +12,32 @@ export async function doStreamStep(
1212
conversationPrompt: LanguageModelV2Prompt,
1313
modelId: string,
1414
writable: WritableStream<UIMessageChunk>,
15-
tools?: LanguageModelV2CallOptions['tools']
15+
tools?: LanguageModelV2CallOptions['tools'],
16+
callOptions?: {
17+
temperature?: number;
18+
maxOutputTokens?: number;
19+
topP?: number;
20+
topK?: number;
21+
presencePenalty?: number;
22+
frequencyPenalty?: number;
23+
stopSequences?: string[];
24+
seed?: number;
25+
}
1626
) {
1727
'use step';
1828

1929
const model = gateway(modelId);
2030
const result = await model.doStream({
2131
prompt: conversationPrompt,
2232
tools,
33+
temperature: callOptions?.temperature,
34+
maxOutputTokens: callOptions?.maxOutputTokens,
35+
topP: callOptions?.topP,
36+
topK: callOptions?.topK,
37+
presencePenalty: callOptions?.presencePenalty,
38+
frequencyPenalty: callOptions?.frequencyPenalty,
39+
stopSequences: callOptions?.stopSequences,
40+
seed: callOptions?.seed,
2341
});
2442

2543
let finish: FinishPart | undefined;
Lines changed: 169 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,169 @@
1+
/**
2+
* Tests for DurableAgent
3+
*
4+
* These tests verify that the DurableAgent constructor properly accepts
5+
* and stores configuration options from the AI SDK Agent class.
6+
*/
7+
import { describe, expect, it } from 'vitest';
8+
import { DurableAgent } from './durable-agent.js';
9+
10+
describe('DurableAgent', () => {
11+
describe('constructor', () => {
12+
it('should accept basic required options', () => {
13+
const agent = new DurableAgent({
14+
model: 'anthropic/claude-opus',
15+
tools: {},
16+
});
17+
18+
expect(agent).toBeDefined();
19+
});
20+
21+
it('should accept system prompt', () => {
22+
const agent = new DurableAgent({
23+
model: 'anthropic/claude-opus',
24+
tools: {},
25+
system: 'You are a helpful assistant.',
26+
});
27+
28+
expect(agent).toBeDefined();
29+
});
30+
31+
it('should accept temperature option', () => {
32+
const agent = new DurableAgent({
33+
model: 'anthropic/claude-opus',
34+
tools: {},
35+
temperature: 0.7,
36+
});
37+
38+
expect(agent).toBeDefined();
39+
});
40+
41+
it('should accept maxOutputTokens option', () => {
42+
const agent = new DurableAgent({
43+
model: 'anthropic/claude-opus',
44+
tools: {},
45+
maxOutputTokens: 1000,
46+
});
47+
48+
expect(agent).toBeDefined();
49+
});
50+
51+
it('should accept topP option', () => {
52+
const agent = new DurableAgent({
53+
model: 'anthropic/claude-opus',
54+
tools: {},
55+
topP: 0.9,
56+
});
57+
58+
expect(agent).toBeDefined();
59+
});
60+
61+
it('should accept topK option', () => {
62+
const agent = new DurableAgent({
63+
model: 'anthropic/claude-opus',
64+
tools: {},
65+
topK: 40,
66+
});
67+
68+
expect(agent).toBeDefined();
69+
});
70+
71+
it('should accept presencePenalty option', () => {
72+
const agent = new DurableAgent({
73+
model: 'anthropic/claude-opus',
74+
tools: {},
75+
presencePenalty: 0.5,
76+
});
77+
78+
expect(agent).toBeDefined();
79+
});
80+
81+
it('should accept frequencyPenalty option', () => {
82+
const agent = new DurableAgent({
83+
model: 'anthropic/claude-opus',
84+
tools: {},
85+
frequencyPenalty: 0.5,
86+
});
87+
88+
expect(agent).toBeDefined();
89+
});
90+
91+
it('should accept stopSequences option', () => {
92+
const agent = new DurableAgent({
93+
model: 'anthropic/claude-opus',
94+
tools: {},
95+
stopSequences: ['STOP', 'END'],
96+
});
97+
98+
expect(agent).toBeDefined();
99+
});
100+
101+
it('should accept seed option', () => {
102+
const agent = new DurableAgent({
103+
model: 'anthropic/claude-opus',
104+
tools: {},
105+
seed: 12345,
106+
});
107+
108+
expect(agent).toBeDefined();
109+
});
110+
111+
it('should accept all options together', () => {
112+
const agent = new DurableAgent({
113+
model: 'anthropic/claude-opus',
114+
tools: {},
115+
system: 'You are a helpful assistant.',
116+
temperature: 0.7,
117+
maxOutputTokens: 1000,
118+
topP: 0.9,
119+
topK: 40,
120+
presencePenalty: 0.5,
121+
frequencyPenalty: 0.3,
122+
stopSequences: ['STOP', 'END'],
123+
seed: 12345,
124+
});
125+
126+
expect(agent).toBeDefined();
127+
});
128+
129+
it('should accept tools with proper structure', () => {
130+
const agent = new DurableAgent({
131+
model: 'anthropic/claude-opus',
132+
tools: {
133+
testTool: {
134+
description: 'A test tool',
135+
inputSchema: {
136+
type: 'object',
137+
properties: {},
138+
},
139+
execute: async () => 'result',
140+
},
141+
},
142+
});
143+
144+
expect(agent).toBeDefined();
145+
});
146+
});
147+
148+
describe('methods', () => {
149+
it('should have generate method', () => {
150+
const agent = new DurableAgent({
151+
model: 'anthropic/claude-opus',
152+
tools: {},
153+
});
154+
155+
expect(agent.generate).toBeDefined();
156+
expect(typeof agent.generate).toBe('function');
157+
});
158+
159+
it('should have stream method', () => {
160+
const agent = new DurableAgent({
161+
model: 'anthropic/claude-opus',
162+
tools: {},
163+
});
164+
165+
expect(agent.stream).toBeDefined();
166+
expect(typeof agent.stream).toBe('function');
167+
});
168+
});
169+
});

packages/ai/src/agent/durable-agent.ts

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,67 @@ export interface DurableAgentOptions {
3333
* Optional system prompt to guide the agent's behavior.
3434
*/
3535
system?: string;
36+
37+
/**
38+
* Temperature setting. The range depends on the provider and model.
39+
*
40+
* It is recommended to set either `temperature` or `topP`, but not both.
41+
*/
42+
temperature?: number;
43+
44+
/**
45+
* Maximum number of tokens to generate.
46+
*/
47+
maxOutputTokens?: number;
48+
49+
/**
50+
* Nucleus sampling. This is a number between 0 and 1.
51+
*
52+
* E.g. 0.1 would mean that only tokens with the top 10% probability mass
53+
* are considered.
54+
*
55+
* It is recommended to set either `temperature` or `topP`, but not both.
56+
*/
57+
topP?: number;
58+
59+
/**
60+
* Only sample from the top K options for each subsequent token.
61+
*
62+
* Used to remove "long tail" low probability responses.
63+
* Recommended for advanced use cases only. You usually only need to use temperature.
64+
*/
65+
topK?: number;
66+
67+
/**
68+
* Presence penalty setting. It affects the likelihood of the model to
69+
* repeat information that is already in the prompt.
70+
*
71+
* The presence penalty is a number between -1 (increase repetition)
72+
* and 1 (maximum penalty, decrease repetition). 0 means no penalty.
73+
*/
74+
presencePenalty?: number;
75+
76+
/**
77+
* Frequency penalty setting. It affects the likelihood of the model
78+
* to repeatedly use the same words or phrases.
79+
*
80+
* The frequency penalty is a number between -1 (increase repetition)
81+
* and 1 (maximum penalty, decrease repetition). 0 means no penalty.
82+
*/
83+
frequencyPenalty?: number;
84+
85+
/**
86+
* Stop sequences.
87+
* If set, the model will stop generating text when one of the stop sequences is generated.
88+
* Providers may have limits on the number of stop sequences.
89+
*/
90+
stopSequences?: string[];
91+
92+
/**
93+
* The seed (integer) to use for random sampling. If set and supported
94+
* by the model, calls will generate deterministic results.
95+
*/
96+
seed?: number;
3697
}
3798

3899
/**
@@ -92,11 +153,27 @@ export class DurableAgent {
92153
private model: string;
93154
private tools: ToolSet;
94155
private system?: string;
156+
private temperature?: number;
157+
private maxOutputTokens?: number;
158+
private topP?: number;
159+
private topK?: number;
160+
private presencePenalty?: number;
161+
private frequencyPenalty?: number;
162+
private stopSequences?: string[];
163+
private seed?: number;
95164

96165
constructor(options: DurableAgentOptions) {
97166
this.model = options.model;
98167
this.tools = options.tools;
99168
this.system = options.system;
169+
this.temperature = options.temperature;
170+
this.maxOutputTokens = options.maxOutputTokens;
171+
this.topP = options.topP;
172+
this.topK = options.topK;
173+
this.presencePenalty = options.presencePenalty;
174+
this.frequencyPenalty = options.frequencyPenalty;
175+
this.stopSequences = options.stopSequences;
176+
this.seed = options.seed;
100177
}
101178

102179
generate() {
@@ -124,6 +201,14 @@ export class DurableAgent {
124201
tools: this.tools,
125202
writable,
126203
prompt: modelPrompt,
204+
temperature: this.temperature,
205+
maxOutputTokens: this.maxOutputTokens,
206+
topP: this.topP,
207+
topK: this.topK,
208+
presencePenalty: this.presencePenalty,
209+
frequencyPenalty: this.frequencyPenalty,
210+
stopSequences: this.stopSequences,
211+
seed: this.seed,
127212
});
128213

129214
let result = await iterator.next();

packages/ai/src/agent/stream-text-iterator.ts

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,11 +13,27 @@ export async function* streamTextIterator({
1313
tools = {},
1414
writable,
1515
model,
16+
temperature,
17+
maxOutputTokens,
18+
topP,
19+
topK,
20+
presencePenalty,
21+
frequencyPenalty,
22+
stopSequences,
23+
seed,
1624
}: {
1725
prompt: LanguageModelV2Prompt;
1826
tools: ToolSet;
1927
writable: WritableStream<UIMessageChunk>;
2028
model: string;
29+
temperature?: number;
30+
maxOutputTokens?: number;
31+
topP?: number;
32+
topK?: number;
33+
presencePenalty?: number;
34+
frequencyPenalty?: number;
35+
stopSequences?: string[];
36+
seed?: number;
2137
}): AsyncGenerator<
2238
LanguageModelV2ToolCall[],
2339
void,
@@ -31,7 +47,17 @@ export async function* streamTextIterator({
3147
conversationPrompt,
3248
model,
3349
writable,
34-
toolsToModelTools(tools)
50+
toolsToModelTools(tools),
51+
{
52+
temperature,
53+
maxOutputTokens,
54+
topP,
55+
topK,
56+
presencePenalty,
57+
frequencyPenalty,
58+
stopSequences,
59+
seed,
60+
}
3561
);
3662

3763
if (finish?.finishReason === 'tool-calls') {

0 commit comments

Comments
 (0)