
Commit f8d6e12

feat: enable native tool calling for openai-native provider (#9348)
Co-authored-by: daniel-lxs <ricciodaniel98@gmail.com>
1 parent f5d3ac0 commit f8d6e12

2 files changed: +240, -36 lines


packages/types/src/providers/openai.ts

Lines changed: 29 additions & 0 deletions
@@ -9,6 +9,7 @@ export const openAiNativeModels = {
 	"gpt-5.1": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		promptCacheRetention: "24h",
@@ -28,6 +29,7 @@ export const openAiNativeModels = {
 	"gpt-5.1-codex": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		promptCacheRetention: "24h",
@@ -43,6 +45,7 @@ export const openAiNativeModels = {
 	"gpt-5.1-codex-mini": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		promptCacheRetention: "24h",
@@ -57,6 +60,7 @@ export const openAiNativeModels = {
 	"gpt-5": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["minimal", "low", "medium", "high"],
@@ -75,6 +79,7 @@ export const openAiNativeModels = {
 	"gpt-5-mini": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["minimal", "low", "medium", "high"],
@@ -93,6 +98,7 @@ export const openAiNativeModels = {
 	"gpt-5-codex": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["low", "medium", "high"],
@@ -107,6 +113,7 @@ export const openAiNativeModels = {
 	"gpt-5-nano": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["minimal", "low", "medium", "high"],
@@ -122,6 +129,7 @@ export const openAiNativeModels = {
 	"gpt-5-chat-latest": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 1.25,
@@ -132,6 +140,7 @@ export const openAiNativeModels = {
 	"gpt-4.1": {
 		maxTokens: 32_768,
 		contextWindow: 1_047_576,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 2,
@@ -145,6 +154,7 @@ export const openAiNativeModels = {
 	"gpt-4.1-mini": {
 		maxTokens: 32_768,
 		contextWindow: 1_047_576,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 0.4,
@@ -158,6 +168,7 @@ export const openAiNativeModels = {
 	"gpt-4.1-nano": {
 		maxTokens: 32_768,
 		contextWindow: 1_047_576,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 0.1,
@@ -171,6 +182,7 @@ export const openAiNativeModels = {
 	o3: {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 2.0,
@@ -187,6 +199,7 @@ export const openAiNativeModels = {
 	"o3-high": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 2.0,
@@ -198,6 +211,7 @@ export const openAiNativeModels = {
 	"o3-low": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 2.0,
@@ -209,6 +223,7 @@ export const openAiNativeModels = {
 	"o4-mini": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -225,6 +240,7 @@ export const openAiNativeModels = {
 	"o4-mini-high": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -236,6 +252,7 @@ export const openAiNativeModels = {
 	"o4-mini-low": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -247,6 +264,7 @@ export const openAiNativeModels = {
 	"o3-mini": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: false,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -259,6 +277,7 @@ export const openAiNativeModels = {
 	"o3-mini-high": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: false,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -270,6 +289,7 @@ export const openAiNativeModels = {
 	"o3-mini-low": {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: false,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -281,6 +301,7 @@ export const openAiNativeModels = {
 	o1: {
 		maxTokens: 100_000,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 15,
@@ -291,6 +312,7 @@ export const openAiNativeModels = {
 	"o1-preview": {
 		maxTokens: 32_768,
 		contextWindow: 128_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 15,
@@ -301,6 +323,7 @@ export const openAiNativeModels = {
 	"o1-mini": {
 		maxTokens: 65_536,
 		contextWindow: 128_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 1.1,
@@ -311,6 +334,7 @@ export const openAiNativeModels = {
 	"gpt-4o": {
 		maxTokens: 16_384,
 		contextWindow: 128_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 2.5,
@@ -324,6 +348,7 @@ export const openAiNativeModels = {
 	"gpt-4o-mini": {
 		maxTokens: 16_384,
 		contextWindow: 128_000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		inputPrice: 0.15,
@@ -337,6 +362,7 @@ export const openAiNativeModels = {
 	"codex-mini-latest": {
 		maxTokens: 16_384,
 		contextWindow: 200_000,
+		supportsNativeTools: true,
 		supportsImages: false,
 		supportsPromptCache: false,
 		inputPrice: 1.5,
@@ -350,6 +376,7 @@ export const openAiNativeModels = {
 	"gpt-5-2025-08-07": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["minimal", "low", "medium", "high"],
@@ -368,6 +395,7 @@ export const openAiNativeModels = {
 	"gpt-5-mini-2025-08-07": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["minimal", "low", "medium", "high"],
@@ -386,6 +414,7 @@ export const openAiNativeModels = {
 	"gpt-5-nano-2025-08-07": {
 		maxTokens: 128000,
 		contextWindow: 400000,
+		supportsNativeTools: true,
 		supportsImages: true,
 		supportsPromptCache: true,
 		supportsReasoningEffort: ["minimal", "low", "medium", "high"],
