diff --git a/src/api/providers/__tests__/openai-native-tools.spec.ts b/src/api/providers/__tests__/openai-native-tools.spec.ts
new file mode 100644
index 0000000000..1a3b93b9c2
--- /dev/null
+++ b/src/api/providers/__tests__/openai-native-tools.spec.ts
@@ -0,0 +1,77 @@
+import OpenAI from "openai"
+
+import { OpenAiHandler } from "../openai"
+
+describe("OpenAiHandler native tools", () => {
+	it("includes tools in request when custom model info lacks supportsNativeTools (regression test)", async () => {
+		const mockCreate = vi.fn().mockImplementationOnce(() => ({
+			[Symbol.asyncIterator]: async function* () {
+				yield {
+					choices: [{ delta: { content: "Test response" } }],
+				}
+			},
+		}))
+
+		// Set openAiCustomModelInfo WITHOUT supportsNativeTools to simulate
+		// a user-provided custom model info that doesn't specify native tool support.
+		// The getModel() fix should merge NATIVE_TOOL_DEFAULTS to ensure
+		// supportsNativeTools defaults to true.
+		const handler = new OpenAiHandler({
+			openAiApiKey: "test-key",
+			openAiBaseUrl: "https://example.com/v1",
+			openAiModelId: "test-model",
+			openAiCustomModelInfo: {
+				maxTokens: 4096,
+				contextWindow: 128000,
+			},
+		} as unknown as import("../../../shared/api").ApiHandlerOptions)
+
+		// Patch the OpenAI client call
+		const mockClient = {
+			chat: {
+				completions: {
+					create: mockCreate,
+				},
+			},
+		} as unknown as OpenAI
+		;(handler as unknown as { client: OpenAI }).client = mockClient
+
+		const tools: OpenAI.Chat.ChatCompletionTool[] = [
+			{
+				type: "function",
+				function: {
+					name: "test_tool",
+					description: "test",
+					parameters: { type: "object", properties: {} },
+				},
+			},
+		]
+
+		// Mimic the behavior in Task.attemptApiRequest() where tools are only
+		// included when modelInfo.supportsNativeTools is true. This is the
+		// actual regression path being tested - without the getModel() fix,
+		// supportsNativeTools would be undefined and tools wouldn't be passed.
+		const modelInfo = handler.getModel().info
+		const supportsNativeTools = modelInfo.supportsNativeTools ?? false
+
+		const stream = handler.createMessage("system", [], {
+			taskId: "test-task-id",
+			...(supportsNativeTools && { tools }),
+			...(supportsNativeTools && { toolProtocol: "native" as const }),
+		})
+		await stream.next()
+
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				tools: expect.arrayContaining([
+					expect.objectContaining({
+						type: "function",
+						function: expect.objectContaining({ name: "test_tool" }),
+					}),
+				]),
+				parallel_tool_calls: false,
+			}),
+			expect.anything(),
+		)
+	})
+})
diff --git a/src/api/providers/openai.ts b/src/api/providers/openai.ts
index f63ee99dc2..7ab2b00524
--- a/src/api/providers/openai.ts
+++ b/src/api/providers/openai.ts
@@ -6,6 +6,7 @@ import {
 	type ModelInfo,
 	azureOpenAiDefaultApiVersion,
 	openAiModelInfoSaneDefaults,
+	NATIVE_TOOL_DEFAULTS,
 	DEEP_SEEK_DEFAULT_TEMPERATURE,
 	OPENAI_AZURE_AI_INFERENCE_PATH,
 } from "@roo-code/types"
@@ -291,7 +292,13 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 
 	override getModel() {
 		const id = this.options.openAiModelId ?? ""
-		const info = this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults
+		// Ensure OpenAI-compatible models default to supporting native tool calling.
+		// This is required for [`Task.attemptApiRequest()`](src/core/task/Task.ts:3817) to
+		// include tool definitions in the request.
+		const info: ModelInfo = {
+			...NATIVE_TOOL_DEFAULTS,
+			...(this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults),
+		}
 		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
 		return { id, info, ...params }
 	}
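
The spread order in the new getModel() is what keeps the fix safe for existing configurations: the defaults are spread first, so a custom model info that omits supportsNativeTools picks up the default, while one that sets the flag explicitly still wins. A minimal sketch of that merge semantics follows; the shape of NATIVE_TOOL_DEFAULTS is an assumption here ({ supportsNativeTools: true }), the real constant lives in @roo-code/types and may set additional fields, and ModelInfo is likewise simplified for the example.

// Sketch of the spread-merge used in getModel(), with assumed/simplified types.
type ModelInfo = { maxTokens?: number; contextWindow?: number; supportsNativeTools?: boolean }

const NATIVE_TOOL_DEFAULTS: Partial<ModelInfo> = { supportsNativeTools: true } // assumed shape

// Custom model info that omits the flag (the regression case): the default survives.
const customWithoutFlag: ModelInfo = { maxTokens: 4096, contextWindow: 128000 }
const merged: ModelInfo = { ...NATIVE_TOOL_DEFAULTS, ...customWithoutFlag }
console.log(merged.supportsNativeTools) // true

// Custom model info that explicitly opts out: the user's value overrides the default.
const customOptOut: ModelInfo = { ...customWithoutFlag, supportsNativeTools: false }
const mergedOptOut: ModelInfo = { ...NATIVE_TOOL_DEFAULTS, ...customOptOut }
console.log(mergedOptOut.supportsNativeTools) // false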