Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
0ca7adb
Add native tool call support (WIP)
daniel-lxs Nov 11, 2025
e41fcea
Revert default protocol to XML for test compatibility
daniel-lxs Nov 11, 2025
94d1a31
add native tool call parser and migrate read file
daniel-lxs Nov 11, 2025
9176a7d
fix: prevent "no assistant messages" error by initializing assistantM…
daniel-lxs Nov 11, 2025
591765b
fix: enhance read_file handling to support both single-file and multi…
daniel-lxs Nov 11, 2025
c0d0a06
fix: task getting stuck after native tool call
daniel-lxs Nov 11, 2025
8c316e1
feat: migrate attempt completion
daniel-lxs Nov 11, 2025
20d9326
fix: revert CURRENT_TOOL_PROTOCOL to XML for compatibility
daniel-lxs Nov 11, 2025
14e7936
feat: migrate list-files tool to support the native protocol
daniel-lxs Nov 11, 2025
9351602
fix: update import path for listFilesTool to match naming convention
daniel-lxs Nov 11, 2025
adb6dab
feat: migrate new-task tool to handle native protocol
daniel-lxs Nov 11, 2025
ad87a9b
refactor: rename tools
daniel-lxs Nov 11, 2025
b7a95ea
feat: refactor executeCommandTool to use class-based structure and up…
daniel-lxs Nov 11, 2025
8fd363b
Rename tool
daniel-lxs Nov 11, 2025
fe34b75
feat: migrate tool to support native tool call path
daniel-lxs Nov 11, 2025
3cf1fa9
feat: migrate apply-diff to support native tool calls
daniel-lxs Nov 12, 2025
e5928ac
feat: migrate ask-followup-question to support native tool calling
daniel-lxs Nov 12, 2025
5cf419c
feat: migrate browser action to support native protocol
daniel-lxs Nov 12, 2025
fefd4c0
feat: migrate codebase_search to support native protocol
daniel-lxs Nov 12, 2025
910743e
feat: migrate fetch instructions to support native protocol
daniel-lxs Nov 12, 2025
f4090bb
feat: migrate generate image to support native protocol
daniel-lxs Nov 12, 2025
31d15a7
feat: migrate list code definitions to support native protocol
daniel-lxs Nov 12, 2025
15b7e85
feat: migrate run slash command to support native protocol
daniel-lxs Nov 12, 2025
259e16f
feat: migrate search files tool to support native protocol
daniel-lxs Nov 12, 2025
6ec00ea
feat: migrate switch mode tool to support native protocol
daniel-lxs Nov 12, 2025
b3c73cf
feat: migrate update todo list and mcp tools to support the native pr…
daniel-lxs Nov 12, 2025
da10b25
feat: migrate write to file to support native protocol, force model t…
daniel-lxs Nov 12, 2025
9758b10
rename: rename class-based tools
daniel-lxs Nov 12, 2025
af4c717
feat: add support for native tools in OpenRouter and update related i…
daniel-lxs Nov 12, 2025
64062ac
fix: anthropic models by removing duplicate tool call and fix "no ass…
daniel-lxs Nov 12, 2025
b0ed535
fix: handle markdown checklist parsing in handlePartial method
daniel-lxs Nov 13, 2025
0dcb39e
refactor(attempt-completion): standardize tool result handling
daniel-lxs Nov 13, 2025
7ed3a62
fix(native-protocol): prevent consecutive user messages on API retry
daniel-lxs Nov 13, 2025
6cb30e2
test: add supportsNativeTools to openrouter mock model data
daniel-lxs Nov 13, 2025
8509486
fix: correct import casing for renamed tool files
daniel-lxs Nov 13, 2025
d48860a
fix: move tool parameter types to @roo-code/types package
daniel-lxs Nov 13, 2025
b046bb9
refactor: clean up debug logs
daniel-lxs Nov 13, 2025
c7d310f
refactor(parser): clean up NativeToolCallParser type assertions and l…
daniel-lxs Nov 13, 2025
a6b9341
fix: detect tool calling support from API tool-use tag
daniel-lxs Nov 13, 2025
78aa72b
fix: remove spaces from search/replace block
daniel-lxs Nov 13, 2025
9154b72
Remove unimplemented tools (search_and_replace, edit_file)
daniel-lxs Nov 13, 2025
40cd9f5
fix: remove unrelated code from merge
daniel-lxs Nov 13, 2025
3d5ad18
refactor: move getMcpServerTools to top-level import
daniel-lxs Nov 13, 2025
fb93c15
refactor: move removeClosingTag to BaseTool as shared method
daniel-lxs Nov 13, 2025
2d655e1
feat: move tool protocol to VSCode setting
daniel-lxs Nov 13, 2025
19cb25a
fix: comment
daniel-lxs Nov 13, 2025
af31053
fix: typographical error
daniel-lxs Nov 13, 2025
e8d52f1
Update src/package.nls.id.json
daniel-lxs Nov 13, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/types/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ export * from "./todo.js"
export * from "./telemetry.js"
export * from "./terminal.js"
export * from "./tool.js"
export * from "./tool-params.js"
export * from "./type-fu.js"
export * from "./vscode.js"

Expand Down
2 changes: 2 additions & 0 deletions packages/types/src/model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,8 @@ export const modelInfoSchema = z.object({
deprecated: z.boolean().optional(),
// Flag to indicate if the model is free (no cost)
isFree: z.boolean().optional(),
// Flag to indicate if the model supports native tool calling (OpenAI-style function calling)
supportsNativeTools: z.boolean().optional(),
/**
* Service tiers with pricing information.
* Each tier can have a name (for OpenAI service tiers) and pricing overrides.
Expand Down
1 change: 1 addition & 0 deletions packages/types/src/providers/openrouter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ export const openRouterDefaultModelInfo: ModelInfo = {
contextWindow: 200_000,
supportsImages: true,
supportsPromptCache: true,
supportsNativeTools: true,
inputPrice: 3.0,
outputPrice: 15.0,
cacheWritesPrice: 3.75,
Expand Down
37 changes: 37 additions & 0 deletions packages/types/src/tool-params.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/**
 * Tool parameter type definitions for native protocol
 */

/** A range of lines within a file. NOTE(review): base index and inclusivity are not shown here — confirm at call sites. */
export interface LineRange {
	start: number
	end: number
}

/** A file reference, optionally restricted to specific line ranges. */
export interface FileEntry {
	path: string
	lineRanges?: LineRange[]
}

/** A 2D point (x/y) — presumably a pixel position for browser click/hover targets; verify against the browser tool. */
export interface Coordinate {
	x: number
	y: number
}

/** A 2D extent (width/height) — presumably the browser viewport size for the "resize" action; verify against the browser tool. */
export interface Size {
	width: number
	height: number
}

/**
 * Parameters for the browser action tool.
 * Only the fields relevant to the chosen `action` are expected — e.g. `url` for
 * "launch", `coordinate` for "click"/"hover", `text` for "type", `size` for
 * "resize". NOTE(review): this pairing is inferred from field names; confirm
 * against the tool implementation.
 */
export interface BrowserActionParams {
	action: "launch" | "click" | "hover" | "type" | "scroll_down" | "scroll_up" | "resize" | "close"
	url?: string
	coordinate?: Coordinate
	size?: Size
	text?: string
}

/**
 * Parameters for the image-generation tool.
 */
export interface GenerateImageParams {
	// Text prompt describing the image to generate.
	prompt: string
	// Destination path for the generated image.
	path: string
	// Optional input image. NOTE(review): representation (file path vs. base64 data) is not shown here — confirm.
	image?: string
}
21 changes: 21 additions & 0 deletions packages/types/src/tool.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,3 +68,24 @@ export const TOOL_PROTOCOL = {
* Derived from TOOL_PROTOCOL constants to ensure type safety
*/
export type ToolProtocol = (typeof TOOL_PROTOCOL)[keyof typeof TOOL_PROTOCOL]

/**
 * Determines whether the given tool protocol uses native tool calling
 * rather than XML.
 *
 * @param protocol - The tool protocol to inspect
 * @returns True when the protocol equals TOOL_PROTOCOL.NATIVE
 */
export function isNativeProtocol(protocol: ToolProtocol): boolean {
	const native: ToolProtocol = TOOL_PROTOCOL.NATIVE
	return native === protocol
}

/**
 * Gets the effective protocol from settings or falls back to the default XML.
 * This function is safe to use in webview-accessible code as it doesn't depend on vscode module.
 *
 * @param toolProtocol - Optional tool protocol from settings
 * @returns The effective tool protocol (defaults to "xml")
 */
export function getEffectiveProtocol(toolProtocol?: ToolProtocol): ToolProtocol {
	// Use nullish coalescing (not ||) so only null/undefined trigger the XML
	// fallback. ToolProtocol values are non-empty strings, so behavior is
	// unchanged, but ?? states the intent precisely and stays correct if the
	// union ever gains a falsy member.
	return toolProtocol ?? TOOL_PROTOCOL.XML
}
27 changes: 26 additions & 1 deletion src/api/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"

import type { ProviderSettings, ModelInfo } from "@roo-code/types"
import type { ProviderSettings, ModelInfo, ToolProtocol } from "@roo-code/types"

import { ApiStream } from "./transform/stream"

Expand Down Expand Up @@ -63,6 +64,30 @@ export interface ApiHandlerCreateMessageMetadata {
* - Unbound: Sent in unbound_metadata
*/
mode?: string
suppressPreviousResponseId?: boolean
/**
* Controls whether the response should be stored for 30 days in OpenAI's Responses API.
* When true (default), responses are stored and can be referenced in future requests
* using the previous_response_id for efficient conversation continuity.
* Set to false to opt out of response storage for privacy or compliance reasons.
* @default true
*/
store?: boolean
/**
* Optional array of tool definitions to pass to the model.
* For OpenAI-compatible providers, these are ChatCompletionTool definitions.
*/
tools?: OpenAI.Chat.ChatCompletionTool[]
/**
* Controls which (if any) tool is called by the model.
* Can be "none", "auto", "required", or a specific tool choice.
*/
tool_choice?: OpenAI.Chat.ChatCompletionCreateParams["tool_choice"]
/**
* The tool protocol being used (XML or Native).
* Used by providers to determine whether to include native tool definitions.
*/
toolProtocol?: ToolProtocol
}

export interface ApiHandler {
Expand Down
2 changes: 2 additions & 0 deletions src/api/providers/__tests__/openrouter.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ vitest.mock("../fetchers/modelCache", () => ({
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: true,
supportsNativeTools: true,
inputPrice: 3,
outputPrice: 15,
cacheWritesPrice: 3.75,
Expand Down Expand Up @@ -97,6 +98,7 @@ describe("OpenRouterHandler", () => {
const result = await handler.fetchModel()
expect(result.id).toBe("anthropic/claude-sonnet-4.5")
expect(result.info.supportsPromptCache).toBe(true)
expect(result.info.supportsNativeTools).toBe(true)
})

it("honors custom maxTokens for thinking models", async () => {
Expand Down
4 changes: 4 additions & 0 deletions src/api/providers/fetchers/__tests__/openrouter.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ describe("OpenRouter API", () => {
description: expect.any(String),
supportsReasoningBudget: false,
supportsReasoningEffort: false,
supportsNativeTools: true,
supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
})

Expand All @@ -44,6 +45,7 @@ describe("OpenRouter API", () => {
supportsReasoningBudget: true,
requiredReasoningBudget: true,
supportsReasoningEffort: true,
supportsNativeTools: true,
supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
})

Expand Down Expand Up @@ -96,6 +98,7 @@ describe("OpenRouter API", () => {
cacheReadsPrice: 0.31,
description: undefined,
supportsReasoningEffort: undefined,
supportsNativeTools: undefined,
supportedParameters: undefined,
},
"google-ai-studio": {
Expand All @@ -110,6 +113,7 @@ describe("OpenRouter API", () => {
cacheReadsPrice: 0.31,
description: undefined,
supportsReasoningEffort: undefined,
supportsNativeTools: undefined,
supportedParameters: undefined,
},
})
Expand Down
3 changes: 3 additions & 0 deletions src/api/providers/fetchers/__tests__/roo.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ describe("getRooModels", () => {
supportsImages: true,
supportsReasoningEffort: true,
requiredReasoningEffort: false,
supportsNativeTools: false,
supportsPromptCache: true,
inputPrice: 100, // 0.0001 * 1_000_000
outputPrice: 200, // 0.0002 * 1_000_000
Expand Down Expand Up @@ -116,6 +117,7 @@ describe("getRooModels", () => {
supportsImages: false,
supportsReasoningEffort: true,
requiredReasoningEffort: true,
supportsNativeTools: false,
supportsPromptCache: false,
inputPrice: 100, // 0.0001 * 1_000_000
outputPrice: 200, // 0.0002 * 1_000_000
Expand Down Expand Up @@ -162,6 +164,7 @@ describe("getRooModels", () => {
supportsImages: false,
supportsReasoningEffort: false,
requiredReasoningEffort: false,
supportsNativeTools: false,
supportsPromptCache: false,
inputPrice: 100, // 0.0001 * 1_000_000
outputPrice: 200, // 0.0002 * 1_000_000
Expand Down
5 changes: 4 additions & 1 deletion src/api/providers/fetchers/openrouter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -115,14 +115,16 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions): Promise<
continue
}

models[id] = parseOpenRouterModel({
const parsedModel = parseOpenRouterModel({
id,
model,
inputModality: architecture?.input_modalities,
outputModality: architecture?.output_modalities,
maxTokens: top_provider?.max_completion_tokens,
supportedParameters: supported_parameters,
})

models[id] = parsedModel
}
} catch (error) {
console.error(
Expand Down Expand Up @@ -216,6 +218,7 @@ export const parseOpenRouterModel = ({
cacheReadsPrice,
description: model.description,
supportsReasoningEffort: supportedParameters ? supportedParameters.includes("reasoning") : undefined,
supportsNativeTools: supportedParameters ? supportedParameters.includes("tools") : undefined,
supportedParameters: supportedParameters ? supportedParameters.filter(isModelParameter) : undefined,
}

Expand Down
4 changes: 4 additions & 0 deletions src/api/providers/fetchers/roo.ts
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,9 @@ export async function getRooModels(baseUrl: string, apiKey?: string): Promise<Mo
// Determine if the model requires reasoning effort based on tags
const requiredReasoningEffort = tags.includes("reasoning-required")

// Determine if the model supports native tool calling based on tags
const supportsNativeTools = tags.includes("tool-use")

// Parse pricing (API returns strings, convert to numbers)
const inputPrice = parseApiPrice(pricing.input)
const outputPrice = parseApiPrice(pricing.output)
Expand All @@ -104,6 +107,7 @@ export async function getRooModels(baseUrl: string, apiKey?: string): Promise<Mo
supportsImages,
supportsReasoningEffort,
requiredReasoningEffort,
supportsNativeTools,
supportsPromptCache: Boolean(cacheReadPrice !== undefined),
inputPrice,
outputPrice,
Expand Down
80 changes: 75 additions & 5 deletions src/api/providers/openrouter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import { getModelEndpoints } from "./fetchers/modelEndpointCache"

import { DEFAULT_HEADERS } from "./constants"
import { BaseProvider } from "./base-provider"
import type { SingleCompletionHandler } from "../index"
import type { ApiHandlerCreateMessageMetadata, SingleCompletionHandler } from "../index"
import { handleOpenAIError } from "./utils/openai-error-handler"

// Image generation types
Expand Down Expand Up @@ -96,11 +96,38 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
const apiKey = this.options.openRouterApiKey ?? "not-provided"

this.client = new OpenAI({ baseURL, apiKey, defaultHeaders: DEFAULT_HEADERS })

// Load models asynchronously to populate cache before getModel() is called
this.loadDynamicModels().catch((error) => {
console.error("[OpenRouterHandler] Failed to load dynamic models:", error)
})
}

/**
 * Fetches the OpenRouter model list and the endpoints for the configured
 * model concurrently, then caches both on this handler. Failures are logged
 * and swallowed so the fire-and-forget call in the constructor never rejects.
 */
private async loadDynamicModels(): Promise<void> {
	try {
		const modelsPromise = getModels({ provider: "openrouter" })
		const endpointsPromise = getModelEndpoints({
			router: "openrouter",
			modelId: this.options.openRouterModelId,
			endpoint: this.options.openRouterSpecificProvider,
		})
		const [fetchedModels, fetchedEndpoints] = await Promise.all([modelsPromise, endpointsPromise])
		this.models = fetchedModels
		this.endpoints = fetchedEndpoints
	} catch (error) {
		const details =
			error instanceof Error
				? { error: error.message, stack: error.stack }
				: { error: String(error), stack: undefined }
		console.error("[OpenRouterHandler] Error loading dynamic models:", details)
	}
}

override async *createMessage(
systemPrompt: string,
messages: Anthropic.Messages.MessageParam[],
metadata?: ApiHandlerCreateMessageMetadata,
): AsyncGenerator<ApiStreamChunk> {
const model = await this.fetchModel()

Expand Down Expand Up @@ -159,8 +186,11 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
allow_fallbacks: false,
},
}),
parallel_tool_calls: false, // Ensure only one tool call at a time
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Someday...

...(transforms && { transforms }),
...(reasoning && { reasoning }),
...(metadata?.tools && { tools: metadata.tools }),
...(metadata?.tool_choice && { tool_choice: metadata.tool_choice }),
}

let stream
Expand All @@ -171,6 +201,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
}

let lastUsage: CompletionUsage | undefined = undefined
const toolCallAccumulator = new Map<number, { id: string; name: string; arguments: string }>()

for await (const chunk of stream) {
// OpenRouter returns an error object instead of the OpenAI SDK throwing an error.
Expand All @@ -181,13 +212,52 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
}

const delta = chunk.choices[0]?.delta
const finishReason = chunk.choices[0]?.finish_reason

if (delta) {
if ("reasoning" in delta && delta.reasoning && typeof delta.reasoning === "string") {
yield { type: "reasoning", text: delta.reasoning }
}

if ("reasoning" in delta && delta.reasoning && typeof delta.reasoning === "string") {
yield { type: "reasoning", text: delta.reasoning }
// Check for tool calls in delta
if ("tool_calls" in delta && Array.isArray(delta.tool_calls)) {
for (const toolCall of delta.tool_calls) {
const index = toolCall.index
const existing = toolCallAccumulator.get(index)

if (existing) {
// Accumulate arguments for existing tool call
if (toolCall.function?.arguments) {
existing.arguments += toolCall.function.arguments
}
} else {
// Start new tool call accumulation
toolCallAccumulator.set(index, {
id: toolCall.id || "",
name: toolCall.function?.name || "",
arguments: toolCall.function?.arguments || "",
})
}
}
}

if (delta.content) {
yield { type: "text", text: delta.content }
}
}

if (delta?.content) {
yield { type: "text", text: delta.content }
// When finish_reason is 'tool_calls', yield all accumulated tool calls
if (finishReason === "tool_calls" && toolCallAccumulator.size > 0) {
for (const toolCall of toolCallAccumulator.values()) {
yield {
type: "tool_call",
id: toolCall.id,
name: toolCall.name,
arguments: toolCall.arguments,
}
}
// Clear accumulator after yielding
toolCallAccumulator.clear()
}

if (chunk.usage) {
Expand Down
Loading