diff --git a/src/api/providers/__tests__/roo.spec.ts b/src/api/providers/__tests__/roo.spec.ts
index 458fa228636..8eadb9f6943 100644
--- a/src/api/providers/__tests__/roo.spec.ts
+++ b/src/api/providers/__tests__/roo.spec.ts
@@ -102,6 +102,24 @@ vitest.mock("../../providers/fetchers/modelCache", () => ({
 					inputPrice: 0,
 					outputPrice: 0,
 				},
+				"minimax/minimax-m2": {
+					maxTokens: 32_768,
+					contextWindow: 1_000_000,
+					supportsImages: false,
+					supportsPromptCache: true,
+					supportsNativeTools: true,
+					inputPrice: 0.15,
+					outputPrice: 0.6,
+				},
+				"anthropic/claude-haiku-4.5": {
+					maxTokens: 8_192,
+					contextWindow: 200_000,
+					supportsImages: true,
+					supportsPromptCache: true,
+					supportsNativeTools: true,
+					inputPrice: 0.8,
+					outputPrice: 4,
+				},
 			}
 		}
 		return {}
@@ -402,6 +420,41 @@ describe("RooHandler", () => {
 				expect(modelInfo.info.contextWindow).toBeDefined()
 			}
 		})
+
+		it("should apply defaultToolProtocol: native for minimax/minimax-m2", () => {
+			const handlerWithMinimax = new RooHandler({
+				apiModelId: "minimax/minimax-m2",
+			})
+			const modelInfo = handlerWithMinimax.getModel()
+			expect(modelInfo.id).toBe("minimax/minimax-m2")
+			expect((modelInfo.info as any).defaultToolProtocol).toBe("native")
+			// Verify cached model info is preserved
+			expect(modelInfo.info.maxTokens).toBe(32_768)
+			expect(modelInfo.info.contextWindow).toBe(1_000_000)
+		})
+
+		it("should apply defaultToolProtocol: native for anthropic/claude-haiku-4.5", () => {
+			const handlerWithHaiku = new RooHandler({
+				apiModelId: "anthropic/claude-haiku-4.5",
+			})
+			const modelInfo = handlerWithHaiku.getModel()
+			expect(modelInfo.id).toBe("anthropic/claude-haiku-4.5")
+			expect((modelInfo.info as any).defaultToolProtocol).toBe("native")
+			// Verify cached model info is preserved
+			expect(modelInfo.info.maxTokens).toBe(8_192)
+			expect(modelInfo.info.contextWindow).toBe(200_000)
+		})
+
+		it("should not override existing properties when applying MODEL_DEFAULTS", () => {
+			const handlerWithMinimax = new RooHandler({
+				apiModelId: "minimax/minimax-m2",
+			})
+			const modelInfo = handlerWithMinimax.getModel()
+			// The defaults should be merged, but not overwrite existing cached values
+			expect(modelInfo.info.supportsNativeTools).toBe(true)
+			expect(modelInfo.info.inputPrice).toBe(0.15)
+			expect(modelInfo.info.outputPrice).toBe(0.6)
+		})
 	})
 
 	describe("temperature and model configuration", () => {
diff --git a/src/api/providers/roo.ts b/src/api/providers/roo.ts
index 0d5590e74c7..fa39abc97f5 100644
--- a/src/api/providers/roo.ts
+++ b/src/api/providers/roo.ts
@@ -18,6 +18,18 @@ import { handleOpenAIError } from "./utils/openai-error-handler"
 import { generateImageWithProvider, ImageGenerationResult } from "./utils/image-generation"
 import { t } from "../../i18n"
 
+import type { ModelInfo } from "@roo-code/types"
+
+// Model-specific defaults that should be applied even when models come from API cache
+const MODEL_DEFAULTS: Record<string, Partial<ModelInfo>> = {
+	"minimax/minimax-m2": {
+		defaultToolProtocol: "native",
+	},
+	"anthropic/claude-haiku-4.5": {
+		defaultToolProtocol: "native",
+	},
+}
+
 // Extend OpenAI's CompletionUsage to include Roo specific fields
 interface RooUsage extends OpenAI.CompletionUsage {
 	cache_creation_input_tokens?: number
@@ -245,8 +257,13 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
 		const models = getModelsFromCache("roo") || {}
 		const modelInfo = models[modelId]
 
+		// Get model-specific defaults if they exist
+		const modelDefaults = MODEL_DEFAULTS[modelId]
+
 		if (modelInfo) {
-			return { id: modelId, info: modelInfo }
+			// Merge model-specific defaults with cached model info
+			const mergedInfo = modelDefaults ? { ...modelInfo, ...modelDefaults } : modelInfo
+			return { id: modelId, info: mergedInfo }
 		}
 
 		// Return the requested model ID even if not found, with fallback info.
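
Note on merge order (a minimal sketch, not part of the diff; `CachedInfo`, `cachedInfo`, and `defaults` below are hypothetical stand-ins for `ModelInfo`, the cached `modelInfo`, and `MODEL_DEFAULTS[modelId]`):

// Illustrative TypeScript sketch of the { ...modelInfo, ...modelDefaults } merge in getModel().
// The interface and literal values are examples, not the real @roo-code/types definitions.
interface CachedInfo {
	maxTokens: number
	contextWindow: number
	inputPrice: number
	defaultToolProtocol?: string
}

const cachedInfo: CachedInfo = { maxTokens: 32_768, contextWindow: 1_000_000, inputPrice: 0.15 }
const defaults: Partial<CachedInfo> = { defaultToolProtocol: "native" }

// Spread order matters: keys in `defaults` win on collision; all other cached fields pass through.
const merged = { ...cachedInfo, ...defaults }
// => { maxTokens: 32768, contextWindow: 1000000, inputPrice: 0.15, defaultToolProtocol: "native" }

Because `modelDefaults` is spread last, any key defined in both the cache and MODEL_DEFAULTS resolves to the MODEL_DEFAULTS value; the "should not override existing properties" test stays green because it only asserts fields that MODEL_DEFAULTS does not set.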