diff --git a/src/api/providers/__tests__/zai.temperature.spec.ts b/src/api/providers/__tests__/zai.temperature.spec.ts
new file mode 100644
index 0000000000..f0bb7402cb
--- /dev/null
+++ b/src/api/providers/__tests__/zai.temperature.spec.ts
@@ -0,0 +1,114 @@
+// npx vitest run api/providers/__tests__/zai.temperature.spec.ts
+
+import { Anthropic } from "@anthropic-ai/sdk"
+import OpenAI from "openai"
+
+import type { ModelInfo } from "@roo-code/types"
+
+// Must be mocked before importing the handler.
+vi.mock("@roo-code/types", async () => {
+	const model: ModelInfo = {
+		maxTokens: 1000,
+		maxThinkingTokens: null,
+		contextWindow: 8000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		defaultTemperature: 0.9,
+		supportsReasoningEffort: ["disable", "medium"],
+		reasoningEffort: "medium",
+		preserveReasoning: true,
+	}
+
+	const models = {
+		"glm-4.7": model,
+	} as const satisfies Record<string, ModelInfo>
+
+	return {
+		internationalZAiModels: models,
+		mainlandZAiModels: models,
+		internationalZAiDefaultModelId: "glm-4.7",
+		mainlandZAiDefaultModelId: "glm-4.7",
+		ZAI_DEFAULT_TEMPERATURE: 0.5,
+		zaiApiLineConfigs: {
+			international_coding: { isChina: false, baseUrl: "https://example.invalid/v1" },
+			china_coding: { isChina: true, baseUrl: "https://example.invalid/v1" },
+		} as const,
+	}
+})
+
+const mockCreate = vi.fn()
+
+vi.mock("openai", () => ({
+	default: vi.fn(() => ({
+		chat: {
+			completions: {
+				create: mockCreate,
+			},
+		},
+	})),
+}))
+
+import { ZAiHandler } from "../zai"
+
+describe("ZAiHandler temperature precedence", () => {
+	beforeEach(() => {
+		vi.clearAllMocks()
+	})
+
+	it("uses model defaultTemperature when modelTemperature is not set", async () => {
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					async next() {
+						return { done: true }
+					},
+				}),
+			}
+		})
+
+		const handler = new ZAiHandler({
+			apiModelId: "glm-4.7",
+			zaiApiKey: "test-key",
+			zaiApiLine: "international_coding",
+		})
+
+		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "hi" }]
+		const stream = handler.createMessage("system", messages)
+		await stream.next()
+
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				temperature: 0.9,
+			}),
+		)
+	})
+
+	it("uses modelTemperature when set", async () => {
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					async next() {
+						return { done: true }
+					},
+				}),
+			}
+		})
+
+		const handler = new ZAiHandler({
+			apiModelId: "glm-4.7",
+			zaiApiKey: "test-key",
+			zaiApiLine: "international_coding",
+			modelTemperature: 0.1,
+		})
+
+		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "hi" }]
+		const stream = handler.createMessage("system", messages)
+		await stream.next()
+
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				temperature: 0.1,
+			}),
+		)
+	})
+})
diff --git a/src/api/providers/zai.ts b/src/api/providers/zai.ts
index c7bf6d635e..6f0e301a9c 100644
--- a/src/api/providers/zai.ts
+++ b/src/api/providers/zai.ts
@@ -87,7 +87,7 @@ export class ZAiHandler extends BaseOpenAiCompatibleProvider {
 				format: "openai",
 			}) ?? undefined
 
-		const temperature = this.options.modelTemperature ?? this.defaultTemperature
+		const temperature = this.options.modelTemperature ?? info.defaultTemperature ?? this.defaultTemperature
 
 		// Use Z.ai format to preserve reasoning_content and merge post-tool text into tool messages
 		const convertedMessages = convertToZAiFormat(messages, { mergeToolResultText: true })