114 changes: 114 additions & 0 deletions src/api/providers/__tests__/zai.temperature.spec.ts
@@ -0,0 +1,114 @@
// npx vitest run api/providers/__tests__/zai.temperature.spec.ts

import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"

import type { ModelInfo } from "@roo-code/types"

// Must be mocked before importing the handler.
vi.mock("@roo-code/types", async () => {
	const model: ModelInfo = {
		maxTokens: 1000,
		maxThinkingTokens: null,
		contextWindow: 8000,
		supportsImages: false,
		supportsPromptCache: false,
		defaultTemperature: 0.9,
		supportsReasoningEffort: ["disable", "medium"],
		reasoningEffort: "medium",
		preserveReasoning: true,
	}

	const models = {
		"glm-4.7": model,
	} as const satisfies Record<string, ModelInfo>

	return {
		internationalZAiModels: models,
		mainlandZAiModels: models,
		internationalZAiDefaultModelId: "glm-4.7",
		mainlandZAiDefaultModelId: "glm-4.7",
		ZAI_DEFAULT_TEMPERATURE: 0.5,
		zaiApiLineConfigs: {
			international_coding: { isChina: false, baseUrl: "https://example.invalid/v1" },
			china_coding: { isChina: true, baseUrl: "https://example.invalid/v1" },
		} as const,
	}
})

const mockCreate = vi.fn()

vi.mock("openai", () => ({
	default: vi.fn(() => ({
		chat: {
			completions: {
				create: mockCreate,
			},
		},
	})),
}))

import { ZAiHandler } from "../zai"

describe("ZAiHandler temperature precedence", () => {
	beforeEach(() => {
		vi.clearAllMocks()
	})

	it("uses model defaultTemperature when modelTemperature is not set", async () => {
		mockCreate.mockImplementationOnce(() => {
			return {
				[Symbol.asyncIterator]: () => ({
					async next() {
						return { done: true }
					},
				}),
			}
		})

		const handler = new ZAiHandler({
			apiModelId: "glm-4.7",
			zaiApiKey: "test-key",
			zaiApiLine: "international_coding",
		})

		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "hi" }]
		const stream = handler.createMessage("system", messages)
		await stream.next()

		expect(mockCreate).toHaveBeenCalledWith(
			expect.objectContaining({
				temperature: 0.9,
			}),
		)
	})

	it("uses modelTemperature when set", async () => {
		mockCreate.mockImplementationOnce(() => {
			return {
				[Symbol.asyncIterator]: () => ({
					async next() {
						return { done: true }
					},
				}),
			}
		})

		const handler = new ZAiHandler({
			apiModelId: "glm-4.7",
			zaiApiKey: "test-key",
			zaiApiLine: "international_coding",
			modelTemperature: 0.1,
		})

		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "hi" }]
		const stream = handler.createMessage("system", messages)
		await stream.next()

		expect(mockCreate).toHaveBeenCalledWith(
			expect.objectContaining({
				temperature: 0.1,
			}),
		)
	})
})
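Both tests stub chat.completions.create with the same minimal async iterable whose iterator reports done immediately, which is just enough for await stream.next() to drive createMessage to the point where the request options are captured. As a sketch only (this helper and its name are not part of the PR), the repeated stub could be factored out like this:

// Hypothetical helper: an empty OpenAI-style stream whose iterator finishes on the first next() call.
function emptyStream(): AsyncIterable<unknown> {
	return {
		[Symbol.asyncIterator]: () => ({
			async next() {
				return { done: true, value: undefined }
			},
		}),
	}
}

// Usage in either test:
// mockCreate.mockImplementationOnce(() => emptyStream())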
2 changes: 1 addition & 1 deletion src/api/providers/zai.ts
@@ -87,7 +87,7 @@ export class ZAiHandler extends BaseOpenAiCompatibleProvider<string> {
				format: "openai",
			}) ?? undefined

-		const temperature = this.options.modelTemperature ?? this.defaultTemperature
+		const temperature = this.options.modelTemperature ?? info.defaultTemperature ?? this.defaultTemperature

		// Use Z.ai format to preserve reasoning_content and merge post-tool text into tool messages
		const convertedMessages = convertToZAiFormat(messages, { mergeToolResultText: true })
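With this change the effective temperature is resolved in three steps: an explicitly configured modelTemperature wins, otherwise the model's own defaultTemperature from its ModelInfo entry is used, and only when neither is set does the handler fall back to its provider-wide default (presumably ZAI_DEFAULT_TEMPERATURE, mocked as 0.5 in the test above). A minimal sketch of that fallback chain, with an illustrative function name that does not exist in the codebase:

// Nullish coalescing only falls through on null or undefined, so an explicit 0 is still honored.
function resolveTemperature(
	userTemperature: number | null | undefined,
	modelDefault: number | undefined,
	providerDefault: number,
): number {
	return userTemperature ?? modelDefault ?? providerDefault
}

resolveTemperature(undefined, 0.9, 0.5) // 0.9: the model default asserted by the first test
resolveTemperature(0.1, 0.9, 0.5) // 0.1: the explicit user setting asserted by the second test
resolveTemperature(undefined, undefined, 0.5) // 0.5: the provider-wide fallback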