Commits (23)
2d84092
feat: Adding UI and settings for the max context window
HahaBill Jun 5, 2025
cd50c11
feat: Tuning the setting specifically for Gemini
HahaBill Jun 5, 2025
e610717
feat: change the desciption and rangeDesription of `maxContextWindow`…
HahaBill Jun 5, 2025
d18b3c9
feat: modelMaxContextWindow for context condensing
HahaBill Jun 5, 2025
d27eb94
Merge branch 'RooCodeInc:main' into feat/set-max-context-window
HahaBill Jun 5, 2025
95a1440
feat: maxContextWindow - languages translated
HahaBill Jun 6, 2025
ef9ec80
Merge branch 'feat/set-max-context-window' of https://github.com/Haha…
HahaBill Jun 6, 2025
dfad585
feat: renamed the `modelMaxContextWindow` to `maxContextWindow` and a…
HahaBill Jun 7, 2025
4a7a571
feat: fix linting
HahaBill Jun 7, 2025
d448d81
fix: bringing back the openaiCompatible{__}
HahaBill Jun 7, 2025
93ecb9e
test: Adding a unit test to test the `MaxContextWindowControl` component
HahaBill Jun 9, 2025
f63c1ac
test: adding a unit test in `Task.test.ts` to test whether `maxContex…
HahaBill Jun 9, 2025
f506170
test: change to `false` with `get: ()` in `maxContextWindow` test
HahaBill Jun 9, 2025
dbdfc6d
fix: Update webview-ui/src/i18n/locales/zh-CN/settings.json
HahaBill Jun 9, 2025
5049653
fix: Update webview-ui/src/i18n/locales/zh-TW/settings.json
HahaBill Jun 9, 2025
2badff7
fix: Update webview-ui/src/i18n/locales/fr/settings.json
HahaBill Jun 9, 2025
c0dd359
fix: Update webview-ui/src/i18n/locales/ko/settings.json
HahaBill Jun 9, 2025
1cbd6f7
fix: Update webview-ui/src/components/settings/MaxContextWindowContro…
HahaBill Jun 9, 2025
16ffd71
fix: Update webview-ui/src/i18n/locales/pt-BR/settings.json
HahaBill Jun 9, 2025
38a9a58
fix: change from `hasCustomTemperature` to `hasCustomMaxContextWindow…
HahaBill Jun 9, 2025
8a9d382
Merge branch 'feat/set-max-context-window' of https://github.com/Haha…
HahaBill Jun 9, 2025
b411e08
fix: experimental commit due to GitHub Action errors
HahaBill Jun 9, 2025
4e72a06
Revert "fix: Update webview-ui/src/i18n/locales/pt-BR/settings.json"
HahaBill Jun 9, 2025
2 changes: 2 additions & 0 deletions packages/types/src/provider-settings.ts
@@ -152,6 +152,7 @@ const lmStudioSchema = baseProviderSettingsSchema.extend({
const geminiSchema = apiModelIdProviderModelSchema.extend({
geminiApiKey: z.string().optional(),
googleGeminiBaseUrl: z.string().optional(),
maxContextWindow: z.number().nullish(),
})

const openAiNativeSchema = apiModelIdProviderModelSchema.extend({
@@ -318,6 +319,7 @@ export const PROVIDER_SETTINGS_KEYS = keysOf<ProviderSettings>()([
// Gemini
"geminiApiKey",
"googleGeminiBaseUrl",
"maxContextWindow",
// OpenAI Native
"openAiNativeApiKey",
"openAiNativeBaseUrl",
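A quick note on the schema change above: `z.number().nullish()` accepts a number, `null`, or a missing field, which is what lets the webview clear the limit by sending `null` (see MaxContextWindowControl.tsx further down). A minimal sketch of that behavior, using a stand-in schema rather than the real `geminiSchema`:

import { z } from "zod"

// Stand-in for the real geminiSchema; only the new field matters here.
const geminiLikeSchema = z.object({
	maxContextWindow: z.number().nullish(),
})

geminiLikeSchema.parse({}) // ok: field omitted entirely
geminiLikeSchema.parse({ maxContextWindow: null }) // ok: explicitly unset, i.e. "use the model default"
geminiLikeSchema.parse({ maxContextWindow: 200_000 }) // ok: custom limit in tokens
// geminiLikeSchema.parse({ maxContextWindow: "1M" }) // would throw: the field must be a number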
9 changes: 8 additions & 1 deletion src/api/providers/gemini.ts
@@ -132,9 +132,16 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
override getModel() {
const modelId = this.options.apiModelId
let id = modelId && modelId in geminiModels ? (modelId as GeminiModelId) : geminiDefaultModelId
const info: ModelInfo = geminiModels[id]
let info: ModelInfo = geminiModels[id]
const params = getModelParams({ format: "gemini", modelId: id, model: info, settings: this.options })

if (this.options.maxContextWindow) {
info = {
...info,
contextWindow: this.options.maxContextWindow,
}
}

// The `:thinking` suffix indicates that the model is a "Hybrid"
// reasoning model and that reasoning is required to be enabled.
// The actual model ID honored by Gemini's API does not have this
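The new block above only swaps the advertised `contextWindow` on the `ModelInfo` returned by `getModel()`; nothing else about the model changes. A standalone sketch of that behavior, with a trimmed-down `ModelInfo` shape and illustrative numbers:

// Trimmed-down ModelInfo for illustration; the real type has more fields.
interface ModelInfoLike {
	contextWindow: number
	maxTokens?: number
}

// Mirrors the new block in GeminiHandler.getModel(): a truthy maxContextWindow wins.
function applyMaxContextWindow(info: ModelInfoLike, maxContextWindow?: number | null): ModelInfoLike {
	return maxContextWindow ? { ...info, contextWindow: maxContextWindow } : info
}

// Illustrative numbers only.
const baseInfo: ModelInfoLike = { contextWindow: 1_048_576, maxTokens: 65_536 }

applyMaxContextWindow(baseInfo, 200_000) // { contextWindow: 200000, maxTokens: 65536 }
applyMaxContextWindow(baseInfo, null) // unchanged: { contextWindow: 1048576, maxTokens: 65536 }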
5 changes: 4 additions & 1 deletion src/core/task/Task.ts
@@ -1667,7 +1667,10 @@ export class Task extends EventEmitter<ClineEvents> {
? this.apiConfiguration.modelMaxTokens || DEFAULT_THINKING_MODEL_MAX_TOKENS
: modelInfo.maxTokens

const contextWindow = modelInfo.contextWindow
const contextWindow =
this.apiConfiguration.apiProvider === "gemini" && this.apiConfiguration.maxContextWindow
? this.apiConfiguration.maxContextWindow
: modelInfo.contextWindow

const truncateResult = await truncateConversationIfNeeded({
messages: this.apiConversationHistory,
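So the condensing path honors the custom limit only when the active provider is Gemini; any other provider keeps the model's own `contextWindow`. A small sketch of the selection rule with simplified names:

// Simplified version of the ternary added to Task.ts above.
function resolveContextWindow(
	apiProvider: string | undefined,
	maxContextWindow: number | null | undefined,
	modelContextWindow: number,
): number {
	return apiProvider === "gemini" && maxContextWindow ? maxContextWindow : modelContextWindow
}

resolveContextWindow("gemini", 200_000, 1_048_576) // 200000: the custom limit applies
resolveContextWindow("gemini", null, 1_048_576) // 1048576: falls back to the model default
resolveContextWindow("openrouter", 200_000, 128_000) // 128000: the setting is ignored for other providers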
77 changes: 77 additions & 0 deletions src/core/task/__tests__/Task.test.ts
@@ -14,6 +14,7 @@ import { ClineProvider } from "../../webview/ClineProvider"
import { ApiStreamChunk } from "../../../api/transform/stream"
import { ContextProxy } from "../../config/ContextProxy"
import { processUserContentMentions } from "../../mentions/processUserContentMentions"
import * as slidingWindow from "../../sliding-window"

jest.mock("execa", () => ({
execa: jest.fn(),
@@ -533,6 +534,82 @@ describe("Cline", () => {
})
})

it("should use maxContextWindow when provider is gemini and maxContextWindow is set", async () => {
// Arrange: set apiProvider to gemini and maxContextWindow
const geminiConfig = {
...mockApiConfig,
apiProvider: "gemini" as const,
maxContextWindow: 42,
}
const created = Task.create({
provider: mockProvider,
apiConfiguration: geminiConfig,
task: "test gemini context window",
}) as unknown as [any, Promise<void>]
const cline = created[0] as any
const task = created[1] as Promise<void>

// Stub model info to have a different default contextWindow
;(cline.api as any).getModel = jest.fn().mockReturnValue({
id: "gemini-model",
info: {
contextWindow: 100,
supportsReasoningBudget: true,
maxTokens: 1000,
supportsComputerUse: false,
supportsPromptCache: false,
inputPrice: 0,
outputPrice: 0,
},
})

// Stub required methods to let attemptApiRequest proceed
;(cline as any).getSystemPrompt = jest.fn().mockResolvedValue("")
;(cline as any).getTokenUsage = jest.fn().mockReturnValue({
contextTokens: 1,
totalTokensIn: 0,
totalTokensOut: 0,
totalCost: 0,
})

// Stub createMessage to avoid real API calls
jest.spyOn(cline.api as any, "createMessage").mockReturnValue((async function* () {})())

// Spy on truncateConversationIfNeeded to capture its options
const twSpy = jest.spyOn(slidingWindow, "truncateConversationIfNeeded").mockResolvedValue({
messages: [],
summary: "",
cost: 0,
prevContextTokens: 0,
newContextTokens: 0,
error: undefined,
})

// Force abort immediately so the stream loop exits
Object.defineProperty(cline, "abort", {
get: () => false,
set: () => {},
configurable: true,
})

// Act: run through the generator
try {
for await (const _ of cline.attemptApiRequest()) {
}
} catch {
/* ignore */
}

// Assert: the contextWindow passed to truncateConversationIfNeeded is the maxContextWindow
expect(twSpy).toHaveBeenCalled()
const optionsPassed = twSpy.mock.calls[0][0]
expect(optionsPassed.contextWindow).toBe(42)

// Cleanup
await cline.abortTask(true)
await task.catch(() => {})
})

it.skip("should handle API retry with countdown", async () => {
const [cline, task] = Task.create({
provider: mockProvider,
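One pattern worth noting in the test above: `createMessage` is stubbed with an immediately invoked empty async generator, so the streaming loop inside `attemptApiRequest` completes without any real API traffic. A standalone illustration of why that works:

// An async generator with no yields produces a stream that completes immediately.
async function* emptyStream() {}

async function demo() {
	for await (const chunk of emptyStream()) {
		console.log("got chunk", chunk) // never reached
	}
	console.log("stream finished with no chunks")
}

demo()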
11 changes: 11 additions & 0 deletions webview-ui/src/components/settings/ApiOptions.tsx
@@ -51,6 +51,7 @@ import { ApiErrorMessage } from "./ApiErrorMessage"
import { ThinkingBudget } from "./ThinkingBudget"
import { DiffSettingsControl } from "./DiffSettingsControl"
import { TemperatureControl } from "./TemperatureControl"
import { MaxContextWindowControl } from "./MaxContextWindowControl"
import { RateLimitSecondsControl } from "./RateLimitSecondsControl"
import { BedrockCustomArn } from "./providers/BedrockCustomArn"
import { buildDocLink } from "@src/utils/docLinks"
@@ -461,6 +462,7 @@ const ApiOptions = ({
modelInfo={selectedModelInfo}
isDescriptionExpanded={isDescriptionExpanded}
setIsDescriptionExpanded={setIsDescriptionExpanded}
maxContextWindow={selectedProvider === "gemini" ? apiConfiguration.maxContextWindow : undefined}
/>
</>
)}
@@ -484,6 +486,15 @@
onChange={handleInputChange("modelTemperature", noTransform)}
maxValue={2}
/>

{selectedProvider === "gemini" && (
<MaxContextWindowControl
value={apiConfiguration.maxContextWindow}
onChange={handleInputChange("maxContextWindow", noTransform)}
maxValue={1048576}
/>
)}

<RateLimitSecondsControl
value={apiConfiguration.rateLimitSeconds || 0}
onChange={(value) => setApiConfigurationField("rateLimitSeconds", value)}
71 changes: 71 additions & 0 deletions webview-ui/src/components/settings/MaxContextWindowControl.tsx
@@ -0,0 +1,71 @@
import { VSCodeCheckbox } from "@vscode/webview-ui-toolkit/react"
import { useEffect, useState } from "react"
import { useAppTranslation } from "@/i18n/TranslationContext"
import { useDebounce } from "react-use"

import { Slider } from "@/components/ui"

interface MaxContextWindowControlProps {
value: number | undefined | null
onChange: (value: number | undefined | null) => void
maxValue?: number
}

export const MaxContextWindowControl = ({ value, onChange, maxValue = 1000000 }: MaxContextWindowControlProps) => {
const { t } = useAppTranslation()
const [isCustomMaxContextWindow, setIsCustomMaxContextWindow] = useState(value !== undefined)
const [inputValue, setInputValue] = useState(value)

useDebounce(() => onChange(inputValue), 50, [onChange, inputValue])

// Sync internal state with prop changes when switching profiles.
useEffect(() => {
const hasCustomMaxContextWindow = value !== undefined && value !== null
setIsCustomMaxContextWindow(hasCustomMaxContextWindow)
setInputValue(value)
}, [value])

return (
<>
<div>
<VSCodeCheckbox
checked={isCustomMaxContextWindow}
onChange={(e: any) => {
const isChecked = e.target.checked
setIsCustomMaxContextWindow(isChecked)

if (!isChecked) {
setInputValue(null) // Unset the max context window limit, note that undefined is unserializable.
} else {
setInputValue(value ?? 0) // Use the value from apiConfiguration, if set.
}
}}>
<label className="block font-medium mb-1">{t("settings:maxContextWindow.useCustom")}</label>
</VSCodeCheckbox>
<div className="text-sm text-vscode-descriptionForeground mt-1">
{t("settings:maxContextWindow.description")}
</div>
</div>

{isCustomMaxContextWindow && (
<div className="flex flex-col gap-3 pl-3 border-l-2 border-vscode-button-background">
<div>
<div className="flex items-center gap-2">
<Slider
min={32000}
max={maxValue}
step={1}
value={[inputValue ?? 1048576]}
onValueChange={([value]) => setInputValue(value)}
/>
<span className="w-10">{inputValue}</span>
</div>
<div className="text-vscode-descriptionForeground text-sm mt-1">
{t("settings:maxContextWindow.rangeDescription")}
</div>
</div>
</div>
)}
</>
)
}
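Usage mirrors the wiring already shown in ApiOptions.tsx: the control is rendered only for the Gemini provider, and unchecking the box reports `null` rather than `undefined`, since `undefined` does not survive serialization (per the comment in the component). A hedged sketch with a hypothetical wrapper; the `setApiConfigurationField`-style plumbing is assumed from ApiOptions.tsx:

import { MaxContextWindowControl } from "./MaxContextWindowControl"

// Hypothetical wrapper that mirrors the ApiOptions.tsx wiring shown earlier.
type GeminiContextWindowSettingProps = {
	selectedProvider: string
	maxContextWindow: number | null | undefined
	setApiConfigurationField: (field: "maxContextWindow", value: number | null | undefined) => void
}

export const GeminiContextWindowSetting = ({
	selectedProvider,
	maxContextWindow,
	setApiConfigurationField,
}: GeminiContextWindowSettingProps) => {
	// The setting is Gemini-only, matching the guard in ApiOptions.tsx.
	if (selectedProvider !== "gemini") {
		return null
	}

	return (
		<MaxContextWindowControl
			value={maxContextWindow}
			onChange={(value) => setApiConfigurationField("maxContextWindow", value)}
			maxValue={1_048_576} // Gemini's largest advertised context window
		/>
	)
}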
14 changes: 14 additions & 0 deletions webview-ui/src/components/settings/ModelInfoView.tsx
@@ -14,6 +14,7 @@ type ModelInfoViewProps = {
modelInfo?: ModelInfo
isDescriptionExpanded: boolean
setIsDescriptionExpanded: (isExpanded: boolean) => void
maxContextWindow?: number | null
}

export const ModelInfoView = ({
@@ -22,9 +23,13 @@ export const ModelInfoView = ({
modelInfo,
isDescriptionExpanded,
setIsDescriptionExpanded,
maxContextWindow,
}: ModelInfoViewProps) => {
const { t } = useAppTranslation()

const maxContextWindowValue =
apiProvider === "gemini" && maxContextWindow ? maxContextWindow : modelInfo?.contextWindow

const infoItems = [
<ModelInfoSupportsItem
isSupported={modelInfo?.supportsImages ?? false}
@@ -71,6 +76,15 @@
{formatPrice(modelInfo.cacheWritesPrice || 0)} / 1M tokens
</>
),
maxContextWindowValue && (
<>
<span className="font-medium">{t("settings:maxContextWindow.maxContextWindow")}:</span>{" "}
{maxContextWindowValue.toLocaleString()} tokens
{apiProvider === "gemini" && maxContextWindowValue && (
<span className="text-vscode-descriptionForeground ml-1">(custom limit)</span>
)}
</>
),
apiProvider === "gemini" && (
<span className="italic">
{selectedModelId.includes("pro-preview")
@@ -0,0 +1,57 @@
import { render, screen, fireEvent } from "@testing-library/react"
import { MaxContextWindowControl } from "../MaxContextWindowControl"
import "@testing-library/jest-dom"

class MockResizeObserver {
observe() {}
unobserve() {}
disconnect() {}
}
global.ResizeObserver = MockResizeObserver

jest.mock("@/components/ui", () => ({
...jest.requireActual("@/components/ui"),
Slider: ({ value, onValueChange, min = 0, max = 100, "data-testid": dataTestId }: any) => (
<input
type="range"
min={min}
max={max}
value={value[0]}
onChange={(e) => onValueChange([parseInt(e.target.value, 10)])}
data-testid={dataTestId}
/>
),
}))

describe("MaxContextWindowControl", () => {
it("updates when checkbox is toggled", async () => {
const onChange = jest.fn()
render(<MaxContextWindowControl value={123} onChange={onChange} />)

const checkbox = screen.getByRole("checkbox") as HTMLInputElement
fireEvent.click(checkbox)

await new Promise((r) => setTimeout(r, 100))
expect(onChange).toHaveBeenCalledWith(null)

fireEvent.click(checkbox)

await new Promise((r) => setTimeout(r, 100))
expect(onChange).toHaveBeenCalledWith(123)
})

it("calls onChange when slider is moved", async () => {
const onChange = jest.fn()
render(<MaxContextWindowControl value={35000} onChange={onChange} />)

const checkbox = screen.getByRole("checkbox") as HTMLInputElement
expect(checkbox).toBeChecked()

const slider = screen.getByRole("slider")
fireEvent.change(slider, { target: { value: "50000" } })

await new Promise((r) => setTimeout(r, 120))

expect(onChange).toHaveBeenCalledWith(50000)
})
})
6 changes: 6 additions & 0 deletions webview-ui/src/i18n/locales/ca/settings.json
@@ -503,6 +503,12 @@
"description": "Controla l'aleatorietat en les respostes del model.",
"rangeDescription": "Valors més alts fan que la sortida sigui més aleatòria, valors més baixos la fan més determinista."
},
"maxContextWindow": {
"maxContextWindow": "",
"useCustom": "Utilitza una finestra de context màxim personalitzada",
"description": "Estableix la mida màxima de la finestra de context per al model. Quan les converses s'acosten a aquest límit, Roo Code condensarà automàticament els missatges més antics per mantenir-se dins de la restricció.",
"rangeDescription": "Els valors més alts permeten converses més llargues abans de condensar-se, els valors més baixos activen la condensació abans per mantenir-se dins dels límits de velocitat de l'API."
},
"modelInfo": {
"supportsImages": "Suporta imatges",
"noImages": "No suporta imatges",
6 changes: 6 additions & 0 deletions webview-ui/src/i18n/locales/de/settings.json
@@ -503,6 +503,12 @@
"description": "Steuert die Zufälligkeit in den Antworten des Modells.",
"rangeDescription": "Höhere Werte machen die Ausgabe zufälliger, niedrigere Werte machen sie deterministischer."
},
"maxContextWindow": {
"maxContextWindow": "Maximales Kontextfenster",
"useCustom": "Benutzerdefiniertes maximales Kontextfenster verwenden",
"description": "Legt die maximale Kontextfenstergröße für das Modell fest. Wenn Konversationen diese Grenze erreichen, komprimiert Roo Code ältere Nachrichten automatisch, um die Beschränkung einzuhalten.",
"rangeDescription": "Höhere Werte ermöglichen längere Gespräche vor der Verdichtung, niedrigere Werte lösen eine frühere Verdichtung aus, um innerhalb der API-Ratengrenzen zu bleiben."
},
"modelInfo": {
"supportsImages": "Unterstützt Bilder",
"noImages": "Unterstützt keine Bilder",
6 changes: 6 additions & 0 deletions webview-ui/src/i18n/locales/en/settings.json
@@ -503,6 +503,12 @@
"description": "Controls randomness in the model's responses.",
"rangeDescription": "Higher values make output more random, lower values make it more deterministic."
},
"maxContextWindow": {
"maxContextWindow": "Max Context Window",
"useCustom": "Use custom max context window",
"description": "Sets the maximum context window size for the model. When conversations approach this limit, Roo Code will automatically condense older messages to stay within the constraint.",
"rangeDescription": "Higher values allow longer conversations before condensing, lower values trigger condensing sooner to stay within API rate limits."
},
"modelInfo": {
"supportsImages": "Supports images",
"noImages": "Does not support images",
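For reference, the new UI reads these keys as follows: `MaxContextWindowControl` uses `useCustom`, `description`, and `rangeDescription`, and `ModelInfoView` uses `maxContextWindow` as its label. A hypothetical helper showing the lookups against the English bundle above (assuming the usual `t()` behavior behind `useAppTranslation`):

import { useAppTranslation } from "@/i18n/TranslationContext"

// Hypothetical read-only helper; the keys match the en bundle above.
export const MaxContextWindowHelpText = () => {
	const { t } = useAppTranslation()

	return (
		<div>
			{/* "Max Context Window" */}
			<span className="font-medium">{t("settings:maxContextWindow.maxContextWindow")}</span>
			{/* "Sets the maximum context window size for the model. ..." */}
			<p>{t("settings:maxContextWindow.description")}</p>
			{/* "Higher values allow longer conversations before condensing, ..." */}
			<p>{t("settings:maxContextWindow.rangeDescription")}</p>
		</div>
	)
}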
6 changes: 6 additions & 0 deletions webview-ui/src/i18n/locales/es/settings.json
@@ -503,6 +503,12 @@
"description": "Controla la aleatoriedad en las respuestas del modelo.",
"rangeDescription": "Valores más altos hacen que la salida sea más aleatoria, valores más bajos la hacen más determinista."
},
"maxContextWindow": {
"maxContextWindow": "",
"useCustom": "Usar ventana de contexto máxima personalizada",
"description": "Establece el tamaño máximo de la ventana de contexto del modelo. Cuando las conversaciones se acercan a este límite, Roo Code condensará automáticamente los mensajes antiguos para ajustarse a la restricción.",
"rangeDescription": "Los valores más altos permiten conversaciones más largas antes de condensarse; los valores más bajos activan la condensación antes para permanecer dentro de los límites de velocidad de la API."
},
"modelInfo": {
"supportsImages": "Soporta imágenes",
"noImages": "No soporta imágenes",