diff --git a/src/api/index.ts b/src/api/index.ts index 861ba59b99c..24007c66827 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -23,6 +23,8 @@ import { RequestyHandler } from "./providers/requesty" import { HumanRelayHandler } from "./providers/human-relay" import { FakeAIHandler } from "./providers/fake-ai" import { XAIHandler } from "./providers/xai" +import { GroqHandler } from "./providers/groq" +import { ChutesHandler } from "./providers/chutes" export interface SingleCompletionHandler { completePrompt(prompt: string): Promise<string> @@ -88,6 +90,10 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler { return new FakeAIHandler(options) case "xai": return new XAIHandler(options) + case "groq": + return new GroqHandler(options) + case "chutes": + return new ChutesHandler(options) default: return new AnthropicHandler(options) } diff --git a/src/api/providers/__tests__/chutes.test.ts b/src/api/providers/__tests__/chutes.test.ts new file mode 100644 index 00000000000..946aff96c88 --- /dev/null +++ b/src/api/providers/__tests__/chutes.test.ts @@ -0,0 +1,142 @@ +// npx jest src/api/providers/__tests__/chutes.test.ts + +import OpenAI from "openai" +import { Anthropic } from "@anthropic-ai/sdk" + +import { ChutesModelId, chutesDefaultModelId, chutesModels } from "../../../shared/api" + +import { ChutesHandler } from "../chutes" + +jest.mock("openai", () => { + const createMock = jest.fn() + return jest.fn(() => ({ chat: { completions: { create: createMock } } })) +}) + +describe("ChutesHandler", () => { + let handler: ChutesHandler + let mockCreate: jest.Mock + + beforeEach(() => { + jest.clearAllMocks() + mockCreate = (OpenAI as unknown as jest.Mock)().chat.completions.create + handler = new ChutesHandler({ chutesApiKey: "test-chutes-api-key" }) + }) + + test("should use the correct Chutes base URL", () => { + new ChutesHandler({ chutesApiKey: "test-chutes-api-key" }) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://llm.chutes.ai/v1" })) + }) + + test("should use the provided API key", () => { + const chutesApiKey = "test-chutes-api-key" + new ChutesHandler({ chutesApiKey }) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: chutesApiKey })) + }) + + test("should return default model when no model is specified", () => { + const model = handler.getModel() + expect(model.id).toBe(chutesDefaultModelId) + expect(model.info).toEqual(chutesModels[chutesDefaultModelId]) + }) + + test("should return specified model when valid model is provided", () => { + const testModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1" + const handlerWithModel = new ChutesHandler({ apiModelId: testModelId, chutesApiKey: "test-chutes-api-key" }) + const model = handlerWithModel.getModel() + + expect(model.id).toBe(testModelId) + expect(model.info).toEqual(chutesModels[testModelId]) + }) + + test("completePrompt method should return text from Chutes API", async () => { + const expectedResponse = "This is a test response from Chutes" + mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] }) + const result = await handler.completePrompt("test prompt") + expect(result).toBe(expectedResponse) + }) + + test("should handle errors in completePrompt", async () => { + const errorMessage = "Chutes API error" + mockCreate.mockRejectedValueOnce(new Error(errorMessage)) + await expect(handler.completePrompt("test prompt")).rejects.toThrow(`Chutes completion error: ${errorMessage}`) + }) + + test("createMessage should yield text content from stream", async () => { + const testContent = "This is test content from Chutes stream" + 
+ mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + next: jest + .fn() + .mockResolvedValueOnce({ + done: false, + value: { choices: [{ delta: { content: testContent } }] }, + }) + .mockResolvedValueOnce({ done: true }), + }), + } + }) + + const stream = handler.createMessage("system prompt", []) + const firstChunk = await stream.next() + + expect(firstChunk.done).toBe(false) + expect(firstChunk.value).toEqual({ type: "text", text: testContent }) + }) + + test("createMessage should yield usage data from stream", async () => { + mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + next: jest + .fn() + .mockResolvedValueOnce({ + done: false, + value: { choices: [{ delta: {} }], usage: { prompt_tokens: 10, completion_tokens: 20 } }, + }) + .mockResolvedValueOnce({ done: true }), + }), + } + }) + + const stream = handler.createMessage("system prompt", []) + const firstChunk = await stream.next() + + expect(firstChunk.done).toBe(false) + expect(firstChunk.value).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20 }) + }) + + test("createMessage should pass correct parameters to Chutes client", async () => { + const modelId: ChutesModelId = "deepseek-ai/DeepSeek-R1" + const modelInfo = chutesModels[modelId] + const handlerWithModel = new ChutesHandler({ apiModelId: modelId, chutesApiKey: "test-chutes-api-key" }) + + mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + async next() { + return { done: true } + }, + }), + } + }) + + const systemPrompt = "Test system prompt for Chutes" + const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Test message for Chutes" }] + + const messageGenerator = handlerWithModel.createMessage(systemPrompt, messages) + await messageGenerator.next() + + expect(mockCreate).toHaveBeenCalledWith( + expect.objectContaining({ + model: modelId, + max_tokens: modelInfo.maxTokens, + temperature: 0.5, + messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]), + stream: true, + stream_options: { include_usage: true }, + }), + ) + }) +}) diff --git a/src/api/providers/__tests__/groq.test.ts b/src/api/providers/__tests__/groq.test.ts new file mode 100644 index 00000000000..6b38dcc9274 --- /dev/null +++ b/src/api/providers/__tests__/groq.test.ts @@ -0,0 +1,142 @@ +// npx jest src/api/providers/__tests__/groq.test.ts + +import OpenAI from "openai" +import { Anthropic } from "@anthropic-ai/sdk" + +import { GroqModelId, groqDefaultModelId, groqModels } from "../../../shared/api" + +import { GroqHandler } from "../groq" + +jest.mock("openai", () => { + const createMock = jest.fn() + return jest.fn(() => ({ chat: { completions: { create: createMock } } })) +}) + +describe("GroqHandler", () => { + let handler: GroqHandler + let mockCreate: jest.Mock + + beforeEach(() => { + jest.clearAllMocks() + mockCreate = (OpenAI as unknown as jest.Mock)().chat.completions.create + handler = new GroqHandler({ groqApiKey: "test-groq-api-key" }) + }) + + test("should use the correct Groq base URL", () => { + new GroqHandler({ groqApiKey: "test-groq-api-key" }) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://api.groq.com/openai/v1" })) + }) + + test("should use the provided API key", () => { + const groqApiKey = "test-groq-api-key" + new GroqHandler({ groqApiKey }) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: groqApiKey })) + }) + + test("should return default model when no model is specified", () => { + const model = handler.getModel() + 
expect(model.id).toBe(groqDefaultModelId) + expect(model.info).toEqual(groqModels[groqDefaultModelId]) + }) + + test("should return specified model when valid model is provided", () => { + const testModelId: GroqModelId = "llama-3.3-70b-versatile" + const handlerWithModel = new GroqHandler({ apiModelId: testModelId, groqApiKey: "test-groq-api-key" }) + const model = handlerWithModel.getModel() + + expect(model.id).toBe(testModelId) + expect(model.info).toEqual(groqModels[testModelId]) + }) + + test("completePrompt method should return text from Groq API", async () => { + const expectedResponse = "This is a test response from Groq" + mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] }) + const result = await handler.completePrompt("test prompt") + expect(result).toBe(expectedResponse) + }) + + test("should handle errors in completePrompt", async () => { + const errorMessage = "Groq API error" + mockCreate.mockRejectedValueOnce(new Error(errorMessage)) + await expect(handler.completePrompt("test prompt")).rejects.toThrow(`Groq completion error: ${errorMessage}`) + }) + + test("createMessage should yield text content from stream", async () => { + const testContent = "This is test content from Groq stream" + + mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + next: jest + .fn() + .mockResolvedValueOnce({ + done: false, + value: { choices: [{ delta: { content: testContent } }] }, + }) + .mockResolvedValueOnce({ done: true }), + }), + } + }) + + const stream = handler.createMessage("system prompt", []) + const firstChunk = await stream.next() + + expect(firstChunk.done).toBe(false) + expect(firstChunk.value).toEqual({ type: "text", text: testContent }) + }) + + test("createMessage should yield usage data from stream", async () => { + mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + next: jest + .fn() + .mockResolvedValueOnce({ + done: false, + value: { choices: [{ delta: {} }], usage: { prompt_tokens: 10, completion_tokens: 20 } }, + }) + .mockResolvedValueOnce({ done: true }), + }), + } + }) + + const stream = handler.createMessage("system prompt", []) + const firstChunk = await stream.next() + + expect(firstChunk.done).toBe(false) + expect(firstChunk.value).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20 }) + }) + + test("createMessage should pass correct parameters to Groq client", async () => { + const modelId: GroqModelId = "llama-3.1-8b-instant" + const modelInfo = groqModels[modelId] + const handlerWithModel = new GroqHandler({ apiModelId: modelId, groqApiKey: "test-groq-api-key" }) + + mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + async next() { + return { done: true } + }, + }), + } + }) + + const systemPrompt = "Test system prompt for Groq" + const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Test message for Groq" }] + + const messageGenerator = handlerWithModel.createMessage(systemPrompt, messages) + await messageGenerator.next() + + expect(mockCreate).toHaveBeenCalledWith( + expect.objectContaining({ + model: modelId, + max_tokens: modelInfo.maxTokens, + temperature: 0.5, + messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]), + stream: true, + stream_options: { include_usage: true }, + }), + ) + }) +}) diff --git a/src/api/providers/base-openai-compatible-provider.ts 
b/src/api/providers/base-openai-compatible-provider.ts new file mode 100644 index 00000000000..82eeb830331 --- /dev/null +++ b/src/api/providers/base-openai-compatible-provider.ts @@ -0,0 +1,129 @@ +import { Anthropic } from "@anthropic-ai/sdk" +import OpenAI from "openai" + +import { ApiHandlerOptions, ModelInfo } from "../../shared/api" +import { ApiStream } from "../transform/stream" +import { convertToOpenAiMessages } from "../transform/openai-format" + +import { SingleCompletionHandler } from "../index" +import { DEFAULT_HEADERS } from "./constants" +import { BaseProvider } from "./base-provider" + +type BaseOpenAiCompatibleProviderOptions<ModelName extends string> = ApiHandlerOptions & { + providerName: string + baseURL: string + defaultProviderModelId: ModelName + providerModels: Record<ModelName, ModelInfo> + defaultTemperature?: number +} + +export abstract class BaseOpenAiCompatibleProvider<ModelName extends string> + extends BaseProvider + implements SingleCompletionHandler +{ + protected readonly providerName: string + protected readonly baseURL: string + protected readonly defaultTemperature: number + protected readonly defaultProviderModelId: ModelName + protected readonly providerModels: Record<ModelName, ModelInfo> + + protected readonly options: ApiHandlerOptions + + private client: OpenAI + + constructor({ + providerName, + baseURL, + defaultProviderModelId, + providerModels, + defaultTemperature, + ...options + }: BaseOpenAiCompatibleProviderOptions<ModelName>) { + super() + + this.providerName = providerName + this.baseURL = baseURL + this.defaultProviderModelId = defaultProviderModelId + this.providerModels = providerModels + this.defaultTemperature = defaultTemperature ?? 0 + + this.options = options + + if (!this.options.apiKey) { + throw new Error("API key is required") + } + + this.client = new OpenAI({ + baseURL, + apiKey: this.options.apiKey, + defaultHeaders: DEFAULT_HEADERS, + }) + } + + override async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream { + const { + id: model, + info: { maxTokens: max_tokens }, + } = this.getModel() + + const temperature = this.options.modelTemperature ?? this.defaultTemperature + + const params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = { + model, + max_tokens, + temperature, + messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)], + stream: true, + stream_options: { include_usage: true }, + } + + const stream = await this.client.chat.completions.create(params) + + for await (const chunk of stream) { + const delta = chunk.choices[0]?.delta + + if (delta?.content) { + yield { + type: "text", + text: delta.content, + } + } + + if (chunk.usage) { + yield { + type: "usage", + inputTokens: chunk.usage.prompt_tokens || 0, + outputTokens: chunk.usage.completion_tokens || 0, + } + } + } + } + + async completePrompt(prompt: string): Promise<string> { + const { id: modelId } = this.getModel() + + try { + const response = await this.client.chat.completions.create({ + model: modelId, + messages: [{ role: "user", content: prompt }], + }) + + return response.choices[0]?.message.content || "" + } catch (error) { + if (error instanceof Error) { + throw new Error(`${this.providerName} completion error: ${error.message}`) + } + + throw error + } + } + + override getModel() { + const id = + this.options.apiModelId && this.options.apiModelId in this.providerModels + ? 
(this.options.apiModelId as ModelName) + : this.defaultProviderModelId + + return { id, info: this.providerModels[id] } + } +} diff --git a/src/api/providers/chutes.ts b/src/api/providers/chutes.ts new file mode 100644 index 00000000000..6f7481f1809 --- /dev/null +++ b/src/api/providers/chutes.ts @@ -0,0 +1,17 @@ +import { ApiHandlerOptions, ChutesModelId, chutesDefaultModelId, chutesModels } from "../../shared/api" + +import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider" + +export class ChutesHandler extends BaseOpenAiCompatibleProvider<ChutesModelId> { + constructor(options: ApiHandlerOptions) { + super({ + ...options, + providerName: "Chutes", + baseURL: "https://llm.chutes.ai/v1", + apiKey: options.chutesApiKey, + defaultProviderModelId: chutesDefaultModelId, + providerModels: chutesModels, + defaultTemperature: 0.5, + }) + } +} diff --git a/src/api/providers/groq.ts b/src/api/providers/groq.ts new file mode 100644 index 00000000000..2f4e763b8e5 --- /dev/null +++ b/src/api/providers/groq.ts @@ -0,0 +1,17 @@ +import { ApiHandlerOptions, GroqModelId, groqDefaultModelId, groqModels } from "../../shared/api" + +import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider" + +export class GroqHandler extends BaseOpenAiCompatibleProvider<GroqModelId> { + constructor(options: ApiHandlerOptions) { + super({ + ...options, + providerName: "Groq", + baseURL: "https://api.groq.com/openai/v1", + apiKey: options.groqApiKey, + defaultProviderModelId: groqDefaultModelId, + providerModels: groqModels, + defaultTemperature: 0.5, + }) + } +} diff --git a/src/exports/roo-code.d.ts b/src/exports/roo-code.d.ts index 386c983cc88..8a7452dfb8d 100644 --- a/src/exports/roo-code.d.ts +++ b/src/exports/roo-code.d.ts @@ -21,6 +21,8 @@ type ProviderSettings = { | "human-relay" | "fake-ai" | "xai" + | "groq" + | "chutes" ) | undefined apiModelId?: string | undefined @@ -120,6 +122,8 @@ type ProviderSettings = { requestyApiKey?: string | undefined requestyModelId?: string | undefined xaiApiKey?: string | undefined + groqApiKey?: string | undefined + chutesApiKey?: string | undefined modelMaxTokens?: number | undefined modelMaxThinkingTokens?: number | undefined includeMaxTokens?: boolean | undefined @@ -158,6 +162,8 @@ type GlobalSettings = { | "human-relay" | "fake-ai" | "xai" + | "groq" + | "chutes" ) | undefined }[] diff --git a/src/exports/types.ts b/src/exports/types.ts index 330f9c8c21e..b3d441e9ab6 100644 --- a/src/exports/types.ts +++ b/src/exports/types.ts @@ -22,6 +22,8 @@ type ProviderSettings = { | "human-relay" | "fake-ai" | "xai" + | "groq" + | "chutes" ) | undefined apiModelId?: string | undefined @@ -121,6 +123,8 @@ type ProviderSettings = { requestyApiKey?: string | undefined requestyModelId?: string | undefined xaiApiKey?: string | undefined + groqApiKey?: string | undefined + chutesApiKey?: string | undefined modelMaxTokens?: number | undefined modelMaxThinkingTokens?: number | undefined includeMaxTokens?: boolean | undefined @@ -161,6 +165,8 @@ type GlobalSettings = { | "human-relay" | "fake-ai" | "xai" + | "groq" + | "chutes" ) | undefined }[] diff --git a/src/i18n/locales/ca/common.json b/src/i18n/locales/ca/common.json index 633b90ec3d0..2bd61dfd930 100644 --- a/src/i18n/locales/ca/common.json +++ b/src/i18n/locales/ca/common.json @@ -89,5 +89,11 @@ "path_placeholder": "D:\\RooCodeStorage", "enter_absolute_path": "Introdueix una ruta completa (p. ex. 
D:\\RooCodeStorage o /home/user/storage)", "enter_valid_path": "Introdueix una ruta vàlida" + }, + "settings": { + "providers": { + "groqApiKey": "Clau API de Groq", + "getGroqApiKey": "Obté la clau API de Groq" + } } } diff --git a/src/i18n/locales/de/common.json b/src/i18n/locales/de/common.json index ceda64bad71..5eb6b05e8ff 100644 --- a/src/i18n/locales/de/common.json +++ b/src/i18n/locales/de/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Was soll Roo tun?", "task_placeholder": "Gib deine Aufgabe hier ein" + }, + "settings": { + "providers": { + "groqApiKey": "Groq API-Schlüssel", + "getGroqApiKey": "Groq API-Schlüssel erhalten" + } } } diff --git a/src/i18n/locales/es/common.json b/src/i18n/locales/es/common.json index 2bfb43055a8..beea1ba3a9d 100644 --- a/src/i18n/locales/es/common.json +++ b/src/i18n/locales/es/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "¿Qué debe hacer Roo?", "task_placeholder": "Escribe tu tarea aquí" + }, + "settings": { + "providers": { + "groqApiKey": "Clave API de Groq", + "getGroqApiKey": "Obtener clave API de Groq" + } } } diff --git a/src/i18n/locales/fr/common.json b/src/i18n/locales/fr/common.json index 7399432c6a8..c34d0105a44 100644 --- a/src/i18n/locales/fr/common.json +++ b/src/i18n/locales/fr/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Que doit faire Roo ?", "task_placeholder": "Écris ta tâche ici" + }, + "settings": { + "providers": { + "groqApiKey": "Clé API Groq", + "getGroqApiKey": "Obtenir la clé API Groq" + } } } diff --git a/src/i18n/locales/hi/common.json b/src/i18n/locales/hi/common.json index bf5421eff8f..07438c873b1 100644 --- a/src/i18n/locales/hi/common.json +++ b/src/i18n/locales/hi/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Roo को क्या करना है?", "task_placeholder": "अपना कार्य यहाँ लिखें" + }, + "settings": { + "providers": { + "groqApiKey": "ग्रोक एपीआई कुंजी", + "getGroqApiKey": "ग्रोक एपीआई कुंजी प्राप्त करें" + } } } diff --git a/src/i18n/locales/it/common.json b/src/i18n/locales/it/common.json index 69e2c2123f1..476285169f5 100644 --- a/src/i18n/locales/it/common.json +++ b/src/i18n/locales/it/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Cosa deve fare Roo?", "task_placeholder": "Scrivi il tuo compito qui" + }, + "settings": { + "providers": { + "groqApiKey": "Chiave API Groq", + "getGroqApiKey": "Ottieni chiave API Groq" + } } } diff --git a/src/i18n/locales/ja/common.json b/src/i18n/locales/ja/common.json index 6f40c8e03d5..f44469d0c90 100644 --- a/src/i18n/locales/ja/common.json +++ b/src/i18n/locales/ja/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Rooにどんなことをさせますか?", "task_placeholder": "タスクをここに入力してください" + }, + "settings": { + "providers": { + "groqApiKey": "Groq APIキー", + "getGroqApiKey": "Groq APIキーを取得" + } } } diff --git a/src/i18n/locales/ko/common.json b/src/i18n/locales/ko/common.json index 9026315da2e..944d9ba19b8 100644 --- a/src/i18n/locales/ko/common.json +++ b/src/i18n/locales/ko/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Roo에게 무엇을 시킬까요?", "task_placeholder": "여기에 작업을 입력하세요" + }, + "settings": { + "providers": { + "groqApiKey": "Groq API 키", + "getGroqApiKey": "Groq API 키 받기" + } } } diff --git a/src/i18n/locales/pl/common.json b/src/i18n/locales/pl/common.json index 49f51cefff1..46ed243b496 100644 --- a/src/i18n/locales/pl/common.json +++ b/src/i18n/locales/pl/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Co ma zrobić Roo?", "task_placeholder": "Wpisz swoje zadanie tutaj" + }, + "settings": { + "providers": { 
+ "groqApiKey": "Klucz API Groq", + "getGroqApiKey": "Uzyskaj klucz API Groq" + } } } diff --git a/src/i18n/locales/pt-BR/common.json b/src/i18n/locales/pt-BR/common.json index 80112a91ab8..a6588b2fdad 100644 --- a/src/i18n/locales/pt-BR/common.json +++ b/src/i18n/locales/pt-BR/common.json @@ -89,5 +89,11 @@ "path_placeholder": "D:\\RooCodeStorage", "enter_absolute_path": "Por favor, digite um caminho absoluto (ex: D:\\RooCodeStorage ou /home/user/storage)", "enter_valid_path": "Por favor, digite um caminho válido" + }, + "settings": { + "providers": { + "groqApiKey": "Chave de API Groq", + "getGroqApiKey": "Obter chave de API Groq" + } } } diff --git a/src/i18n/locales/ru/common.json b/src/i18n/locales/ru/common.json index 80829e138ce..baef76ea78c 100644 --- a/src/i18n/locales/ru/common.json +++ b/src/i18n/locales/ru/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Что должен сделать Roo?", "task_placeholder": "Введите вашу задачу здесь" + }, + "settings": { + "providers": { + "groqApiKey": "Ключ API Groq", + "getGroqApiKey": "Получить ключ API Groq" + } } } diff --git a/src/i18n/locales/tr/common.json b/src/i18n/locales/tr/common.json index 61b8e12fb5a..2c4ec8b3548 100644 --- a/src/i18n/locales/tr/common.json +++ b/src/i18n/locales/tr/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Roo ne yapsın?", "task_placeholder": "Görevini buraya yaz" + }, + "settings": { + "providers": { + "groqApiKey": "Groq API Anahtarı", + "getGroqApiKey": "Groq API Anahtarı Al" + } } } diff --git a/src/i18n/locales/vi/common.json b/src/i18n/locales/vi/common.json index 8945e9e098e..499309df750 100644 --- a/src/i18n/locales/vi/common.json +++ b/src/i18n/locales/vi/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "Bạn muốn Roo làm gì?", "task_placeholder": "Nhập nhiệm vụ của bạn ở đây" + }, + "settings": { + "providers": { + "groqApiKey": "Khóa API Groq", + "getGroqApiKey": "Lấy khóa API Groq" + } } } diff --git a/src/i18n/locales/zh-CN/common.json b/src/i18n/locales/zh-CN/common.json index 2fc49c9b378..ac3754ccd64 100644 --- a/src/i18n/locales/zh-CN/common.json +++ b/src/i18n/locales/zh-CN/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "让Roo做什么?", "task_placeholder": "在这里输入任务" + }, + "settings": { + "providers": { + "groqApiKey": "Groq API 密钥", + "getGroqApiKey": "获取 Groq API 密钥" + } } } diff --git a/src/i18n/locales/zh-TW/common.json b/src/i18n/locales/zh-TW/common.json index a51cfa0e9a4..93c59acbbbb 100644 --- a/src/i18n/locales/zh-TW/common.json +++ b/src/i18n/locales/zh-TW/common.json @@ -89,5 +89,11 @@ "input": { "task_prompt": "讓 Roo 做什麼?", "task_placeholder": "在這裡輸入工作" + }, + "settings": { + "providers": { + "groqApiKey": "Groq API 金鑰", + "getGroqApiKey": "取得 Groq API 金鑰" + } } } diff --git a/src/schemas/index.ts b/src/schemas/index.ts index c6e34fe3955..e69fff6dcc8 100644 --- a/src/schemas/index.ts +++ b/src/schemas/index.ts @@ -29,6 +29,8 @@ export const providerNames = [ "human-relay", "fake-ai", "xai", + "groq", + "chutes", ] as const export const providerNamesSchema = z.enum(providerNames) @@ -423,6 +425,10 @@ export const providerSettingsSchema = z.object({ requestyModelId: z.string().optional(), // X.AI (Grok) xaiApiKey: z.string().optional(), + // Groq + groqApiKey: z.string().optional(), + // Chutes AI + chutesApiKey: z.string().optional(), // Claude 3.7 Sonnet Thinking modelMaxTokens: z.number().optional(), modelMaxThinkingTokens: z.number().optional(), @@ -529,6 +535,10 @@ const providerSettingsRecord: ProviderSettingsRecord = { fakeAi: undefined, // X.AI 
(Grok) xaiApiKey: undefined, + // Groq + groqApiKey: undefined, + // Chutes AI + chutesApiKey: undefined, } export const PROVIDER_SETTINGS_KEYS = Object.keys(providerSettingsRecord) as Keys[] @@ -721,6 +731,8 @@ export type SecretState = Pick< | "unboundApiKey" | "requestyApiKey" | "xaiApiKey" + | "groqApiKey" + | "chutesApiKey" > type SecretStateRecord = Record, undefined> @@ -740,6 +752,8 @@ const secretStateRecord: SecretStateRecord = { unboundApiKey: undefined, requestyApiKey: undefined, xaiApiKey: undefined, + groqApiKey: undefined, + chutesApiKey: undefined, } export const SECRET_STATE_KEYS = Object.keys(secretStateRecord) as Keys[] diff --git a/src/shared/api.ts b/src/shared/api.ts index 17bff9db47e..0acb82d45e4 100644 --- a/src/shared/api.ts +++ b/src/shared/api.ts @@ -1400,6 +1400,251 @@ export const vscodeLlmModels = { } > +// Groq +// https://console.groq.com/docs/models +export type GroqModelId = + | "llama-3.1-8b-instant" + | "llama-3.3-70b-versatile" + | "meta-llama/llama-4-scout-17b-16e-instruct" + | "meta-llama/llama-4-maverick-17b-128e-instruct" + | "mistral-saba-24b" + | "qwen-qwq-32b" + | "deepseek-r1-distill-llama-70b" +export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile" // Defaulting to Llama3 70B Versatile +export const groqModels = { + // Models based on API response: https://api.groq.com/openai/v1/models + "llama-3.1-8b-instant": { + maxTokens: 131072, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Meta Llama 3.1 8B Instant model, 128K context.", + }, + "llama-3.3-70b-versatile": { + maxTokens: 32768, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Meta Llama 3.3 70B Versatile model, 128K context.", + }, + "meta-llama/llama-4-scout-17b-16e-instruct": { + maxTokens: 8192, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Meta Llama 4 Scout 17B Instruct model, 128K context.", + }, + "meta-llama/llama-4-maverick-17b-128e-instruct": { + maxTokens: 8192, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Meta Llama 4 Maverick 17B Instruct model, 128K context.", + }, + "mistral-saba-24b": { + maxTokens: 32768, + contextWindow: 32768, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Mistral Saba 24B model, 32K context.", + }, + "qwen-qwq-32b": { + maxTokens: 131072, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Alibaba Qwen QwQ 32B model, 128K context.", + }, + "deepseek-r1-distill-llama-70b": { + maxTokens: 131072, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "DeepSeek R1 Distill Llama 70B model, 128K context.", + }, +} as const satisfies Record + +// Chutes AI +// https://llm.chutes.ai/v1 (OpenAI compatible) +export type ChutesModelId = + | "deepseek-ai/DeepSeek-R1" + | "deepseek-ai/DeepSeek-V3" + | "unsloth/Llama-3.3-70B-Instruct" + | "chutesai/Llama-4-Scout-17B-16E-Instruct" + | "unsloth/Mistral-Nemo-Instruct-2407" + | "unsloth/gemma-3-12b-it" + | "NousResearch/DeepHermes-3-Llama-3-8B-Preview" + | "unsloth/gemma-3-4b-it" + | "nvidia/Llama-3_3-Nemotron-Super-49B-v1" + | 
"nvidia/Llama-3_1-Nemotron-Ultra-253B-v1" + | "chutesai/Llama-4-Maverick-17B-128E-Instruct-FP8" + | "deepseek-ai/DeepSeek-V3-Base" + | "deepseek-ai/DeepSeek-R1-Zero" + | "deepseek-ai/DeepSeek-V3-0324" + | "microsoft/MAI-DS-R1-FP8" + | "tngtech/DeepSeek-R1T-Chimera" +export const chutesDefaultModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1" +export const chutesModels = { + "deepseek-ai/DeepSeek-R1": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "DeepSeek R1 model.", + }, + "deepseek-ai/DeepSeek-V3": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "DeepSeek V3 model.", + }, + "unsloth/Llama-3.3-70B-Instruct": { + maxTokens: 32768, // From Groq + contextWindow: 131072, // From Groq + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Unsloth Llama 3.3 70B Instruct model.", + }, + "chutesai/Llama-4-Scout-17B-16E-Instruct": { + maxTokens: 32768, + contextWindow: 512000, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "ChutesAI Llama 4 Scout 17B Instruct model, 512K context.", + }, + "unsloth/Mistral-Nemo-Instruct-2407": { + maxTokens: 32768, + contextWindow: 128000, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Unsloth Mistral Nemo Instruct model.", + }, + "unsloth/gemma-3-12b-it": { + maxTokens: 32768, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Unsloth Gemma 3 12B IT model.", + }, + "NousResearch/DeepHermes-3-Llama-3-8B-Preview": { + maxTokens: 32768, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Nous DeepHermes 3 Llama 3 8B Preview model.", + }, + "unsloth/gemma-3-4b-it": { + maxTokens: 32768, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Unsloth Gemma 3 4B IT model.", + }, + "nvidia/Llama-3_3-Nemotron-Super-49B-v1": { + maxTokens: 32768, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Nvidia Llama 3.3 Nemotron Super 49B model.", + }, + "nvidia/Llama-3_1-Nemotron-Ultra-253B-v1": { + maxTokens: 32768, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Nvidia Llama 3.1 Nemotron Ultra 253B model.", + }, + "chutesai/Llama-4-Maverick-17B-128E-Instruct-FP8": { + maxTokens: 32768, + contextWindow: 256000, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "ChutesAI Llama 4 Maverick 17B Instruct FP8 model.", + }, + "deepseek-ai/DeepSeek-V3-Base": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "DeepSeek V3 Base model.", + }, + "deepseek-ai/DeepSeek-R1-Zero": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "DeepSeek R1 Zero model.", + }, + "deepseek-ai/DeepSeek-V3-0324": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + 
supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "DeepSeek V3 (0324) model.", + }, + "microsoft/MAI-DS-R1-FP8": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "Microsoft MAI-DS-R1 FP8 model.", + }, + "tngtech/DeepSeek-R1T-Chimera": { + maxTokens: 32768, + contextWindow: 163840, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0, + outputPrice: 0, + description: "TNGTech DeepSeek R1T Chimera model.", + }, +} as const satisfies Record + /** * Constants */ diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx index 2b0d363221d..5f2d50b94c9 100644 --- a/webview-ui/src/components/settings/ApiOptions.tsx +++ b/webview-ui/src/components/settings/ApiOptions.tsx @@ -1613,6 +1613,49 @@ const ApiOptions = ({ )} + {selectedProvider === "groq" && ( + <> + + + +
+ {t("settings:providers.apiKeyStorageNotice")} +
+ {!apiConfiguration?.groqApiKey && ( + + {t("settings:providers.getGroqApiKey")} + + )} + + )} + + {selectedProvider === "chutes" && ( + <> + + + +
+ {t("settings:providers.apiKeyStorageNotice")} +
+ {/* Add a link to get Chutes API key if available */} + {/* {!apiConfiguration?.chutesApiKey && ( + + {t("settings:providers.getChutesApiKey")} + + )} */} + + )} + {selectedProvider === "unbound" && ( <> a.label.localeCompare(b.label)) export const VERTEX_REGIONS = [ diff --git a/webview-ui/src/components/ui/hooks/useSelectedModel.ts b/webview-ui/src/components/ui/hooks/useSelectedModel.ts index 6b5b01b9186..b02a4d024b0 100644 --- a/webview-ui/src/components/ui/hooks/useSelectedModel.ts +++ b/webview-ui/src/components/ui/hooks/useSelectedModel.ts @@ -19,6 +19,10 @@ import { vertexModels, xaiDefaultModelId, xaiModels, + groqModels, + groqDefaultModelId, + chutesModels, + chutesDefaultModelId, vscodeLlmModels, vscodeLlmDefaultModelId, openRouterDefaultModelId, @@ -84,6 +88,10 @@ function getSelectedModelInfo({ return routerModels.unbound[id] ?? routerModels.unbound[unboundDefaultModelId] case "xai": return xaiModels[id as keyof typeof xaiModels] ?? xaiModels[xaiDefaultModelId] + case "groq": + return groqModels[id as keyof typeof groqModels] ?? groqModels[groqDefaultModelId] + case "chutes": + return chutesModels[id as keyof typeof chutesModels] ?? chutesModels[chutesDefaultModelId] case "bedrock": // Special case for custom ARN. if (id === "custom-arn") { diff --git a/webview-ui/src/i18n/locales/ca/settings.json b/webview-ui/src/i18n/locales/ca/settings.json index 37ee6faa2b9..ca7546b2a5f 100644 --- a/webview-ui/src/i18n/locales/ca/settings.json +++ b/webview-ui/src/i18n/locales/ca/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Clau API d'Anthropic", "getAnthropicApiKey": "Obtenir clau API d'Anthropic", "anthropicUseAuthToken": "Passar la clau API d'Anthropic com a capçalera d'autorització en lloc de X-Api-Key", + "chutesApiKey": "Clau API de Chutes", + "getChutesApiKey": "Obtenir clau API de Chutes", "deepSeekApiKey": "Clau API de DeepSeek", "getDeepSeekApiKey": "Obtenir clau API de DeepSeek", "geminiApiKey": "Clau API de Gemini", + "getGroqApiKey": "Obtenir clau API de Groq", + "groqApiKey": "Clau API de Groq", "getGeminiApiKey": "Obtenir clau API de Gemini", "openAiApiKey": "Clau API d'OpenAI", "openAiBaseUrl": "URL base", diff --git a/webview-ui/src/i18n/locales/de/settings.json b/webview-ui/src/i18n/locales/de/settings.json index 8a8c006dfc1..db4398402fa 100644 --- a/webview-ui/src/i18n/locales/de/settings.json +++ b/webview-ui/src/i18n/locales/de/settings.json @@ -123,10 +123,14 @@ "anthropicApiKey": "Anthropic API-Schlüssel", "getAnthropicApiKey": "Anthropic API-Schlüssel erhalten", "anthropicUseAuthToken": "Anthropic API-Schlüssel als Authorization-Header anstelle von X-Api-Key übergeben", + "chutesApiKey": "Chutes API-Schlüssel", + "getChutesApiKey": "Chutes API-Schlüssel erhalten", "deepSeekApiKey": "DeepSeek API-Schlüssel", "getDeepSeekApiKey": "DeepSeek API-Schlüssel erhalten", "geminiApiKey": "Gemini API-Schlüssel", "getGeminiApiKey": "Gemini API-Schlüssel erhalten", + "getGroqApiKey": "Groq API-Schlüssel erhalten", + "groqApiKey": "Groq API-Schlüssel", "openAiApiKey": "OpenAI API-Schlüssel", "openAiBaseUrl": "Basis-URL", "getOpenAiApiKey": "OpenAI API-Schlüssel erhalten", diff --git a/webview-ui/src/i18n/locales/en/settings.json b/webview-ui/src/i18n/locales/en/settings.json index 62f243b888c..b817318df65 100644 --- a/webview-ui/src/i18n/locales/en/settings.json +++ b/webview-ui/src/i18n/locales/en/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API Key", "getAnthropicApiKey": "Get Anthropic API Key", "anthropicUseAuthToken": "Pass Anthropic API Key 
as Authorization header instead of X-Api-Key", + "chutesApiKey": "Chutes API Key", + "getChutesApiKey": "Get Chutes API Key", "deepSeekApiKey": "DeepSeek API Key", "getDeepSeekApiKey": "Get DeepSeek API Key", "geminiApiKey": "Gemini API Key", + "getGroqApiKey": "Get Groq API Key", + "groqApiKey": "Groq API Key", "getGeminiApiKey": "Get Gemini API Key", "openAiApiKey": "OpenAI API Key", "openAiBaseUrl": "Base URL", diff --git a/webview-ui/src/i18n/locales/es/settings.json b/webview-ui/src/i18n/locales/es/settings.json index 406aa002d86..4a7191ffeb4 100644 --- a/webview-ui/src/i18n/locales/es/settings.json +++ b/webview-ui/src/i18n/locales/es/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Clave API de Anthropic", "getAnthropicApiKey": "Obtener clave API de Anthropic", "anthropicUseAuthToken": "Pasar la clave API de Anthropic como encabezado de autorización en lugar de X-Api-Key", + "chutesApiKey": "Clave API de Chutes", + "getChutesApiKey": "Obtener clave API de Chutes", "deepSeekApiKey": "Clave API de DeepSeek", "getDeepSeekApiKey": "Obtener clave API de DeepSeek", "geminiApiKey": "Clave API de Gemini", + "getGroqApiKey": "Obtener clave API de Groq", + "groqApiKey": "Clave API de Groq", "getGeminiApiKey": "Obtener clave API de Gemini", "openAiApiKey": "Clave API de OpenAI", "openAiBaseUrl": "URL base", diff --git a/webview-ui/src/i18n/locales/fr/settings.json b/webview-ui/src/i18n/locales/fr/settings.json index 4ec79cce9c2..b2a1ef33e45 100644 --- a/webview-ui/src/i18n/locales/fr/settings.json +++ b/webview-ui/src/i18n/locales/fr/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Clé API Anthropic", "getAnthropicApiKey": "Obtenir la clé API Anthropic", "anthropicUseAuthToken": "Passer la clé API Anthropic comme en-tête d'autorisation au lieu de X-Api-Key", + "chutesApiKey": "Clé API Chutes", + "getChutesApiKey": "Obtenir la clé API Chutes", "deepSeekApiKey": "Clé API DeepSeek", "getDeepSeekApiKey": "Obtenir la clé API DeepSeek", "geminiApiKey": "Clé API Gemini", + "getGroqApiKey": "Obtenir la clé API Groq", + "groqApiKey": "Clé API Groq", "getGeminiApiKey": "Obtenir la clé API Gemini", "openAiApiKey": "Clé API OpenAI", "openAiBaseUrl": "URL de base", diff --git a/webview-ui/src/i18n/locales/hi/settings.json b/webview-ui/src/i18n/locales/hi/settings.json index 597c4ddceaa..f62d6373564 100644 --- a/webview-ui/src/i18n/locales/hi/settings.json +++ b/webview-ui/src/i18n/locales/hi/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API कुंजी", "getAnthropicApiKey": "Anthropic API कुंजी प्राप्त करें", "anthropicUseAuthToken": "X-Api-Key के बजाय Anthropic API कुंजी को Authorization हेडर के रूप में पास करें", + "chutesApiKey": "Chutes API कुंजी", + "getChutesApiKey": "Chutes API कुंजी प्राप्त करें", "deepSeekApiKey": "DeepSeek API कुंजी", "getDeepSeekApiKey": "DeepSeek API कुंजी प्राप्त करें", "geminiApiKey": "Gemini API कुंजी", + "getGroqApiKey": "Groq API कुंजी प्राप्त करें", + "groqApiKey": "Groq API कुंजी", "getGeminiApiKey": "Gemini API कुंजी प्राप्त करें", "openAiApiKey": "OpenAI API कुंजी", "openAiBaseUrl": "बेस URL", diff --git a/webview-ui/src/i18n/locales/it/settings.json b/webview-ui/src/i18n/locales/it/settings.json index a151ae5dab1..1c20c2d5957 100644 --- a/webview-ui/src/i18n/locales/it/settings.json +++ b/webview-ui/src/i18n/locales/it/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Chiave API Anthropic", "getAnthropicApiKey": "Ottieni chiave API Anthropic", "anthropicUseAuthToken": "Passa la chiave API Anthropic come header di autorizzazione invece 
di X-Api-Key", + "chutesApiKey": "Chiave API Chutes", + "getChutesApiKey": "Ottieni chiave API Chutes", "deepSeekApiKey": "Chiave API DeepSeek", "getDeepSeekApiKey": "Ottieni chiave API DeepSeek", "geminiApiKey": "Chiave API Gemini", + "getGroqApiKey": "Ottieni chiave API Groq", + "groqApiKey": "Chiave API Groq", "getGeminiApiKey": "Ottieni chiave API Gemini", "openAiApiKey": "Chiave API OpenAI", "openAiBaseUrl": "URL base", diff --git a/webview-ui/src/i18n/locales/ja/settings.json b/webview-ui/src/i18n/locales/ja/settings.json index 1d027d93a55..1ed0ab7c269 100644 --- a/webview-ui/src/i18n/locales/ja/settings.json +++ b/webview-ui/src/i18n/locales/ja/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic APIキー", "getAnthropicApiKey": "Anthropic APIキーを取得", "anthropicUseAuthToken": "Anthropic APIキーをX-Api-Keyの代わりにAuthorizationヘッダーとして渡す", + "chutesApiKey": "Chutes APIキー", + "getChutesApiKey": "Chutes APIキーを取得", "deepSeekApiKey": "DeepSeek APIキー", "getDeepSeekApiKey": "DeepSeek APIキーを取得", "geminiApiKey": "Gemini APIキー", + "getGroqApiKey": "Groq APIキーを取得", + "groqApiKey": "Groq APIキー", "getGeminiApiKey": "Gemini APIキーを取得", "openAiApiKey": "OpenAI APIキー", "openAiBaseUrl": "ベースURL", diff --git a/webview-ui/src/i18n/locales/ko/settings.json b/webview-ui/src/i18n/locales/ko/settings.json index 0d5b884cef8..b0ceaee5304 100644 --- a/webview-ui/src/i18n/locales/ko/settings.json +++ b/webview-ui/src/i18n/locales/ko/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API 키", "getAnthropicApiKey": "Anthropic API 키 받기", "anthropicUseAuthToken": "X-Api-Key 대신 Authorization 헤더로 Anthropic API 키 전달", + "chutesApiKey": "Chutes API 키", + "getChutesApiKey": "Chutes API 키 받기", "deepSeekApiKey": "DeepSeek API 키", "getDeepSeekApiKey": "DeepSeek API 키 받기", "geminiApiKey": "Gemini API 키", + "getGroqApiKey": "Groq API 키 받기", + "groqApiKey": "Groq API 키", "getGeminiApiKey": "Gemini API 키 받기", "openAiApiKey": "OpenAI API 키", "openAiBaseUrl": "기본 URL", diff --git a/webview-ui/src/i18n/locales/pl/settings.json b/webview-ui/src/i18n/locales/pl/settings.json index 1a1ac06e647..c4f28f19f61 100644 --- a/webview-ui/src/i18n/locales/pl/settings.json +++ b/webview-ui/src/i18n/locales/pl/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Klucz API Anthropic", "getAnthropicApiKey": "Uzyskaj klucz API Anthropic", "anthropicUseAuthToken": "Przekaż klucz API Anthropic jako nagłówek Authorization zamiast X-Api-Key", + "chutesApiKey": "Klucz API Chutes", + "getChutesApiKey": "Uzyskaj klucz API Chutes", "deepSeekApiKey": "Klucz API DeepSeek", "getDeepSeekApiKey": "Uzyskaj klucz API DeepSeek", "geminiApiKey": "Klucz API Gemini", + "getGroqApiKey": "Uzyskaj klucz API Groq", + "groqApiKey": "Klucz API Groq", "getGeminiApiKey": "Uzyskaj klucz API Gemini", "openAiApiKey": "Klucz API OpenAI", "openAiBaseUrl": "URL bazowy", diff --git a/webview-ui/src/i18n/locales/pt-BR/settings.json b/webview-ui/src/i18n/locales/pt-BR/settings.json index cf4cd9dada6..76b3f597d29 100644 --- a/webview-ui/src/i18n/locales/pt-BR/settings.json +++ b/webview-ui/src/i18n/locales/pt-BR/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Chave de API Anthropic", "getAnthropicApiKey": "Obter chave de API Anthropic", "anthropicUseAuthToken": "Passar a chave de API Anthropic como cabeçalho Authorization em vez de X-Api-Key", + "chutesApiKey": "Chave de API Chutes", + "getChutesApiKey": "Obter chave de API Chutes", "deepSeekApiKey": "Chave de API DeepSeek", "getDeepSeekApiKey": "Obter chave de API DeepSeek", "geminiApiKey": "Chave de API 
Gemini", + "getGroqApiKey": "Obter chave de API Groq", + "groqApiKey": "Chave de API Groq", "getGeminiApiKey": "Obter chave de API Gemini", "openAiApiKey": "Chave de API OpenAI", "openAiBaseUrl": "URL Base", diff --git a/webview-ui/src/i18n/locales/ru/settings.json b/webview-ui/src/i18n/locales/ru/settings.json index 80aae23aac5..f46d1d98374 100644 --- a/webview-ui/src/i18n/locales/ru/settings.json +++ b/webview-ui/src/i18n/locales/ru/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API-ключ", "getAnthropicApiKey": "Получить Anthropic API-ключ", "anthropicUseAuthToken": "Передавать Anthropic API-ключ как Authorization-заголовок вместо X-Api-Key", + "chutesApiKey": "Chutes API-ключ", + "getChutesApiKey": "Получить Chutes API-ключ", "deepSeekApiKey": "DeepSeek API-ключ", "getDeepSeekApiKey": "Получить DeepSeek API-ключ", "geminiApiKey": "Gemini API-ключ", + "getGroqApiKey": "Получить Groq API-ключ", + "groqApiKey": "Groq API-ключ", "getGeminiApiKey": "Получить Gemini API-ключ", "openAiApiKey": "OpenAI API-ключ", "openAiBaseUrl": "Базовый URL", diff --git a/webview-ui/src/i18n/locales/tr/settings.json b/webview-ui/src/i18n/locales/tr/settings.json index e5a1fc0375f..580e35ee043 100644 --- a/webview-ui/src/i18n/locales/tr/settings.json +++ b/webview-ui/src/i18n/locales/tr/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API Anahtarı", "getAnthropicApiKey": "Anthropic API Anahtarı Al", "anthropicUseAuthToken": "Anthropic API Anahtarını X-Api-Key yerine Authorization başlığı olarak geçir", + "chutesApiKey": "Chutes API Anahtarı", + "getChutesApiKey": "Chutes API Anahtarı Al", "deepSeekApiKey": "DeepSeek API Anahtarı", "getDeepSeekApiKey": "DeepSeek API Anahtarı Al", "geminiApiKey": "Gemini API Anahtarı", + "getGroqApiKey": "Groq API Anahtarı Al", + "groqApiKey": "Groq API Anahtarı", "getGeminiApiKey": "Gemini API Anahtarı Al", "openAiApiKey": "OpenAI API Anahtarı", "openAiBaseUrl": "Temel URL", diff --git a/webview-ui/src/i18n/locales/vi/settings.json b/webview-ui/src/i18n/locales/vi/settings.json index 2c6c7f34bf3..51174c4d4f4 100644 --- a/webview-ui/src/i18n/locales/vi/settings.json +++ b/webview-ui/src/i18n/locales/vi/settings.json @@ -122,9 +122,13 @@ "anthropicApiKey": "Khóa API Anthropic", "getAnthropicApiKey": "Lấy khóa API Anthropic", "anthropicUseAuthToken": "Truyền khóa API Anthropic dưới dạng tiêu đề Authorization thay vì X-Api-Key", + "chutesApiKey": "Khóa API Chutes", + "getChutesApiKey": "Lấy khóa API Chutes", "deepSeekApiKey": "Khóa API DeepSeek", "getDeepSeekApiKey": "Lấy khóa API DeepSeek", "geminiApiKey": "Khóa API Gemini", + "getGroqApiKey": "Lấy khóa API Groq", + "groqApiKey": "Khóa API Groq", "getGeminiApiKey": "Lấy khóa API Gemini", "openAiApiKey": "Khóa API OpenAI", "openAiBaseUrl": "URL cơ sở", diff --git a/webview-ui/src/i18n/locales/zh-CN/settings.json b/webview-ui/src/i18n/locales/zh-CN/settings.json index b9f32090e3e..6a706361e2f 100644 --- a/webview-ui/src/i18n/locales/zh-CN/settings.json +++ b/webview-ui/src/i18n/locales/zh-CN/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API 密钥", "getAnthropicApiKey": "获取 Anthropic API 密钥", "anthropicUseAuthToken": "将 Anthropic API 密钥作为 Authorization 标头传递,而不是 X-Api-Key", + "chutesApiKey": "Chutes API 密钥", + "getChutesApiKey": "获取 Chutes API 密钥", "deepSeekApiKey": "DeepSeek API 密钥", "getDeepSeekApiKey": "获取 DeepSeek API 密钥", "geminiApiKey": "Gemini API 密钥", + "getGroqApiKey": "获取 Groq API 密钥", + "groqApiKey": "Groq API 密钥", "getGeminiApiKey": "获取 Gemini API 密钥", "openAiApiKey": 
"OpenAI API 密钥", "openAiBaseUrl": "OpenAI 基础 URL", diff --git a/webview-ui/src/i18n/locales/zh-TW/settings.json b/webview-ui/src/i18n/locales/zh-TW/settings.json index 508df804ac1..75afc650dd6 100644 --- a/webview-ui/src/i18n/locales/zh-TW/settings.json +++ b/webview-ui/src/i18n/locales/zh-TW/settings.json @@ -123,9 +123,13 @@ "anthropicApiKey": "Anthropic API 金鑰", "getAnthropicApiKey": "取得 Anthropic API 金鑰", "anthropicUseAuthToken": "將 Anthropic API 金鑰作為 Authorization 標頭傳遞,而非使用 X-Api-Key", + "chutesApiKey": "Chutes API 金鑰", + "getChutesApiKey": "取得 Chutes API 金鑰", "deepSeekApiKey": "DeepSeek API 金鑰", "getDeepSeekApiKey": "取得 DeepSeek API 金鑰", "geminiApiKey": "Gemini API 金鑰", + "getGroqApiKey": "取得 Groq API 金鑰", + "groqApiKey": "Groq API 金鑰", "getGeminiApiKey": "取得 Gemini API 金鑰", "openAiApiKey": "OpenAI API 金鑰", "openAiBaseUrl": "基礎 URL",