diff --git a/packages/types/src/providers/sambanova.ts b/packages/types/src/providers/sambanova.ts
index bed143f6e5c..f339d8bcab0 100644
--- a/packages/types/src/providers/sambanova.ts
+++ b/packages/types/src/providers/sambanova.ts
@@ -6,10 +6,12 @@ export type SambaNovaModelId =
 	| "Meta-Llama-3.3-70B-Instruct"
 	| "DeepSeek-R1"
 	| "DeepSeek-V3-0324"
+	| "DeepSeek-V3.1"
 	| "DeepSeek-R1-Distill-Llama-70B"
 	| "Llama-4-Maverick-17B-128E-Instruct"
 	| "Llama-3.3-Swallow-70B-Instruct-v0.4"
 	| "Qwen3-32B"
+	| "gpt-oss-120b"
 
 export const sambaNovaDefaultModelId: SambaNovaModelId = "Meta-Llama-3.3-70B-Instruct"
 
@@ -51,6 +53,15 @@ export const sambaNovaModels = {
 		outputPrice: 4.5,
 		description: "DeepSeek V3 model with 32K context window.",
 	},
+	"DeepSeek-V3.1": {
+		maxTokens: 8192,
+		contextWindow: 32768,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 3.0,
+		outputPrice: 4.5,
+		description: "DeepSeek V3.1 model with 32K context window.",
+	},
 	"DeepSeek-R1-Distill-Llama-70B": {
 		maxTokens: 8192,
 		contextWindow: 131072,
@@ -87,4 +98,13 @@ export const sambaNovaModels = {
 		outputPrice: 0.8,
 		description: "Alibaba Qwen 3 32B model with 8K context window.",
 	},
+	"gpt-oss-120b": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0.22,
+		outputPrice: 0.59,
+		description: "OpenAI gpt-oss-120b model with 128K context window.",
+	},
 } as const satisfies Record<string, ModelInfo>
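
For reference, a minimal sketch (not part of the diff) of how a consumer could resolve one of the newly added entries against the map above. It assumes the sambaNovaModels, sambaNovaDefaultModelId, and SambaNovaModelId exports shown in the diff; the import path is hypothetical and should be replaced with the package's actual entry point.

// Sketch only: look up model metadata by id, falling back to the default
// model id when the given id is not a key of the map added above.
// The import path "@roo-code/types" is an assumption, not confirmed by the diff.
import {
	sambaNovaModels,
	sambaNovaDefaultModelId,
	type SambaNovaModelId,
} from "@roo-code/types"

function getSambaNovaModel(id?: string) {
	const modelId: SambaNovaModelId =
		id && id in sambaNovaModels ? (id as SambaNovaModelId) : sambaNovaDefaultModelId
	return { id: modelId, info: sambaNovaModels[modelId] }
}

// Example: getSambaNovaModel("gpt-oss-120b").info.contextWindow -> 131072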