diff --git a/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3b.toml b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3b.toml
new file mode 100644
index 000000000..7a37f78f0
--- /dev/null
+++ b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3b.toml
@@ -0,0 +1,16 @@
+name = "Tongyi DeepResearch 30B A3B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3bfree.toml b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3bfree.toml
new file mode 100644
index 000000000..76a579250
--- /dev/null
+++ b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3bfree.toml
@@ -0,0 +1,16 @@
+name = "Tongyi DeepResearch 30B A3B (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/amazon-nova-premier-v1.toml b/providers/gatewayz/models/amazon-nova-premier-v1.toml
new file mode 100644
index 000000000..51fc408b9
--- /dev/null
+++ b/providers/gatewayz/models/amazon-nova-premier-v1.toml
@@ -0,0 +1,16 @@
+name = "Amazon: Nova Premier 1.0"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/anthropic-claude-haiku-4.5.toml b/providers/gatewayz/models/anthropic-claude-haiku-4.5.toml
new file mode 100644
index 000000000..5abe80115
--- /dev/null
+++ b/providers/gatewayz/models/anthropic-claude-haiku-4.5.toml
@@ -0,0 +1,16 @@
+name = "Anthropic: Claude Haiku 4.5"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/anthropic-claude-sonnet-4.5.toml b/providers/gatewayz/models/anthropic-claude-sonnet-4.5.toml
new file mode 100644
index 000000000..657e1d98a
--- /dev/null
+++ b/providers/gatewayz/models/anthropic-claude-sonnet-4.5.toml
@@ -0,0 +1,16 @@
+name = "Anthropic: Claude Sonnet 4.5"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/arcee-ai-afm-4.5b.toml b/providers/gatewayz/models/arcee-ai-afm-4.5b.toml
new file mode 100644
index 000000000..4761a2987
--- /dev/null
+++ b/providers/gatewayz/models/arcee-ai-afm-4.5b.toml
@@ -0,0 +1,16 @@
+name = "Arcee AI: AFM 4.5B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 65536
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/baidu-ernie-4.5-21b-a3b-thinking.toml b/providers/gatewayz/models/baidu-ernie-4.5-21b-a3b-thinking.toml
new file mode 100644
index 000000000..79b7fb041
--- /dev/null
+++ b/providers/gatewayz/models/baidu-ernie-4.5-21b-a3b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Baidu: ERNIE 4.5 21B A3B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepcogito-cogito-v2-preview-llama-405b.toml b/providers/gatewayz/models/deepcogito-cogito-v2-preview-llama-405b.toml
new file mode 100644
index 000000000..138049c8b
--- /dev/null
+++ b/providers/gatewayz/models/deepcogito-cogito-v2-preview-llama-405b.toml
@@ -0,0 +1,16 @@
+name = "Deep Cogito: Cogito V2 Preview Llama 405B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepseek-deepseek-v3.1-terminus.toml b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminus.toml
new file mode 100644
index 000000000..99dddd365
--- /dev/null
+++ b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminus.toml
@@ -0,0 +1,16 @@
+name = "DeepSeek: DeepSeek V3.1 Terminus"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 163840
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepseek-deepseek-v3.1-terminusexacto.toml b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminusexacto.toml
new file mode 100644
index 000000000..a71ae51b3
--- /dev/null
+++ b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminusexacto.toml
@@ -0,0 +1,16 @@
+name = "DeepSeek: DeepSeek V3.1 Terminus (exacto)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepseek-deepseek-v3.2-exp.toml b/providers/gatewayz/models/deepseek-deepseek-v3.2-exp.toml
new file mode 100644
index 000000000..a878480d0
--- /dev/null
+++ b/providers/gatewayz/models/deepseek-deepseek-v3.2-exp.toml
@@ -0,0 +1,16 @@
+name = "DeepSeek: DeepSeek V3.2 Exp"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 163840
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/google-gemini-2.5-flash-image.toml b/providers/gatewayz/models/google-gemini-2.5-flash-image.toml
new file mode 100644
index 000000000..1fd9f3168
--- /dev/null
+++ b/providers/gatewayz/models/google-gemini-2.5-flash-image.toml
@@ -0,0 +1,16 @@
+name = "Google: Gemini 2.5 Flash Image (Nano Banana)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text", "image"]
diff --git a/providers/gatewayz/models/google-gemini-2.5-flash-lite-preview-09-2025.toml b/providers/gatewayz/models/google-gemini-2.5-flash-lite-preview-09-2025.toml
new file mode 100644
index 000000000..3251a798d
--- /dev/null
+++ b/providers/gatewayz/models/google-gemini-2.5-flash-lite-preview-09-2025.toml
@@ -0,0 +1,16 @@
+name = "Google: Gemini 2.5 Flash Lite Preview 09-2025"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1048576
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/google-gemini-2.5-flash-preview-09-2025.toml b/providers/gatewayz/models/google-gemini-2.5-flash-preview-09-2025.toml
new file mode 100644
index 000000000..e5654699c
--- /dev/null
+++ b/providers/gatewayz/models/google-gemini-2.5-flash-preview-09-2025.toml
@@ -0,0 +1,16 @@
+name = "Google: Gemini 2.5 Flash Preview 09-2025"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1048576
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/ibm-granite-granite-4.0-h-micro.toml b/providers/gatewayz/models/ibm-granite-granite-4.0-h-micro.toml
new file mode 100644
index 000000000..4cbd268bb
--- /dev/null
+++ b/providers/gatewayz/models/ibm-granite-granite-4.0-h-micro.toml
@@ -0,0 +1,16 @@
+name = "IBM: Granite 4.0 Micro"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/inclusionai-ling-1t.toml b/providers/gatewayz/models/inclusionai-ling-1t.toml
new file mode 100644
index 000000000..be061f8cb
--- /dev/null
+++ b/providers/gatewayz/models/inclusionai-ling-1t.toml
@@ -0,0 +1,16 @@
+name = "inclusionAI: Ling-1T"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/inclusionai-ring-1t.toml b/providers/gatewayz/models/inclusionai-ring-1t.toml
new file mode 100644
index 000000000..99c2b2cbf
--- /dev/null
+++ b/providers/gatewayz/models/inclusionai-ring-1t.toml
@@ -0,0 +1,16 @@
+name = "inclusionAI: Ring 1T"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/liquid-lfm-2.2-6b.toml b/providers/gatewayz/models/liquid-lfm-2.2-6b.toml
new file mode 100644
index 000000000..81b0d3001
--- /dev/null
+++ b/providers/gatewayz/models/liquid-lfm-2.2-6b.toml
@@ -0,0 +1,16 @@
+name = "LiquidAI/LFM2-2.6B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/liquid-lfm2-8b-a1b.toml b/providers/gatewayz/models/liquid-lfm2-8b-a1b.toml
new file mode 100644
index 000000000..8981beaa9
--- /dev/null
+++ b/providers/gatewayz/models/liquid-lfm2-8b-a1b.toml
@@ -0,0 +1,16 @@
+name = "LiquidAI/LFM2-8B-A1B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/meituan-longcat-flash-chat.toml b/providers/gatewayz/models/meituan-longcat-flash-chat.toml
new file mode 100644
index 000000000..064f830b8
--- /dev/null
+++ b/providers/gatewayz/models/meituan-longcat-flash-chat.toml
@@ -0,0 +1,16 @@
+name = "Meituan: LongCat Flash Chat"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/meituan-longcat-flash-chatfree.toml b/providers/gatewayz/models/meituan-longcat-flash-chatfree.toml
new file mode 100644
index 000000000..6b09d8472
--- /dev/null
+++ b/providers/gatewayz/models/meituan-longcat-flash-chatfree.toml
@@ -0,0 +1,16 @@
+name = "Meituan: LongCat Flash Chat (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/minimax-minimax-m2.toml b/providers/gatewayz/models/minimax-minimax-m2.toml
new file mode 100644
index 000000000..5187955ea
--- /dev/null
+++ b/providers/gatewayz/models/minimax-minimax-m2.toml
@@ -0,0 +1,16 @@
+name = "MiniMax: MiniMax M2"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 196608
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/minimax-minimax-m2free.toml b/providers/gatewayz/models/minimax-minimax-m2free.toml
new file mode 100644
index 000000000..dc24cdfb4
--- /dev/null
+++ b/providers/gatewayz/models/minimax-minimax-m2free.toml
@@ -0,0 +1,16 @@
+name = "MiniMax: MiniMax M2 (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 204800
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/mistralai-voxtral-small-24b-2507.toml b/providers/gatewayz/models/mistralai-voxtral-small-24b-2507.toml
new file mode 100644
index 000000000..df751430a
--- /dev/null
+++ b/providers/gatewayz/models/mistralai-voxtral-small-24b-2507.toml
@@ -0,0 +1,16 @@
+name = "Mistral: Voxtral Small 24B 2507"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-llama-3.3-nemotron-super-49b-v1.5.toml b/providers/gatewayz/models/nvidia-llama-3.3-nemotron-super-49b-v1.5.toml
new file mode 100644
index 000000000..37fa209b0
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-llama-3.3-nemotron-super-49b-v1.5.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vl.toml b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vl.toml
new file mode 100644
index 000000000..90c6db2a5
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vl.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Nemotron Nano 12B 2 VL"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vlfree.toml b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vlfree.toml
new file mode 100644
index 000000000..ab0a1aea3
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vlfree.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Nemotron Nano 12B 2 VL (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-nemotron-nano-9b-v2free.toml b/providers/gatewayz/models/nvidia-nemotron-nano-9b-v2free.toml
new file mode 100644
index 000000000..e148d3c0e
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-nemotron-nano-9b-v2free.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Nemotron Nano 9B V2 (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-gpt-5-codex.toml b/providers/gatewayz/models/openai-gpt-5-codex.toml
new file mode 100644
index 000000000..142973bf9
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-codex.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Codex"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-gpt-5-image-mini.toml b/providers/gatewayz/models/openai-gpt-5-image-mini.toml
new file mode 100644
index 000000000..2c7a5f87d
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-image-mini.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Image Mini"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text", "image"]
diff --git a/providers/gatewayz/models/openai-gpt-5-image.toml b/providers/gatewayz/models/openai-gpt-5-image.toml
new file mode 100644
index 000000000..a833e6fdd
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-image.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Image"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text", "image"]
diff --git a/providers/gatewayz/models/openai-gpt-5-pro.toml b/providers/gatewayz/models/openai-gpt-5-pro.toml
new file mode 100644
index 000000000..b5b3eeefa
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-pro.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Pro"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-gpt-oss-safeguard-20b.toml b/providers/gatewayz/models/openai-gpt-oss-safeguard-20b.toml
new file mode 100644
index 000000000..fdb220bb7
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-oss-safeguard-20b.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: gpt-oss-safeguard-20b"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-o3-deep-research.toml b/providers/gatewayz/models/openai-o3-deep-research.toml
new file mode 100644
index 000000000..afcaaeeba
--- /dev/null
+++ b/providers/gatewayz/models/openai-o3-deep-research.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: o3 Deep Research"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-o4-mini-deep-research.toml b/providers/gatewayz/models/openai-o4-mini-deep-research.toml
new file mode 100644
index 000000000..dbfe32727
--- /dev/null
+++ b/providers/gatewayz/models/openai-o4-mini-deep-research.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: o4 Mini Deep Research"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-text-embedding-3-large.toml b/providers/gatewayz/models/openai-text-embedding-3-large.toml
new file mode 100644
index 000000000..f59f3890f
--- /dev/null
+++ b/providers/gatewayz/models/openai-text-embedding-3-large.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: Text Embedding 3 Large"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 8192
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/opengvlab-internvl3-78b.toml b/providers/gatewayz/models/opengvlab-internvl3-78b.toml
new file mode 100644
index 000000000..c6743defb
--- /dev/null
+++ b/providers/gatewayz/models/opengvlab-internvl3-78b.toml
@@ -0,0 +1,16 @@
+name = "OpenGVLab: InternVL3 78B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/perplexity-sonar-pro-search.toml b/providers/gatewayz/models/perplexity-sonar-pro-search.toml
new file mode 100644
index 000000000..82bfc5588
--- /dev/null
+++ b/providers/gatewayz/models/perplexity-sonar-pro-search.toml
@@ -0,0 +1,16 @@
+name = "Perplexity: Sonar Pro Search"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen-plus-2025-07-28.toml b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28.toml
new file mode 100644
index 000000000..ecd24ae66
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen Plus 0728"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen-plus-2025-07-28thinking.toml b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28thinking.toml
new file mode 100644
index 000000000..566626372
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen Plus 0728 (thinking)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-coder-flash.toml b/providers/gatewayz/models/qwen-qwen3-coder-flash.toml
new file mode 100644
index 000000000..1f06f20bd
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-coder-flash.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Coder Flash"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-coder-plus.toml b/providers/gatewayz/models/qwen-qwen3-coder-plus.toml
new file mode 100644
index 000000000..9036775f3
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-coder-plus.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Coder Plus"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-max.toml b/providers/gatewayz/models/qwen-qwen3-max.toml
new file mode 100644
index 000000000..df03c7cb9
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-max.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Max"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 256000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-instruct.toml
new file mode 100644
index 000000000..2190ba33f
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Next 80B A3B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-thinking.toml
new file mode 100644
index 000000000..ff2007e75
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Next 80B A3B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-instruct.toml
new file mode 100644
index 000000000..8de68cdfa
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 235B A22B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-thinking.toml
new file mode 100644
index 000000000..7081fa604
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 235B A22B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-instruct.toml
new file mode 100644
index 000000000..0e8e1898c
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 30B A3B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 0
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-thinking.toml
new file mode 100644
index 000000000..2935b04bd
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 30B A3B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-32b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-32b-instruct.toml
new file mode 100644
index 000000000..9199745f8
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-32b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 32B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-8b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-8b-instruct.toml
new file mode 100644
index 000000000..45a960c22
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-8b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 8B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-8b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-vl-8b-thinking.toml
new file mode 100644
index 000000000..90c7cb863
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-8b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 8B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 256000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/relace-relace-apply-3.toml b/providers/gatewayz/models/relace-relace-apply-3.toml
new file mode 100644
index 000000000..d1bde167d
--- /dev/null
+++ b/providers/gatewayz/models/relace-relace-apply-3.toml
@@ -0,0 +1,16 @@
+name = "Relace: Relace Apply 3"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 256000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/thedrummer-cydonia-24b-v4.1.toml b/providers/gatewayz/models/thedrummer-cydonia-24b-v4.1.toml
new file mode 100644
index 000000000..820884c1f
--- /dev/null
+++ b/providers/gatewayz/models/thedrummer-cydonia-24b-v4.1.toml
@@ -0,0 +1,16 @@
+name = "TheDrummer: Cydonia 24B V4.1"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/x-ai-grok-4-fast.toml b/providers/gatewayz/models/x-ai-grok-4-fast.toml
new file mode 100644
index 000000000..a4a5eb273
--- /dev/null
+++ b/providers/gatewayz/models/x-ai-grok-4-fast.toml
@@ -0,0 +1,16 @@
+name = "xAI: Grok 4 Fast"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 2000000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/z-ai-glm-4.6.toml b/providers/gatewayz/models/z-ai-glm-4.6.toml
new file mode 100644
index 000000000..cc4244a6f
--- /dev/null
+++ b/providers/gatewayz/models/z-ai-glm-4.6.toml
@@ -0,0 +1,16 @@
+name = "Z.AI: GLM 4.6"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 202752
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/z-ai-glm-4.6exacto.toml b/providers/gatewayz/models/z-ai-glm-4.6exacto.toml
new file mode 100644
index 000000000..3de4cd9bf
--- /dev/null
+++ b/providers/gatewayz/models/z-ai-glm-4.6exacto.toml
@@ -0,0 +1,16 @@
+name = "Z.AI: GLM 4.6 (exacto)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 202752
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/provider.toml b/providers/gatewayz/provider.toml
new file mode 100644
index 000000000..892367df1
--- /dev/null
+++ b/providers/gatewayz/provider.toml
@@ -0,0 +1,5 @@
+name = "Gatewayz"
+env = ["GATEWAYZ_API_KEY"]
+npm = "@ai-sdk/openai-compatible"
+api = "https://api.gatewayz.ai/v1"
+doc = "https://api.gatewayz.ai/docs"