From 6d7adc24b0205f81bc783a613803647fdeaec730 Mon Sep 17 00:00:00 2001
From: Joshbly
Date: Fri, 31 Jan 2025 18:56:44 -0500
Subject: [PATCH] feat: Add o3-mini and o3-mini-2025-01-31 model variants

---
 .../azure_openai/azure_openai.yaml            | 12 +++++
 .../model_providers/openai/llm/_position.yaml |  2 +
 .../openai/llm/o3-mini-2025-01-31.yaml        | 33 +++++++++++++
 .../model_providers/openai/llm/o3-mini.yaml   | 33 +++++++++++++
 .../openrouter/llm/_position.yaml             |  2 +
 .../openrouter/llm/o3-mini-2025-01-31.yaml    | 49 +++++++++++++++++++
 .../openrouter/llm/o3-mini.yaml               | 49 +++++++++++++++++++
 7 files changed, 180 insertions(+)
 create mode 100644 api/core/model_runtime/model_providers/openai/llm/o3-mini-2025-01-31.yaml
 create mode 100644 api/core/model_runtime/model_providers/openai/llm/o3-mini.yaml
 create mode 100644 api/core/model_runtime/model_providers/openrouter/llm/o3-mini-2025-01-31.yaml
 create mode 100644 api/core/model_runtime/model_providers/openrouter/llm/o3-mini.yaml

diff --git a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml
index 970b386b086b34..c3ba180aaee202 100644
--- a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml
+++ b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml
@@ -138,6 +138,18 @@ model_credential_schema:
           show_on:
             - variable: __model_type
               value: llm
+        - label:
+            en_US: o3-mini
+          value: o3-mini
+          show_on:
+            - variable: __model_type
+              value: llm
+        - label:
+            en_US: o3-mini-2025-01-31
+          value: o3-mini-2025-01-31
+          show_on:
+            - variable: __model_type
+              value: llm
         - label:
             en_US: o1-preview
           value: o1-preview
diff --git a/api/core/model_runtime/model_providers/openai/llm/_position.yaml b/api/core/model_runtime/model_providers/openai/llm/_position.yaml
index be279d95208690..0d3143c2aeb256 100644
--- a/api/core/model_runtime/model_providers/openai/llm/_position.yaml
+++ b/api/core/model_runtime/model_providers/openai/llm/_position.yaml
@@ -2,6 +2,8 @@
 - o1-2024-12-17
 - o1-mini
 - o1-mini-2024-09-12
+- o3-mini
+- o3-mini-2025-01-31
 - gpt-4
 - gpt-4o
 - gpt-4o-2024-05-13
diff --git a/api/core/model_runtime/model_providers/openai/llm/o3-mini-2025-01-31.yaml b/api/core/model_runtime/model_providers/openai/llm/o3-mini-2025-01-31.yaml
new file mode 100644
index 00000000000000..91276cbf5ad02e
--- /dev/null
+++ b/api/core/model_runtime/model_providers/openai/llm/o3-mini-2025-01-31.yaml
@@ -0,0 +1,33 @@
+model: o3-mini-2025-01-31
+label:
+  zh_Hans: o3-mini-2025-01-31
+  en_US: o3-mini-2025-01-31
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 200000
+parameter_rules:
+  - name: max_tokens
+    use_template: max_tokens
+    default: 100000
+    min: 1
+    max: 100000
+  - name: response_format
+    label:
+      zh_Hans: 回复格式
+      en_US: response_format
+    type: string
+    help:
+      zh_Hans: 指定模型必须输出的格式
+      en_US: specifying the format that the model must output
+    required: false
+    options:
+      - text
+      - json_object
+pricing:
+  input: '1.10'
+  output: '4.40'
+  unit: '0.000001'
+  currency: USD
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/openai/llm/o3-mini.yaml b/api/core/model_runtime/model_providers/openai/llm/o3-mini.yaml
new file mode 100644
index 00000000000000..976269101f55c1
--- /dev/null
+++ b/api/core/model_runtime/model_providers/openai/llm/o3-mini.yaml
@@ -0,0 +1,33 @@
+model: o3-mini
+label:
+  zh_Hans: o3-mini
+  en_US: o3-mini
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 200000
+parameter_rules:
+  - name: max_tokens
+    use_template: max_tokens
+    default: 100000
+    min: 1
+    max: 100000
+  - name: response_format
+    label:
+      zh_Hans: 回复格式
+      en_US: response_format
+    type: string
+    help:
+      zh_Hans: 指定模型必须输出的格式
+      en_US: specifying the format that the model must output
+    required: false
+    options:
+      - text
+      - json_object
+pricing:
+  input: '1.10'
+  output: '4.40'
+  unit: '0.000001'
+  currency: USD
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml b/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml
index 5a25c84c34c0cb..dd4f3fde51d5cd 100644
--- a/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml
+++ b/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml
@@ -1,5 +1,7 @@
 - openai/o1-preview
 - openai/o1-mini
+- openai/o3-mini
+- openai/o3-mini-2025-01-31
 - openai/gpt-4o
 - openai/gpt-4o-mini
 - openai/gpt-4
diff --git a/api/core/model_runtime/model_providers/openrouter/llm/o3-mini-2025-01-31.yaml b/api/core/model_runtime/model_providers/openrouter/llm/o3-mini-2025-01-31.yaml
new file mode 100644
index 00000000000000..fe656117a5b433
--- /dev/null
+++ b/api/core/model_runtime/model_providers/openrouter/llm/o3-mini-2025-01-31.yaml
@@ -0,0 +1,49 @@
+model: openai/o3-mini-2025-01-31
+label:
+  en_US: o3-mini-2025-01-31
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 200000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: top_k
+    label:
+      zh_Hans: 取样数量
+      en_US: Top k
+    type: int
+    help:
+      zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
+      en_US: Only sample from the top K options for each subsequent token.
+    required: false
+  - name: presence_penalty
+    use_template: presence_penalty
+  - name: frequency_penalty
+    use_template: frequency_penalty
+  - name: max_tokens
+    use_template: max_tokens
+    default: 512
+    min: 1
+    max: 100000
+  - name: response_format
+    label:
+      zh_Hans: 回复格式
+      en_US: response_format
+    type: string
+    help:
+      zh_Hans: 指定模型必须输出的格式
+      en_US: specifying the format that the model must output
+    required: false
+    options:
+      - text
+      - json_object
+pricing:
+  input: "1.10"
+  output: "4.40"
+  unit: "0.000001"
+  currency: USD
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/openrouter/llm/o3-mini.yaml b/api/core/model_runtime/model_providers/openrouter/llm/o3-mini.yaml
new file mode 100644
index 00000000000000..7866adaec12a44
--- /dev/null
+++ b/api/core/model_runtime/model_providers/openrouter/llm/o3-mini.yaml
@@ -0,0 +1,49 @@
+model: openai/o3-mini
+label:
+  en_US: o3-mini
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 200000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: top_k
+    label:
+      zh_Hans: 取样数量
+      en_US: Top k
+    type: int
+    help:
+      zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
+      en_US: Only sample from the top K options for each subsequent token.
+    required: false
+  - name: presence_penalty
+    use_template: presence_penalty
+  - name: frequency_penalty
+    use_template: frequency_penalty
+  - name: max_tokens
+    use_template: max_tokens
+    default: 512
+    min: 1
+    max: 100000
+  - name: response_format
+    label:
+      zh_Hans: 回复格式
+      en_US: response_format
+    type: string
+    help:
+      zh_Hans: 指定模型必须输出的格式
+      en_US: specifying the format that the model must output
+    required: false
+    options:
+      - text
+      - json_object
+pricing:
+  input: "1.10"
+  output: "4.40"
+  unit: "0.000001"
+  currency: USD
\ No newline at end of file
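Reviewer note: below is a minimal, hypothetical sanity check, not part of the patch, that parses one of the new spec files and prints a few fields so a reviewer can confirm the YAML is well-formed. It assumes PyYAML is installed and the snippet is run from the repository root; the path and the keys it reads come directly from the o3-mini.yaml added above.

# Hypothetical reviewer sanity check (assumes PyYAML; run from the repo root).
from pathlib import Path

import yaml

# Path of the new OpenAI o3-mini spec introduced by this patch.
SPEC = Path("api/core/model_runtime/model_providers/openai/llm/o3-mini.yaml")

with SPEC.open(encoding="utf-8") as f:
    spec = yaml.safe_load(f)

# Expect: o3-mini 200000
print(spec["model"], spec["model_properties"]["context_size"])

# Expect: default 100000, max 100000 for the max_tokens rule
for rule in spec["parameter_rules"]:
    if rule["name"] == "max_tokens":
        print(rule["default"], rule["max"])

# Expect: 1.10 4.40 (USD per 0.000001 tokens, as declared in the pricing block)
print(spec["pricing"]["input"], spec["pricing"]["output"])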