diff --git a/codex-rs/core/src/chat_completions.rs b/codex-rs/core/src/chat_completions.rs
index 7760c48fbf4..f55512e520e 100644
--- a/codex-rs/core/src/chat_completions.rs
+++ b/codex-rs/core/src/chat_completions.rs
@@ -25,6 +25,7 @@ use crate::flags::OPENAI_REQUEST_MAX_RETRIES;
 use crate::flags::OPENAI_STREAM_IDLE_TIMEOUT_MS;
 use crate::models::ContentItem;
 use crate::models::ResponseItem;
+use crate::openai_tools::create_tools_json_for_chat_completions_api;
 use crate::util::backoff;
 
 /// Implementation for the classic Chat Completions API. This is intentionally
@@ -56,17 +57,22 @@ pub(crate) async fn stream_chat_completions(
         }
     }
 
+    let tools_json = create_tools_json_for_chat_completions_api(prompt, model)?;
     let payload = json!({
         "model": model,
         "messages": messages,
-        "stream": true
+        "stream": true,
+        "tools": tools_json,
     });
 
     let base_url = provider.base_url.trim_end_matches('/');
     let url = format!("{}/chat/completions", base_url);
 
     debug!(url, "POST (chat)");
-    trace!("request payload: {}", payload);
+    trace!(
+        "request payload: {}",
+        serde_json::to_string_pretty(&payload).unwrap_or_default()
+    );
 
     let api_key = provider.api_key()?;
     let mut attempt = 0;
diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs
index 72ce845fc87..034cfaec45e 100644
--- a/codex-rs/core/src/client.rs
+++ b/codex-rs/core/src/client.rs
@@ -1,7 +1,5 @@
-use std::collections::BTreeMap;
 use std::io::BufRead;
 use std::path::Path;
-use std::sync::LazyLock;
 use std::time::Duration;
 
 use bytes::Bytes;
@@ -11,7 +9,6 @@ use reqwest::StatusCode;
 use serde::Deserialize;
 use serde::Serialize;
 use serde_json::Value;
-use serde_json::json;
 use tokio::sync::mpsc;
 use tokio::time::timeout;
 use tokio_util::io::ReaderStream;
@@ -36,71 +33,9 @@ use crate::flags::OPENAI_STREAM_IDLE_TIMEOUT_MS;
 use crate::model_provider_info::ModelProviderInfo;
 use crate::model_provider_info::WireApi;
 use crate::models::ResponseItem;
+use crate::openai_tools::create_tools_json_for_responses_api;
 use crate::util::backoff;
 
-/// When serialized as JSON, this produces a valid "Tool" in the OpenAI
-/// Responses API.
-#[derive(Debug, Clone, Serialize)]
-#[serde(tag = "type")]
-enum OpenAiTool {
-    #[serde(rename = "function")]
-    Function(ResponsesApiTool),
-    #[serde(rename = "local_shell")]
-    LocalShell {},
-}
-
-#[derive(Debug, Clone, Serialize)]
-struct ResponsesApiTool {
-    name: &'static str,
-    description: &'static str,
-    strict: bool,
-    parameters: JsonSchema,
-}
-
-/// Generic JSON‑Schema subset needed for our tool definitions
-#[derive(Debug, Clone, Serialize)]
-#[serde(tag = "type", rename_all = "lowercase")]
-enum JsonSchema {
-    String,
-    Number,
-    Array {
-        items: Box<JsonSchema>,
-    },
-    Object {
-        properties: BTreeMap<String, JsonSchema>,
-        required: &'static [&'static str],
-        #[serde(rename = "additionalProperties")]
-        additional_properties: bool,
-    },
-}
-
-/// Tool usage specification
-static DEFAULT_TOOLS: LazyLock<Vec<OpenAiTool>> = LazyLock::new(|| {
-    let mut properties = BTreeMap::new();
-    properties.insert(
-        "command".to_string(),
-        JsonSchema::Array {
-            items: Box::new(JsonSchema::String),
-        },
-    );
-    properties.insert("workdir".to_string(), JsonSchema::String);
-    properties.insert("timeout".to_string(), JsonSchema::Number);
-
-    vec![OpenAiTool::Function(ResponsesApiTool {
-        name: "shell",
-        description: "Runs a shell command, and returns its output.",
-        strict: false,
-        parameters: JsonSchema::Object {
-            properties,
-            required: &["command"],
-            additional_properties: false,
-        },
-    })]
-});
-
-static DEFAULT_CODEX_MODEL_TOOLS: LazyLock<Vec<OpenAiTool>> =
-    LazyLock::new(|| vec![OpenAiTool::LocalShell {}]);
-
 #[derive(Clone)]
 pub struct ModelClient {
     model: String,
@@ -161,27 +96,8 @@ impl ModelClient {
             return stream_from_fixture(path).await;
         }
 
-        // Assemble tool list: built-in tools + any extra tools from the prompt.
-        let default_tools = if self.model.starts_with("codex") {
-            &DEFAULT_CODEX_MODEL_TOOLS
-        } else {
-            &DEFAULT_TOOLS
-        };
-        let mut tools_json = Vec::with_capacity(default_tools.len() + prompt.extra_tools.len());
-        for t in default_tools.iter() {
-            tools_json.push(serde_json::to_value(t)?);
-        }
-        tools_json.extend(
-            prompt
-                .extra_tools
-                .clone()
-                .into_iter()
-                .map(|(name, tool)| mcp_tool_to_openai_tool(name, tool)),
-        );
-
-        debug!("tools_json: {}", serde_json::to_string_pretty(&tools_json)?);
-
         let full_instructions = prompt.get_full_instructions();
+        let tools_json = create_tools_json_for_responses_api(prompt, &self.model)?;
         let payload = Payload {
             model: &self.model,
             instructions: &full_instructions,
@@ -276,34 +192,6 @@ impl ModelClient {
     }
 }
 
-fn mcp_tool_to_openai_tool(
-    fully_qualified_name: String,
-    tool: mcp_types::Tool,
-) -> serde_json::Value {
-    let mcp_types::Tool {
-        description,
-        mut input_schema,
-        ..
-    } = tool;
-
-    // OpenAI models mandate the "properties" field in the schema. The Agents
-    // SDK fixed this by inserting an empty object for "properties" if it is not
-    // already present https://github.com/openai/openai-agents-python/issues/449
-    // so here we do the same.
-    if input_schema.properties.is_none() {
-        input_schema.properties = Some(serde_json::Value::Object(serde_json::Map::new()));
-    }
-
-    // TODO(mbolin): Change the contract of this function to return
-    // ResponsesApiTool.
-    json!({
-        "name": fully_qualified_name,
-        "description": description,
-        "parameters": input_schema,
-        "type": "function",
-    })
-}
-
 #[derive(Debug, Deserialize, Serialize)]
 struct SseEvent {
     #[serde(rename = "type")]
diff --git a/codex-rs/core/src/lib.rs b/codex-rs/core/src/lib.rs
index 8398ff7650a..77941a9a51a 100644
--- a/codex-rs/core/src/lib.rs
+++ b/codex-rs/core/src/lib.rs
@@ -27,6 +27,7 @@ mod model_provider_info;
 pub use model_provider_info::ModelProviderInfo;
 pub use model_provider_info::WireApi;
 mod models;
+mod openai_tools;
 mod project_doc;
 pub mod protocol;
 mod rollout;
diff --git a/codex-rs/core/src/openai_tools.rs b/codex-rs/core/src/openai_tools.rs
new file mode 100644
index 00000000000..0cbdcae0d39
--- /dev/null
+++ b/codex-rs/core/src/openai_tools.rs
@@ -0,0 +1,158 @@
+use serde::Serialize;
+use serde_json::json;
+use std::collections::BTreeMap;
+use std::sync::LazyLock;
+
+use crate::client_common::Prompt;
+
+#[derive(Debug, Clone, Serialize)]
+pub(crate) struct ResponsesApiTool {
+    name: &'static str,
+    description: &'static str,
+    strict: bool,
+    parameters: JsonSchema,
+}
+
+/// When serialized as JSON, this produces a valid "Tool" in the OpenAI
+/// Responses API.
+#[derive(Debug, Clone, Serialize)]
+#[serde(tag = "type")]
+pub(crate) enum OpenAiTool {
+    #[serde(rename = "function")]
+    Function(ResponsesApiTool),
+    #[serde(rename = "local_shell")]
+    LocalShell {},
+}
+
+/// Generic JSON‑Schema subset needed for our tool definitions
+#[derive(Debug, Clone, Serialize)]
+#[serde(tag = "type", rename_all = "lowercase")]
+pub(crate) enum JsonSchema {
+    String,
+    Number,
+    Array {
+        items: Box<JsonSchema>,
+    },
+    Object {
+        properties: BTreeMap<String, JsonSchema>,
+        required: &'static [&'static str],
+        #[serde(rename = "additionalProperties")]
+        additional_properties: bool,
+    },
+}
+
+/// Tool usage specification
+static DEFAULT_TOOLS: LazyLock<Vec<OpenAiTool>> = LazyLock::new(|| {
+    let mut properties = BTreeMap::new();
+    properties.insert(
+        "command".to_string(),
+        JsonSchema::Array {
+            items: Box::new(JsonSchema::String),
+        },
+    );
+    properties.insert("workdir".to_string(), JsonSchema::String);
+    properties.insert("timeout".to_string(), JsonSchema::Number);
+
+    vec![OpenAiTool::Function(ResponsesApiTool {
+        name: "shell",
+        description: "Runs a shell command, and returns its output.",
+        strict: false,
+        parameters: JsonSchema::Object {
+            properties,
+            required: &["command"],
+            additional_properties: false,
+        },
+    })]
+});
+
+static DEFAULT_CODEX_MODEL_TOOLS: LazyLock<Vec<OpenAiTool>> =
+    LazyLock::new(|| vec![OpenAiTool::LocalShell {}]);
+
+/// Returns JSON values that are compatible with Function Calling in the
+/// Responses API:
+/// https://platform.openai.com/docs/guides/function-calling?api-mode=responses
+pub(crate) fn create_tools_json_for_responses_api(
+    prompt: &Prompt,
+    model: &str,
+) -> crate::error::Result<Vec<serde_json::Value>> {
+    // Assemble tool list: built-in tools + any extra tools from the prompt.
+    let default_tools = if model.starts_with("codex") {
+        &DEFAULT_CODEX_MODEL_TOOLS
+    } else {
+        &DEFAULT_TOOLS
+    };
+    let mut tools_json = Vec::with_capacity(default_tools.len() + prompt.extra_tools.len());
+    for t in default_tools.iter() {
+        tools_json.push(serde_json::to_value(t)?);
+    }
+    tools_json.extend(
+        prompt
+            .extra_tools
+            .clone()
+            .into_iter()
+            .map(|(name, tool)| mcp_tool_to_openai_tool(name, tool)),
+    );
+
+    tracing::debug!("tools_json: {}", serde_json::to_string_pretty(&tools_json)?);
+    Ok(tools_json)
+}
+
+/// Returns JSON values that are compatible with Function Calling in the
+/// Chat Completions API:
+/// https://platform.openai.com/docs/guides/function-calling?api-mode=chat
+pub(crate) fn create_tools_json_for_chat_completions_api(
+    prompt: &Prompt,
+    model: &str,
+) -> crate::error::Result<Vec<serde_json::Value>> {
+    // We start with the JSON for the Responses API and then rewrite it to match
+    // the chat completions tool call format.
+    let responses_api_tools_json = create_tools_json_for_responses_api(prompt, model)?;
+    let tools_json = responses_api_tools_json
+        .into_iter()
+        .filter_map(|mut tool| {
+            if tool.get("type") != Some(&serde_json::Value::String("function".to_string())) {
+                return None;
+            }
+
+            if let Some(map) = tool.as_object_mut() {
+                // Remove "type" field as it is not needed in chat completions.
+                map.remove("type");
+                Some(json!({
+                    "type": "function",
+                    "function": map,
+                }))
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<serde_json::Value>>();
+    Ok(tools_json)
+}
+
+fn mcp_tool_to_openai_tool(
+    fully_qualified_name: String,
+    tool: mcp_types::Tool,
+) -> serde_json::Value {
+    let mcp_types::Tool {
+        description,
+        mut input_schema,
+        ..
+    } = tool;
+
+    // OpenAI models mandate the "properties" field in the schema. The Agents
+    // SDK fixed this by inserting an empty object for "properties" if it is not
+    // already present https://github.com/openai/openai-agents-python/issues/449
+    // so here we do the same.
+    if input_schema.properties.is_none() {
+        input_schema.properties = Some(serde_json::Value::Object(serde_json::Map::new()));
+    }
+
+    // TODO(mbolin): Change the contract of this function to return
+    // ResponsesApiTool.
+    json!({
+        "name": fully_qualified_name,
+        "description": description,
+        "parameters": input_schema,
+        "type": "function",
+    })
+}
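
For reference, below is a standalone sketch, not part of the patch, of the tool-shape rewrite that create_tools_json_for_chat_completions_api performs. The input value approximates what the serde derives above should produce for the built-in "shell" tool under the Responses API, and the transformation mirrors the filter_map in the new module: drop the flat "type" tag and nest the remaining fields under "function". Only serde_json is assumed as a dependency.

// Illustrative sketch only; compiles with serde_json as the sole dependency.
use serde_json::{json, Value};

fn main() {
    // Approximate Responses API serialization of the built-in "shell" tool,
    // i.e. OpenAiTool::Function(ResponsesApiTool { .. }) with #[serde(tag = "type")].
    let mut tool: Value = json!({
        "type": "function",
        "name": "shell",
        "description": "Runs a shell command, and returns its output.",
        "strict": false,
        "parameters": {
            "type": "object",
            "properties": {
                "command": { "type": "array", "items": { "type": "string" } },
                "workdir": { "type": "string" },
                "timeout": { "type": "number" }
            },
            "required": ["command"],
            "additionalProperties": false
        }
    });

    // Same rewrite as the patch: remove the flat "type" field and wrap the
    // remaining object under "function" so the Chat Completions API accepts it.
    let map = tool.as_object_mut().expect("tool is a JSON object");
    map.remove("type");
    let chat_tool = json!({
        "type": "function",
        "function": map,
    });

    println!("{}", serde_json::to_string_pretty(&chat_tool).unwrap());
}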