28 changes: 22 additions & 6 deletions codex-rs/core/src/codex.rs
@@ -3486,19 +3486,35 @@ pub(crate) async fn run_turn(
     // Note that pending_input would be something like a message the user
     // submitted through the UI while the model was running. Though the UI
     // may support this, the model might not.
-    let pending_input = sess
+    let pending_response_items = sess
         .get_pending_input()
         .await
         .into_iter()
         .map(ResponseItem::from)
         .collect::<Vec<ResponseItem>>();
 
+    if !pending_response_items.is_empty() {
+        for response_item in pending_response_items {
+            if let Some(TurnItem::UserMessage(user_message)) = parse_turn_item(&response_item) {
+                // todo(aibrahim): move pending input to be UserInput only to keep TextElements. context: https://github.com/openai/codex/pull/10656#discussion_r2765522480
+                sess.record_user_prompt_and_emit_turn_item(
+                    turn_context.as_ref(),
+                    &user_message.content,
+                    response_item,
+                )
Comment on lines +3500 to +3504 (Contributor):
P2: Preserve user-input metadata when emitting pending input events

The new path emits UserMessage items for pending input by calling record_user_prompt_and_emit_turn_item with user_message.content derived from parse_turn_item, but parse_user_message rebuilds UserInput from ContentItem and explicitly sets text_elements: Vec::new() (see codex-rs/core/src/event_mapping.rs), dropping UI-only metadata like mention spans and local image paths. This means mid-turn injected input that included mentions or local images will now emit user-message events missing those spans/paths, so the UI can no longer render highlights or local image references for the injected prompt. To preserve the intended metadata (per the comment on record_user_prompt_and_emit_turn_item), the pending input needs to retain the original UserInput rather than reconstructing it from ResponseItem.


+                .await;
+            } else {
+                sess.record_conversation_items(
+                    &turn_context,
+                    std::slice::from_ref(&response_item),
+                )
+                .await;
+            }
+        }
+    }
+
     // Construct the input that we will send to the model.
-    let sampling_request_input: Vec<ResponseItem> = {
-        sess.record_conversation_items(&turn_context, &pending_input)
-            .await;
-        sess.clone_history().await.for_prompt()
-    };
+    let sampling_request_input: Vec<ResponseItem> = { sess.clone_history().await.for_prompt() };
 
     let sampling_request_input_messages = sampling_request_input
         .iter()
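
To make the inline review comment above concrete, here is a minimal, self-contained Rust sketch of the round trip it describes. Every type and field below (UserInput, ResponseItem, text_elements, and this toy parse_turn_item) is a simplified stand-in rather than the real codex-rs definition; the point is only that rebuilding the UI-side input from the wire item cannot recover metadata the wire item never carried.

```rust
// Simplified stand-ins for illustration; not the actual codex-rs types.

/// UI-side input: carries text plus UI-only metadata (mention spans,
/// local image paths) in `text_elements`.
#[derive(Clone, Debug)]
struct UserInput {
    text: String,
    text_elements: Vec<String>, // stand-in for the real TextElement type
}

/// Wire-side item: only the text survives the conversion.
#[derive(Clone, Debug)]
struct ResponseItem {
    text: String,
}

impl From<UserInput> for ResponseItem {
    fn from(input: UserInput) -> Self {
        ResponseItem { text: input.text }
    }
}

/// Mirrors the behavior the reviewer points at: the UserInput is rebuilt
/// from the wire item, so `text_elements` comes back empty.
fn parse_turn_item(item: &ResponseItem) -> UserInput {
    UserInput {
        text: item.text.clone(),
        text_elements: Vec::new(), // UI metadata is already gone here
    }
}

fn main() {
    let original = UserInput {
        text: "look at @file.rs".to_string(),
        text_elements: vec!["mention:@file.rs".to_string()],
    };

    // Current path: UserInput -> ResponseItem -> parse_turn_item -> UserInput.
    let round_tripped = parse_turn_item(&ResponseItem::from(original.clone()));
    assert!(round_tripped.text_elements.is_empty()); // spans/paths dropped

    // The original UserInput still has the metadata the UI needs.
    assert_eq!(original.text_elements.len(), 1);
}
```

Under that reading, the direction flagged in the todo above and in the reviewer's suggestion is to keep pending input as UserInput, so its text_elements survive, and derive the ResponseItem only when building the prompt.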
5 changes: 5 additions & 0 deletions codex-rs/core/tests/suite/pending_input.rs
@@ -125,6 +125,11 @@ async fn injected_user_input_triggers_follow_up_request_with_deltas() {
 
     let _ = gate_completed_tx.send(());
 
+    let _ = wait_for_event(&codex, |event| {
+        matches!(event, EventMsg::UserMessage(message) if message.message == "second prompt")
+    })
+    .await;
+
     wait_for_event(&codex, |event| matches!(event, EventMsg::TurnComplete(_))).await;
 
     let requests = server.requests().await;