Skip to content

Commit 3da2313

Browse files
authored
Support allowed_token_ids in ChatCompletionRequest (#19143)
Signed-off-by: Xu Song <xusong.vip@gmail.com>
1 parent b61dc5f commit 3da2313

File tree

1 file changed

+2
-0
lines changed

1 file changed

+2
-0
lines changed

vllm/entrypoints/openai/protocol.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -271,6 +271,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
271271
spaces_between_special_tokens: bool = True
272272
truncate_prompt_tokens: Optional[Annotated[int, Field(ge=1)]] = None
273273
prompt_logprobs: Optional[int] = None
274+
allowed_token_ids: Optional[list[int]] = None
274275
# --8<-- [end:chat-completion-sampling-params]
275276

276277
# --8<-- [start:chat-completion-extra-params]
@@ -549,6 +550,7 @@ def to_sampling_params(
549550
else RequestOutputKind.FINAL_ONLY,
550551
guided_decoding=guided_decoding,
551552
logit_bias=self.logit_bias,
553+
allowed_token_ids=self.allowed_token_ids,
552554
extra_args=({"kv_transfer_params": self.kv_transfer_params}
553555
if self.kv_transfer_params else None))
554556

0 commit comments

Comments (0)