Skip to content

Commit

Permalink
fixup! [Feature]: Add OpenAI server prompt_logprobs support #6508
Browse files — browse the repository at this point in the history
  • Loading branch information
gnpinkert committed Aug 15, 2024
1 parent b1f117a commit 23fc4b3
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 6 deletions.
6 changes: 2 additions & 4 deletions vllm/entrypoints/openai/protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -537,8 +537,7 @@ class CompletionResponseChoice(OpenAIBaseModel):
"to stop, None if the completion finished for some other reason "
"including encountering the EOS token"),
)
prompt_logprobs: Optional[List[Optional[Dict[int, Logprob]]]] = Field(
default=None)
prompt_logprobs: Optional[List[Optional[Dict[int, Logprob]]]] = None


class CompletionResponse(OpenAIBaseModel):
Expand Down Expand Up @@ -634,8 +633,7 @@ class ChatCompletionResponse(OpenAIBaseModel):
model: str
choices: List[ChatCompletionResponseChoice]
usage: UsageInfo
prompt_logprobs: Optional[List[Optional[Dict[int, Logprob]]]] = Field(
default=None)
prompt_logprobs: Optional[List[Optional[Dict[int, Logprob]]]] = None


class DeltaMessage(OpenAIBaseModel):
Expand Down
3 changes: 1 addition & 2 deletions vllm/entrypoints/openai/serving_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -516,8 +516,7 @@ async def chat_completion_full_generator(
model=model_name,
choices=choices,
usage=usage,
prompt_logprobs=final_res.prompt_logprobs
if request.prompt_logprobs else None)
prompt_logprobs=final_res.prompt_logprobs)

return response

Expand Down

0 comments on commit 23fc4b3

Please sign in to comment.