Commit ba195f5 (1 parent: ee09046)

use static method to clean prompt logprobs

Signed-off-by: maxDavid40 <maxdavid40@gmail.com>

2 files changed (+4, -6 lines)

vllm/entrypoints/openai/serving_chat.py (2 additions, 0 deletions)

@@ -841,6 +841,8 @@ async def chat_completion_full_generator(
 
         request_metadata.final_usage_info = usage
 
+        self._clean_prompt_logprobs(final_res.prompt_logprobs)
+
         response = ChatCompletionResponse(
             id=request_id,
             created=created_time,
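Note (an editor's illustration, not part of the commit): the sentinel rewrite this helper performs (replacing float('-inf') with -9999.0, visible in the serving_completion.py hunk below) presumably exists because IEEE infinities are not representable in strict JSON, so an uncleaned prompt logprob can yield a non-compliant API response. A minimal Python sketch of the underlying serialization problem:

    import json

    # By default, json.dumps emits the non-standard token "-Infinity",
    # which strict JSON parsers reject.
    print(json.dumps({"logprob": float("-inf")}))  # {"logprob": -Infinity}

    # With allow_nan=False the serializer refuses outright:
    # json.dumps({"logprob": float("-inf")}, allow_nan=False)
    # ValueError: Out of range float values are not JSON compliant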

vllm/entrypoints/openai/serving_completion.py (2 additions, 6 deletions)

@@ -396,13 +396,9 @@ def request_output_to_completion_response(
         for final_res in final_res_batch:
             prompt_token_ids = final_res.prompt_token_ids
             assert prompt_token_ids is not None
+            self._clean_prompt_logprobs(final_res.prompt_logprobs)
             prompt_logprobs = final_res.prompt_logprobs
-            if prompt_logprobs:
-                for logprob_dict in prompt_logprobs:
-                    if logprob_dict:
-                        for logprob_values in logprob_dict.values():
-                            if logprob_values.logprob == float('-inf'):
-                                logprob_values.logprob = -9999.0
+
             prompt_text = final_res.prompt
 
             token_ids: GenericSequence[int]
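The body of _clean_prompt_logprobs is not shown in this commit, but the inline loop removed from serving_completion.py spells out its logic. A minimal sketch of what the shared static method presumably looks like; the exact signature, the Logprob type, and the containing class are assumptions inferred from the call sites, not taken from this diff:

    from typing import Optional

    @staticmethod
    def _clean_prompt_logprobs(
        prompt_logprobs: Optional[list[Optional[dict[int, "Logprob"]]]],
    ) -> None:
        """Replace -inf prompt logprobs with a finite sentinel, in place.

        Mirrors the inline loop removed from
        request_output_to_completion_response.
        """
        if not prompt_logprobs:
            return
        for logprob_dict in prompt_logprobs:
            if not logprob_dict:
                continue
            for logprob_values in logprob_dict.values():
                if logprob_values.logprob == float('-inf'):
                    logprob_values.logprob = -9999.0

Hoisting the loop into one static method lets serving_chat.py and serving_completion.py share the same sentinel value instead of each endpoint duplicating the cleanup.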
