From 5394ad738720ab9c2dfb0d00fcd894ac27a4fd67 Mon Sep 17 00:00:00 2001 From: Chauncey Date: Mon, 5 May 2025 10:22:35 +0800 Subject: [PATCH] [Bugfix] fix KeyError when top logprobs are special tokens (#17637) Signed-off-by: chaunceyjiang --- vllm/entrypoints/openai/serving_chat.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vllm/entrypoints/openai/serving_chat.py b/vllm/entrypoints/openai/serving_chat.py index 83a92a98026e8..5c11836fbff44 100644 --- a/vllm/entrypoints/openai/serving_chat.py +++ b/vllm/entrypoints/openai/serving_chat.py @@ -1111,7 +1111,8 @@ class OpenAIServingChat(OpenAIServing): return_as_token_id is not None else self.return_tokens_as_token_ids for i, token_id in enumerate(token_ids): step_top_logprobs = top_logprobs[i] - if step_top_logprobs is None: + if step_top_logprobs is None or step_top_logprobs.get( + token_id) is None: token = tokenizer.decode(token_id) if should_return_as_token_id: token = f"token_id:{token_id}"