From d2c919dcc20b1ea77a94fa01e813ebbb31f8a66a Mon Sep 17 00:00:00 2001
From: realliujiaxu
Date: Sat, 13 Dec 2025 01:03:35 +0800
Subject: [PATCH] [bugfix] fix bug when top_logprobs=0 with spec decoding
 (#30059)

Signed-off-by: realliujiaxu
---
 tests/v1/sample/test_logprobs.py          | 4 +++-
 tests/v1/sample/test_rejection_sampler.py | 2 +-
 vllm/v1/sample/rejection_sampler.py       | 2 +-
 3 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/tests/v1/sample/test_logprobs.py b/tests/v1/sample/test_logprobs.py
index c89c33be80c10..76a0e8e25a4ae 100644
--- a/tests/v1/sample/test_logprobs.py
+++ b/tests/v1/sample/test_logprobs.py
@@ -528,9 +528,11 @@ def test_logprobs_mode(logprobs_mode: LogprobsMode):
         ),
     ],
 )
+@pytest.mark.parametrize("top_logprobs", [0, 3])
 def test_spec_decode_logprobs(
     logprobs_mode: LogprobsMode,
     model_setup: tuple[str, str, str],
+    top_logprobs: int,
 ):
     """Spec decode logprobs should match those of the base model.

     prompt = "Hello world " * 50
     sampling_params = SamplingParams(
-        temperature=0, logprobs=3, max_tokens=10, ignore_eos=False
+        temperature=0, logprobs=top_logprobs, max_tokens=10, ignore_eos=False
     )
     method, model_name, spec_model_name = model_setup
     max_model_len = 256
diff --git a/tests/v1/sample/test_rejection_sampler.py b/tests/v1/sample/test_rejection_sampler.py
index bf7726ebf907f..61caffee45daf 100644
--- a/tests/v1/sample/test_rejection_sampler.py
+++ b/tests/v1/sample/test_rejection_sampler.py
@@ -111,7 +111,7 @@ def create_sampling_metadata(
         top_p=top_p,
         top_k=top_k,
         generators=generators,
-        max_num_logprobs=0,
+        max_num_logprobs=None,
         no_penalties=no_penalties,
         prompt_token_ids=prompt_token_ids,
         frequency_penalties=frequency_penalties,
diff --git a/vllm/v1/sample/rejection_sampler.py b/vllm/v1/sample/rejection_sampler.py
index ccaf07e18c468..50b91d8292ee8 100644
--- a/vllm/v1/sample/rejection_sampler.py
+++ b/vllm/v1/sample/rejection_sampler.py
@@ -145,7 +145,7 @@ class RejectionSampler(nn.Module):
         )

         logprobs_tensors = None
-        if sampling_metadata.max_num_logprobs:
+        if sampling_metadata.max_num_logprobs is not None:
             logprobs_tensors = self._get_logprobs_tensors(
                 sampling_metadata.max_num_logprobs,
                 metadata,
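
Why the condition change matters: a request with logprobs=0 is valid and means, roughly, "return the logprob of the sampled token with zero additional top entries", so max_num_logprobs can legitimately be 0. The old truthiness check `if sampling_metadata.max_num_logprobs:` treated 0 like None and skipped building the logprobs tensors on the spec-decode path. The standalone sketch below illustrates the pitfall; FakeSamplingMetadata and the two helper functions are hypothetical names for illustration, not vLLM APIs.

from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeSamplingMetadata:
    # None means no request asked for logprobs; 0 means "sampled token's
    # logprob only, no extra top-k entries" and must still be honored.
    max_num_logprobs: Optional[int]


def wants_logprobs_buggy(meta: FakeSamplingMetadata) -> bool:
    # Truthiness check: 0 and None are both falsy, so logprobs get dropped.
    return bool(meta.max_num_logprobs)


def wants_logprobs_fixed(meta: FakeSamplingMetadata) -> bool:
    # Explicit None check, mirroring the patched condition: 0 still takes
    # the logprobs path.
    return meta.max_num_logprobs is not None


zero = FakeSamplingMetadata(max_num_logprobs=0)
assert wants_logprobs_buggy(zero) is False   # bug: top_logprobs=0 loses logprobs
assert wants_logprobs_fixed(zero) is True    # fix: logprobs still computed
assert wants_logprobs_fixed(FakeSamplingMetadata(max_num_logprobs=None)) is False

This is also why the test helper now passes max_num_logprobs=None rather than 0 when no logprobs are requested, and why test_spec_decode_logprobs is parametrized over top_logprobs values 0 and 3.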