[Bugfix] Add kwargs to RequestOutput __init__ to be forward compatible (#18513)

Signed-off-by: Linkun <github@lkchen.net>
lkchen 2025-05-22 05:24:43 -07:00 committed by GitHub
parent f6037d1907
commit a35a494745
3 changed files with 24 additions and 0 deletions


@@ -59,6 +59,7 @@ steps:
- pytest -v -s async_engine # AsyncLLMEngine
- NUM_SCHEDULER_STEPS=4 pytest -v -s async_engine/test_async_llm_engine.py
- pytest -v -s test_inputs.py
- pytest -v -s test_outputs.py
- pytest -v -s multimodal
- pytest -v -s test_utils.py # Utils
- pytest -v -s worker # Worker

tests/test_outputs.py (new file, 14 additions)

@@ -0,0 +1,14 @@
# SPDX-License-Identifier: Apache-2.0
from vllm.outputs import RequestOutput


def test_request_output_forward_compatible():
    output = RequestOutput(request_id="test_request_id",
                           prompt="test prompt",
                           prompt_token_ids=[1, 2, 3],
                           prompt_logprobs=None,
                           outputs=[],
                           finished=False,
                           example_arg_added_in_new_version="some_value")
    assert output is not None
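To exercise the new test locally, the same pytest invocation added to the CI steps above should work from the tests/ directory of a vLLM development checkout (pass tests/test_outputs.py instead when running from the repository root; the local setup is an assumption, not something stated in this commit):

pytest -v -s test_outputs.py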


@@ -9,12 +9,15 @@ from typing import Any, Generic, Optional, Union
import torch
from typing_extensions import TypeVar, deprecated

from vllm.logger import init_logger
from vllm.lora.request import LoRARequest
from vllm.multimodal.inputs import MultiModalPlaceholderDict
from vllm.sampling_params import RequestOutputKind
from vllm.sequence import (PromptLogprobs, RequestMetrics, SampleLogprobs,
                           SequenceGroup, SequenceGroupBase, SequenceStatus)

logger = init_logger(__name__)


@dataclass
class CompletionOutput:
@@ -122,7 +125,13 @@ class RequestOutput:
        *,
        multi_modal_placeholders: Optional[MultiModalPlaceholderDict] = None,
        kv_transfer_params: Optional[dict[str, Any]] = None,
        # Forward compatibility: code that uses args added in a newer release
        # can still run with older versions of vLLM without breaking.
        **kwargs: Any,
    ) -> None:
        if kwargs:
            logger.warning_once("RequestOutput: Ignoring extra arguments: %s",
                                str(kwargs))
        self.request_id = request_id
        self.prompt = prompt
        self.prompt_token_ids = prompt_token_ids
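For context, the change above follows a general forward-compatibility pattern: a constructor that accepts **kwargs can ignore arguments introduced in a newer release (logging a warning once) instead of raising a TypeError. Below is a minimal, self-contained sketch of that pattern outside of vLLM; the Output class and argument names are hypothetical and used for illustration only.

import logging

logger = logging.getLogger(__name__)


class Output:
    # Hypothetical stand-in for an output object; not part of the vLLM codebase.

    def __init__(self, request_id: str, **kwargs) -> None:
        if kwargs:
            # A newer caller may pass arguments this version does not know
            # about; ignore them with a warning instead of failing.
            logger.warning("Ignoring extra arguments: %s", kwargs)
        self.request_id = request_id


# Code written against a newer API still constructs the object successfully
# on this older version, since the unknown argument is swallowed by **kwargs.
out = Output("req-1", arg_added_in_new_version="some_value")
assert out.request_id == "req-1"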