mirror of
https://git.datalinker.icu/vllm-project/vllm.git
synced 2025-12-10 16:15:40 +08:00
[BugFix][Core] Multistep Fix Crash on Request Cancellation (#8059)
This commit is contained in:
parent
4f5d8446ed
commit
8423aef4c8
@ -88,9 +88,15 @@ class MultiStepOutputProcessor(SequenceGroupOutputProcessor):
|
||||
# TODO: Add support for async if necessary
|
||||
assert not is_async
|
||||
|
||||
# Sequences can be in RUNNING or FINISHED_ABORTED state
|
||||
# once scheduled, as a sequence is moved to FINISHED_ABORTED
|
||||
# if a client disconnects from the API server.
|
||||
seqs = sequence_group.get_seqs(status=SequenceStatus.RUNNING)
|
||||
if seqs is None:
|
||||
seqs = sequence_group.get_seqs(
|
||||
status=SequenceStatus.FINISHED_ABORTED)
|
||||
|
||||
assert seqs, "expected running sequences"
|
||||
assert seqs, "Expected RUNNING or FINISHED_ABORTED sequences"
|
||||
assert len(seqs) == 1, (
|
||||
"Beam search not supported in multi-step decoding.")
|
||||
seq = seqs[0]
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user