[Bugfix] Fix GLM-4.1-V video prompt update (#20635)

Signed-off-by: Isotr0py <2037008807@qq.com>
Isotr0py 2025-07-09 07:13:58 +08:00 committed by GitHub
parent 32dffc2772
commit b9fca83256


@@ -65,7 +65,7 @@ from vllm.multimodal.parse import (ImageSize, MultiModalDataItems,
                                     MultiModalDataParser)
 from vllm.multimodal.processing import (BaseMultiModalProcessor,
                                         BaseProcessingInfo, PromptReplacement,
-                                        PromptUpdate)
+                                        PromptUpdate, PromptUpdateDetails)
 from vllm.multimodal.profiling import BaseDummyInputsBuilder
 from vllm.platforms import _Backend
 from vllm.sequence import IntermediateTensors
@@ -1213,7 +1213,10 @@ class Glm4vMultiModalProcessor(BaseMultiModalProcessor[Glm4vProcessingInfo]):
                 placeholder.append(eoi_token_id)
                 placeholder.extend(frame_idx)
             placeholder.append(eov_token_id)
-            return placeholder
+            return PromptUpdateDetails.select_token_id(
+                placeholder,
+                embed_token_id=hf_processor.video_token_id,
+            )
 
         return [
             PromptReplacement(
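
Note (not part of the diff): a minimal sketch of the semantics this change relies on, assuming PromptUpdateDetails.select_token_id marks only the positions holding the given embed token id as embedding slots. The placeholder built above mixes structural tokens (begin/end-of-image, end-of-video, timestamp tokens) with the repeated video tokens, and only the latter should receive vision embeddings. The token ids below are hypothetical.

# Illustration only; token ids are made up for the example.
video_token_id = 151363                   # hypothetical <|video|> id
boi, eoi, eov = 151339, 151340, 151341    # hypothetical structural token ids

# A placeholder like the one returned above: per-frame structure plus the
# repeated video tokens that actually carry vision embeddings.
placeholder = [boi, video_token_id, video_token_id, eoi,
               boi, video_token_id, video_token_id, eoi, eov]

# Selecting the embed token id treats only positions equal to video_token_id
# as embedding slots, so structural tokens are no longer misread as features.
is_embed = [tok == video_token_id for tok in placeholder]
assert sum(is_embed) == 4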