[Bugfix] Fix for the condition to accept empty encoder inputs for mllama (#17732)
Signed-off-by: Gregory Shtrasberg <Gregory.Shtrasberg@amd.com>
parent d456aea71f
commit de906b95f9
@@ -2021,7 +2021,7 @@ class LLMEngine:
         if not prompt_ids:
             if prompt_type == "encoder" and model_config.is_multimodal_model:
                 pass  # Mllama may have empty encoder inputs for text-only data
-            if prompt_inputs["type"] == "embeds":
+            elif prompt_inputs["type"] == "embeds":
                 pass
             else:
                 raise ValueError(f"The {prompt_type} prompt cannot be empty")
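Why the one-word change matters: with the original `if`, the Mllama branch only fell through to `pass` and the second check still ran, so a text-only Mllama request (empty encoder token ids, non-"embeds" input type) hit the `else` and raised. With `elif`, the three branches are mutually exclusive. Below is a minimal, hypothetical sketch of the validation chain in isolation, not the actual LLMEngine method; the names mirror the diff, and `is_multimodal` is a plain boolean standing in for `model_config.is_multimodal_model`.

def validate_prompt(prompt_ids, prompt_type, prompt_inputs, is_multimodal):
    """Hypothetical stand-alone version of the validation chain in the diff."""
    if not prompt_ids:
        if prompt_type == "encoder" and is_multimodal:
            pass  # Mllama may have empty encoder inputs for text-only data
        elif prompt_inputs["type"] == "embeds":
            pass
        else:
            raise ValueError(f"The {prompt_type} prompt cannot be empty")

# Text-only Mllama request: empty encoder token ids, regular token inputs.
# With the old `if` this raised ValueError; with `elif` it is accepted.
validate_prompt(
    prompt_ids=[],
    prompt_type="encoder",
    prompt_inputs={"type": "tokens"},
    is_multimodal=True,
)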