Mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-13 19:44:28 +08:00)
[Bugfix] Fix failing multimodal standard test (#22153)

Signed-off-by: Isotr0py <mozf@mail2.sysu.edu.cn>

parent d3c18c9cb0
commit 6a39ba85fe
@@ -105,6 +105,8 @@ def test_model_tensor_schema(model_arch: str, vllm_runner: type[VllmRunner],
     model_info = HF_EXAMPLE_MODELS.get_hf_info(model_arch)
     model_info.check_available_online(on_fail="skip")
+    model_info.check_transformers_version(on_fail="skip",
+                                          check_max_version=False)
 
     model_id = model_info.default
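For context, a hedged usage sketch of the new keyword (names taken from the hunk above; the import path is an assumption): relaxing only the maximum bound lets the standard test run against a newer or dev transformers build while still skipping when the installed version is too old.

# Sketch only; HF_EXAMPLE_MODELS and its import path are assumed here, and
# "Glm4MoeForCausalLM" is used purely as an illustrative architecture name.
from tests.models.registry import HF_EXAMPLE_MODELS  # assumed import path

model_info = HF_EXAMPLE_MODELS.get_hf_info("Glm4MoeForCausalLM")
model_info.check_available_online(on_fail="skip")
# Skip only when transformers is older than the model's minimum; a
# newer-than-maximum install no longer causes a skip.
model_info.check_transformers_version(on_fail="skip", check_max_version=False)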
@@ -80,6 +80,8 @@ class _HfExamplesInfo:
         self,
         *,
         on_fail: Literal["error", "skip"],
+        check_min_version: bool = True,
+        check_max_version: bool = True,
     ) -> None:
         """
         If the installed transformers version does not meet the requirements,
@@ -96,9 +98,11 @@ class _HfExamplesInfo:
         msg = f"`transformers=={current_version}` installed, but `transformers"
         # Only check the base version for the min/max version, otherwise preview
         # models cannot be run because `x.yy.0.dev0`<`x.yy.0`
-        if min_version and Version(cur_base_version) < Version(min_version):
+        if (check_min_version and min_version
+                and Version(cur_base_version) < Version(min_version)):
             msg += f">={min_version}` is required to run this model."
-        elif max_version and Version(cur_base_version) > Version(max_version):
+        elif (check_max_version and max_version
+                and Version(cur_base_version) > Version(max_version)):
             msg += f"<={max_version}` is required to run this model."
         else:
             return
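To make the two-flag gating above concrete, here is a minimal standalone sketch of the same comparison pattern, assuming only packaging is available; the helper name and return type are illustrative, not vLLM's API.

# Minimal sketch: each bound is enforced only when its check_* flag is set,
# and only the base version is compared so "x.yy.0.dev0" still satisfies
# ">=x.yy".
from packaging.version import Version

def version_in_range(current: str,
                     min_version: str | None,
                     max_version: str | None,
                     *,
                     check_min_version: bool = True,
                     check_max_version: bool = True) -> bool:
    cur_base = Version(current).base_version
    if (check_min_version and min_version
            and Version(cur_base) < Version(min_version)):
        return False
    if (check_max_version and max_version
            and Version(cur_base) > Version(max_version)):
        return False
    return True

# A dev build newer than the maximum passes once the max bound is ignored,
# while an install below the minimum still fails the check.
assert version_in_range("4.55.0.dev0", "4.54", "4.54", check_max_version=False)
assert not version_in_range("4.53.2", "4.54", None)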
@@ -185,6 +189,8 @@ _TEXT_GENERATION_EXAMPLE_MODELS = {
                                              min_transformers_version="4.53"),
     "GlmForCausalLM": _HfExamplesInfo("THUDM/glm-4-9b-chat-hf"),
     "Glm4ForCausalLM": _HfExamplesInfo("THUDM/GLM-4-9B-0414"),
+    "Glm4MoeForCausalLM": _HfExamplesInfo("zai-org/GLM-4.5",
+                                          min_transformers_version="4.54"),  # noqa: E501
     "GPT2LMHeadModel": _HfExamplesInfo("openai-community/gpt2",
                                        {"alias": "gpt2"}),
     "GPTBigCodeForCausalLM": _HfExamplesInfo("bigcode/starcoder",
@@ -378,8 +384,6 @@ _MULTIMODAL_EXAMPLE_MODELS = {
                                       trust_remote_code=True,
                                       hf_overrides={"architectures": ["GLM4VForCausalLM"]}),  # noqa: E501
     "Glm4vForConditionalGeneration": _HfExamplesInfo("THUDM/GLM-4.1V-9B-Thinking"),  # noqa: E501
-    "Glm4MoeForCausalLM": _HfExamplesInfo("zai-org/GLM-4.5",
-                                          min_transformers_version="4.54"),  # noqa: E501
     "Glm4v_moeForConditionalGeneration": _HfExamplesInfo("zai-org/GLM-4.5V-Air",
                                                          is_available_online=False),  # noqa: E501
     "H2OVLChatModel": _HfExamplesInfo("h2oai/h2ovl-mississippi-800m",
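Why moving the GLM-4.5 entry between tables is safe for lookups: a toy sketch (hypothetical dicts, not vLLM code) assuming the per-category tables are merged into one registry, so the architecture resolves the same way and only its categorisation changes.

# Toy sketch with made-up dicts; the real table names appear in the hunks above.
_TEXT_GENERATION = {"Glm4MoeForCausalLM": "zai-org/GLM-4.5"}
_MULTIMODAL = {"Glm4vForConditionalGeneration": "THUDM/GLM-4.1V-9B-Thinking"}

EXAMPLE_MODELS = {**_TEXT_GENERATION, **_MULTIMODAL}
assert EXAMPLE_MODELS["Glm4MoeForCausalLM"] == "zai-org/GLM-4.5"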