Mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-10 04:15:01 +08:00)
Remove Falcon3 2x7B from CI (#17404)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
commit 0350809f3a (parent a6977dbd15)
@@ -29,7 +29,7 @@ AITER_MODEL_LIST = [
     "openbmb/MiniCPM3-4B",
     "Qwen/Qwen-7B-Chat",
     "Qwen/Qwen2.5-0.5B-Instruct",
-    "ehristoforu/Falcon3-MoE-2x7B-Insruct",
+    "TitanML/tiny-mixtral",
 ]
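For context, here is a minimal sketch (not the actual vLLM test file) of how a model list like the one above typically drives a parametrized generation test; the test name and body are hypothetical. Swapping "ehristoforu/Falcon3-MoE-2x7B-Insruct" for "TitanML/tiny-mixtral" only changes which checkpoint the test is collected against.

import pytest

MODELS = [
    "openbmb/MiniCPM3-4B",
    "Qwen/Qwen-7B-Chat",
    "Qwen/Qwen2.5-0.5B-Instruct",
    "TitanML/tiny-mixtral",
]


@pytest.mark.parametrize("model", MODELS)
@pytest.mark.parametrize("max_tokens", [32])
def test_generation(model: str, max_tokens: int) -> None:
    # Placeholder body: the real test would load `model` with vLLM and compare
    # its generations against the HuggingFace reference implementation.
    assert max_tokens > 0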
@@ -83,9 +83,8 @@ AITER_MODEL_LIST = [
     pytest.param("stabilityai/stablelm-3b-4e1t"),  # stablelm
     pytest.param("bigcode/starcoder2-3b"),  # starcoder2
     pytest.param(
-        "ehristoforu/Falcon3-MoE-2x7B-Insruct",  # mixtral
-        marks=[pytest.mark.cpu_model,
-               large_gpu_mark(min_gb=48)],
+        "TitanML/tiny-mixtral",  # mixtral
+        marks=[pytest.mark.cpu_model],
     )
 ])
 @pytest.mark.parametrize("max_tokens", [32])
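The hunk above drops the large_gpu_mark(min_gb=48) gate along with the 2x7B checkpoint, leaving only the cpu_model mark on the tiny Mixtral case. large_gpu_mark is a helper in vLLM's test utilities; the implementation below is an assumed simplification built on pytest.mark.skipif, shown only to illustrate how such a mark skips collection on machines without a sufficiently large GPU.

import pytest
import torch


def large_gpu_mark(min_gb: int) -> pytest.MarkDecorator:
    # Assumed sketch, not vLLM's actual helper: skip unless a GPU with at
    # least `min_gb` GiB of memory is present.
    has_big_gpu = (
        torch.cuda.is_available()
        and torch.cuda.get_device_properties(0).total_memory >= min_gb * 1024**3
    )
    return pytest.mark.skipif(not has_big_gpu,
                              reason=f"Need a GPU with >= {min_gb} GiB of memory")


# After this commit, the Mixtral case no longer needs the GPU gate:
TINY_MIXTRAL = pytest.param(
    "TitanML/tiny-mixtral",  # mixtral
    marks=[pytest.mark.cpu_model],
)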
@@ -191,7 +191,7 @@ _TEXT_GENERATION_EXAMPLE_MODELS = {
                                            trust_remote_code=True),
     "MistralForCausalLM": _HfExamplesInfo("mistralai/Mistral-7B-Instruct-v0.1"),
     "MixtralForCausalLM": _HfExamplesInfo("mistralai/Mixtral-8x7B-Instruct-v0.1",  # noqa: E501
-                                          {"falcon3": "ehristoforu/Falcon3-MoE-2x7B-Insruct"}),  # noqa: E501
+                                          {"tiny": "TitanML/tiny-mixtral"}),  # noqa: E501
     "QuantMixtralForCausalLM": _HfExamplesInfo("mistral-community/Mixtral-8x22B-v0.1-AWQ"),  # noqa: E501
     "MptForCausalLM": _HfExamplesInfo("mpt", is_available_online=False),
     "MPTForCausalLM": _HfExamplesInfo("mosaicml/mpt-7b"),
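The registry hunk swaps the extra checkpoint recorded for MixtralForCausalLM from the Falcon3 MoE repo to the much smaller TitanML/tiny-mixtral. Below is a hedged, simplified stand-in for vLLM's _HfExamplesInfo (the class and dict names here are illustrative, not the real definitions) showing how such a registry maps an architecture to a default checkpoint plus named extras that CI can substitute.

from dataclasses import dataclass, field
from typing import Mapping


@dataclass(frozen=True)
class HfExamplesInfo:  # simplified stand-in for vLLM's _HfExamplesInfo
    default: str
    extras: Mapping[str, str] = field(default_factory=dict)


TEXT_GENERATION_EXAMPLE_MODELS = {
    "MistralForCausalLM": HfExamplesInfo("mistralai/Mistral-7B-Instruct-v0.1"),
    "MixtralForCausalLM": HfExamplesInfo(
        "mistralai/Mixtral-8x7B-Instruct-v0.1",
        {"tiny": "TitanML/tiny-mixtral"},
    ),
}

# CI can pick the small "tiny" variant instead of the full-size default:
tiny_repo = TEXT_GENERATION_EXAMPLE_MODELS["MixtralForCausalLM"].extras["tiny"]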