[Bugfix] Fix test_mixtral_moe (#24371)
commit 7555d6b34a
parent 00a4e56d8d
@@ -371,8 +371,8 @@ def test_fused_moe_wn16(m: int, n: int, k: int, e: int, topk: int,
 @pytest.mark.parametrize(
     "use_rocm_aiter", [True, False] if current_platform.is_rocm() else [False])
 @torch.inference_mode()
-def test_mixtral_moe(dtype: torch.dtype, padding: bool, use_rocm_aiter: bool,
-                     monkeypatch):
+def test_mixtral_moe(dist_init, dtype: torch.dtype, padding: bool,
+                     use_rocm_aiter: bool, monkeypatch):
     """Make sure our Mixtral MoE implementation agrees with the one from
     huggingface."""
 
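The fix threads a `dist_init` pytest fixture into `test_mixtral_moe`, so pytest sets up a distributed environment before the test body constructs the MoE layer. The fixture itself ships with vLLM's test suite and uses vLLM's own distributed-state helpers; the following is only a hypothetical, minimal sketch of what such a fixture does, written with stock `torch.distributed` calls:

# Hypothetical sketch of a dist_init-style fixture; the real fixture lives in
# vLLM's test utilities and is wired up differently.
import pytest
import torch.distributed as dist


@pytest.fixture
def dist_init():
    # Set up a single-process process group so code paths that issue
    # torch.distributed collectives (e.g. tensor-parallel layers) can run.
    if not dist.is_initialized():
        dist.init_process_group(
            backend="gloo",
            init_method="tcp://127.0.0.1:29500",
            world_size=1,
            rank=0,
        )
    yield
    # Tear the group down so later tests start from a clean state.
    if dist.is_initialized():
        dist.destroy_process_group()

Because `dist_init` appears as the first parameter of `test_mixtral_moe`, pytest runs the fixture before the test body, which is presumably the setup the test had been missing.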