[Bugfix] Fix test_mixtral_moe (#24371)

Author: Jee Jee Li
Date:   2025-09-07 00:32:03 +08:00
Commit: 7555d6b34a (parent 00a4e56d8d)


@@ -371,8 +371,8 @@ def test_fused_moe_wn16(m: int, n: int, k: int, e: int, topk: int,
 @pytest.mark.parametrize(
     "use_rocm_aiter", [True, False] if current_platform.is_rocm() else [False])
 @torch.inference_mode()
-def test_mixtral_moe(dtype: torch.dtype, padding: bool, use_rocm_aiter: bool,
-                     monkeypatch):
+def test_mixtral_moe(dist_init, dtype: torch.dtype, padding: bool,
+                     use_rocm_aiter: bool, monkeypatch):
     """Make sure our Mixtral MoE implementation agrees with the one from
     huggingface."""
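
The fix threads a distributed-initialization fixture (dist_init) into the test signature, presumably because the Mixtral MoE path now queries torch.distributed state during the test. Below is a minimal sketch of what such a pytest fixture could look like, assuming a single-process gloo process group; the fixture actually used by vLLM's test suite lives in its test utilities and may be set up differently.

# Hypothetical sketch of a dist_init-style pytest fixture; the real fixture
# shipped with vLLM may use different backends and initialization helpers.
import pytest
import torch.distributed as dist


@pytest.fixture
def dist_init():
    # Create a single-process default process group so code paths that
    # consult torch.distributed (e.g. parallel sizing) do not fail.
    if not dist.is_initialized():
        dist.init_process_group(
            backend="gloo",
            init_method="tcp://127.0.0.1:29500",
            rank=0,
            world_size=1,
        )
    yield
    # Tear the group down so later tests start from a clean state.
    if dist.is_initialized():
        dist.destroy_process_group()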