From 7555d6b34af1aad14786a9451ff69968316a3ab4 Mon Sep 17 00:00:00 2001
From: Jee Jee Li
Date: Sun, 7 Sep 2025 00:32:03 +0800
Subject: [PATCH] [Bugfix] Fix test_mixtral_moe (#24371)

---
 tests/kernels/moe/test_moe.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/kernels/moe/test_moe.py b/tests/kernels/moe/test_moe.py
index 0ea9667914fd..850c486b9524 100644
--- a/tests/kernels/moe/test_moe.py
+++ b/tests/kernels/moe/test_moe.py
@@ -371,8 +371,8 @@ def test_fused_moe_wn16(m: int, n: int, k: int, e: int, topk: int,
 @pytest.mark.parametrize(
     "use_rocm_aiter", [True, False] if current_platform.is_rocm() else [False])
 @torch.inference_mode()
-def test_mixtral_moe(dtype: torch.dtype, padding: bool, use_rocm_aiter: bool,
-                     monkeypatch):
+def test_mixtral_moe(dist_init, dtype: torch.dtype, padding: bool,
+                     use_rocm_aiter: bool, monkeypatch):
     """Make sure our Mixtral MoE implementation agrees with the one
     from huggingface."""
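
The fix threads a pytest fixture named dist_init into the test signature, so that a
default distributed process group exists before the Mixtral MoE layers are built.
The fixture itself is defined elsewhere in the test suite (typically a conftest.py)
and is not part of this patch; below is a minimal sketch of what such a fixture
could look like, using only plain torch.distributed. The backend and port choices
are illustrative assumptions, not taken from the vLLM source.

import pytest
import torch.distributed as dist


@pytest.fixture
def dist_init():
    # Hypothetical sketch: the real vLLM fixture may differ.
    # Stand up a single-process default process group so that layers which
    # query the distributed state can be constructed inside the test.
    if not dist.is_initialized():
        dist.init_process_group(
            backend="gloo",                       # CPU-friendly backend for tests
            init_method="tcp://127.0.0.1:29500",  # assumed local rendezvous address
            rank=0,
            world_size=1,
        )
    yield
    # Tear the group down so later tests start from a clean distributed state.
    if dist.is_initialized():
        dist.destroy_process_group()

Because pytest injects fixtures by parameter name, simply adding dist_init as the
first argument of test_mixtral_moe is enough to run the setup and teardown around
the test body; no call site changes are needed.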