[Misc] Bump up test_fused_moe tolerance (#10364)

Signed-off-by: ElizaWszola <eliza@neuralmagic.com>
ElizaWszola 2024-11-15 17:31:18 +01:00 committed by GitHub
parent 691a3ec047
commit 79ee45b428


@@ -45,7 +45,7 @@ def test_fused_moe(
     score = torch.randn((m, e), device="cuda", dtype=dtype)
     triton_output = fused_moe(a, w1, w2, score, topk, renormalize=False)
     torch_output = torch_moe(a, w1, w2, score, topk)
-    torch.testing.assert_close(triton_output, torch_output, atol=1e-2, rtol=0)
+    torch.testing.assert_close(triton_output, torch_output, atol=2e-2, rtol=0)
 
 
 @pytest.mark.parametrize("dtype",
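
For context, a minimal sketch of what this one-line change does. With rtol=0, torch.testing.assert_close reduces to a purely absolute elementwise check, |actual - expected| <= atol, so raising atol from 1e-2 to 2e-2 doubles the permitted absolute deviation between the Triton and reference MoE outputs. The tensors and error magnitude below are illustrative, not taken from the test:

```python
import torch

# Illustrative only: an error of 1.5e-2 sits between the old and new tolerances.
expected = torch.tensor([1.0, 2.0, 3.0])
actual = expected + 0.015

# Fails under the old tolerance (atol=1e-2, rtol=0)...
try:
    torch.testing.assert_close(actual, expected, atol=1e-2, rtol=0)
except AssertionError:
    print("fails at atol=1e-2")

# ...but passes under the new one (atol=2e-2, rtol=0).
torch.testing.assert_close(actual, expected, atol=2e-2, rtol=0)
print("passes at atol=2e-2")
```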