Fix torch version check for SM100 mxfp4 (#22535)

Signed-off-by: Zifei Tong <zifeitong@gmail.com>
Signed-off-by: mgoin <mgoin64@gmail.com>
Co-authored-by: mgoin <mgoin64@gmail.com>
Author: zifeitong
Date:   2025-08-12 12:54:42 -07:00 (committed by GitHub)
Parent: 422f22e012
Commit: 6534d2fc97

@@ -741,12 +741,14 @@ class FusedMoE(torch.nn.Module):
         # we padding globally so EP buffer allocation works
         if quant_config and quant_config.get_name() == "mxfp4":
-            if not is_torch_equal_or_newer("2.8.0"):
-                raise RuntimeError("Mxfp4 on hopper requires torch >= 2.8.0")
-            if current_platform.is_device_capability(
-                    90) and not has_triton_kernels():
-                raise NotImplementedError(
-                    "Triton kernels must be installed for mxfp4 on hopper")
+            if not current_platform.is_device_capability(100):
+                if not is_torch_equal_or_newer("2.8.0"):
+                    raise RuntimeError(
+                        "Mxfp4 on non-blackwell requires torch >= 2.8.0")
+                if not has_triton_kernels():
+                    raise NotImplementedError(
+                        "triton_kernels must be installed for "
+                        "mxfp4 on non-blackwell")
             if (current_platform.is_rocm()
                     or envs.VLLM_USE_FLASHINFER_MOE_MXFP4_MXFP8
                     or envs.VLLM_USE_FLASHINFER_MOE_MXFP4_BF16):
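
In effect, the SM100 (Blackwell) path is now exempt from both the torch >= 2.8.0
and the triton_kernels requirements; they only apply to other architectures such
as SM90 (Hopper). A minimal sketch of the new gating, using a hypothetical helper
check_mxfp4_requirements and packaging.version in place of vLLM's
is_torch_equal_or_newer:

from packaging import version


def check_mxfp4_requirements(device_capability: int,
                             torch_version: str,
                             triton_kernels_available: bool) -> None:
    # Hypothetical stand-in for the check above: capability 100 (SM100,
    # Blackwell) skips both requirements; every other architecture needs
    # torch >= 2.8.0 and the triton_kernels package.
    if device_capability == 100:
        return
    if version.parse(torch_version) < version.parse("2.8.0"):
        raise RuntimeError("Mxfp4 on non-blackwell requires torch >= 2.8.0")
    if not triton_kernels_available:
        raise NotImplementedError(
            "triton_kernels must be installed for mxfp4 on non-blackwell")


# SM100 with an older torch now passes, where the pre-fix check raised.
check_mxfp4_requirements(100, "2.7.1", triton_kernels_available=False)
# SM90 (Hopper) still requires both torch >= 2.8.0 and triton_kernels.
check_mxfp4_requirements(90, "2.8.0", triton_kernels_available=True)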