Fix use_ep

Signed-off-by: Tyler Michael Smith <tyler@neuralmagic.com>
This commit is contained in:
Tyler Michael Smith 2025-04-07 19:56:41 +00:00
parent 090c856d76
commit 87e47eb1db

View File

@@ -437,7 +437,7 @@ class FusedMoE(torch.nn.Module):
# Use expert parallelism instead of tensor parallelism?
vllm_config = get_current_vllm_config()
use_ep = (vllm_config.parallel_config.enable_expert_parallel
- and self.tp_size > 1)
+ and (self.tp_size != 1 or self.dp_size != 1))
# For smuggling this layer into the fused moe custom op
self.use_direct_call = self.dp_size == 1