[Bugfix] Fix use_direct_call condition in FusedMoE layer (#14382)

Signed-off-by: Tyler Michael Smith <tyler@neuralmagic.com>
This commit is contained in:
Authored by Tyler Michael Smith on 2025-03-06 17:17:21 -05:00; committed by GitHub
parent 99b0915d3b
commit 958adce478
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -359,7 +359,7 @@ class FusedMoE(torch.nn.Module):
# For smuggling this layer into the fused moe custom op
self.use_direct_call = self.dp_size == 1
if self.use_direct_call:
if not self.use_direct_call:
compilation_config = vllm_config.compilation_config
if prefix in compilation_config.static_forward_context:
raise ValueError("Duplicate layer name: {}".format(prefix))