[Bugfix] Fix MoeWNA16Method activation (#14024)

Author: Jee Jee Li
Date: 2025-02-28 23:22:42 +08:00 (committed by GitHub)
Parent: f58f8b5c96
Commit: e0734387fb


@@ -293,9 +293,10 @@ class MoeWNA16Method(FusedMoEMethodBase):
        custom_routing_function: Optional[Callable] = None,
        scoring_func: str = "softmax",
        e_score_correction_bias: Optional[torch.Tensor] = None,
        activation: str = "silu",
    ) -> torch.Tensor:
        from vllm.model_executor.layers.fused_moe import fused_experts
        assert activation == "silu", "Only SiLU activation is supported."
        topk_weights, topk_ids = FusedMoE.select_experts(
            hidden_states=x,
            router_logits=router_logits,
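
For context, the fused-MoE layer forwards its configured activation to the quantization method's apply(). Below is a minimal, self-contained sketch of that call pattern, written under the assumption that the pre-patch failure was apply() not accepting the activation keyword; FusedMoELayer and all method bodies here are illustrative stand-ins, not vLLM's actual implementation.

    # Sketch only: names mirror the classes touched by the diff, bodies are fake.
    class MoeWNA16Method:
        def apply(self, x, router_logits, top_k, activation="silu"):
            # The `activation` parameter and this assert are what the patch adds:
            # the WNA16 fused-experts path only supports SiLU, so any other value
            # is rejected explicitly instead of being silently ignored.
            assert activation == "silu", "Only SiLU activation is supported."
            return x  # placeholder for the fused-experts computation

    class FusedMoELayer:
        def __init__(self, quant_method, activation="silu"):
            self.quant_method = quant_method
            self.activation = activation

        def forward(self, x, router_logits, top_k=2):
            # The layer passes its activation choice through to the quant method.
            # Without the added parameter, a keyword call like this would fail
            # with "apply() got an unexpected keyword argument 'activation'".
            return self.quant_method.apply(
                x, router_logits, top_k, activation=self.activation)

    layer = FusedMoELayer(MoeWNA16Method())
    print(layer.forward([[0.1, 0.2]], [[0.6, 0.4]]))

Running the sketch simply echoes the placeholder output, showing that the activation keyword now routes through the apply() call without error.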