[Bugfix] fix dots.llm1.inst (#29687)

Signed-off-by: zjy0516 <riverclouds.zhu@qq.com>
Co-authored-by: Roger Wang <hey@rogerw.io>
This commit is contained in: (branch not shown in this view)
Authored by Jiangyun Zhu on 2025-11-29 07:25:26 +08:00; committed by GitHub.
parent 7675ba30de
commit a51f4186f2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -181,13 +181,14 @@ class Dots1MoE(nn.Module):
hidden_states = hidden_states.view(-1, hidden_dim)
router_logits, _ = self.gate(hidden_states)
final_hidden_states = (
self.experts(hidden_states=hidden_states, router_logits=router_logits)
* self.routed_scaling_factor
)
shared_out, routed_out = self.experts(
hidden_states=hidden_states, router_logits=router_logits
)
if self.shared_experts is not None:
final_hidden_states = final_hidden_states[0] + final_hidden_states[1]
final_hidden_states = (routed_out + shared_out) * self.routed_scaling_factor
else:
final_hidden_states = routed_out * self.routed_scaling_factor
if self.tp_size > 1:
final_hidden_states = tensor_model_parallel_all_reduce(final_hidden_states)