[Misc] Fix PhiMoE expert mapping (#21085)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
This commit is contained in:
Jee Jee Li 2025-07-17 13:47:49 +08:00 committed by GitHub
parent 4fcef49ec4
commit c5b8b5953a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -533,14 +533,9 @@ class PhiMoEModel(nn.Module):
("qkv_proj", "v_proj", "v"),
]
expert_params_mapping = FusedMoE.make_expert_params_mapping(
ckpt_gate_proj_name="w1",
ckpt_down_proj_name="w2",
ckpt_up_proj_name="w3",
num_experts=self.config.num_local_experts)
params_dict = dict(self.named_parameters())
loaded_params: set[str] = set()
expert_params_mapping = self.get_expert_mapping()
for name, loaded_weight in weights:
if (self.quant_config is not None and
(scale_name := self.quant_config.get_cache_scale(name))):