Do not tie word embeddings (tie_word_embeddings) for glm-4.5 and glm-4.5v (#22460)

Signed-off-by: zRzRzRzRzRzRzR <2448370773@qq.com>
This commit is contained in:
Yuxuan Zhang 2025-08-08 10:37:23 +08:00 committed by GitHub
parent 17eaaef595
commit c152e2a8a0
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -601,8 +601,6 @@ class Glm4MoeForCausalLM(nn.Module, SupportsPP, SupportsLoRA):
quant_config=quant_config)
else:
self.lm_head = PPMissingLayer()
if self.config.tie_word_embeddings:
self.lm_head.weight = self.model.embed_tokens.weight
self.logits_processor = LogitsProcessor(config.vocab_size)
self.make_empty_intermediate_tensors = (
self.model.make_empty_intermediate_tensors)