[refactor] Add prefix support to embed_tokens in DeepSeek MTP (#30788)

Signed-off-by: zzhx1 <zzh_201018@outlook.com>
Author: zzhxxx
Date:   2025-12-18 12:45:56 +08:00 (committed by GitHub)
Commit: b166ef20e1
Parent: 5f2f3fba1d


@@ -141,6 +141,7 @@ class DeepSeekMultiTokenPredictor(nn.Module):
         self.embed_tokens = VocabParallelEmbedding(
             config.vocab_size,
             config.hidden_size,
+            prefix=maybe_prefix(prefix, "embed_tokens"),
         )
         self.logits_processor = LogitsProcessor(config.vocab_size)
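
For context, the effect of the added prefix= argument is that the MTP embedding layer is registered under a fully qualified module name (e.g. "<parent prefix>.embed_tokens") rather than an empty one, which per-module configuration in vLLM can then key on. Below is a minimal sketch of the assumed behavior of the maybe_prefix helper from vllm.model_executor.models.utils; it is an illustrative reimplementation, not the vLLM source.

    # Illustrative sketch: assumed behavior of vLLM's maybe_prefix helper.
    def maybe_prefix(prefix: str, name: str) -> str:
        """Return 'prefix.name', or just 'name' when the prefix is empty."""
        return name if not prefix else f"{prefix}.{name}"

    # With the change above, the embedding layer's name is composed from the
    # parent module's prefix instead of defaulting to an unqualified name.
    print(maybe_prefix("model", "embed_tokens"))  # -> "model.embed_tokens"
    print(maybe_prefix("", "embed_tokens"))       # -> "embed_tokens"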