Fix internlm after https://github.com/vllm-project/vllm/pull/2860 (#2861)
parent 7eacffd951
commit 0c48b37c31
@@ -175,7 +175,8 @@ class LlamaDecoderLayer(nn.Module):
         self.self_attn = LlamaAttention(
             hidden_size=self.hidden_size,
             num_heads=config.num_attention_heads,
-            num_kv_heads=config.num_key_value_heads,
+            num_kv_heads=getattr(config, "num_key_value_heads",
+                                 config.num_attention_heads),
             rope_theta=rope_theta,
             rope_scaling=rope_scaling,
             max_position_embeddings=max_position_embeddings,
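A minimal sketch of why the getattr fallback matters: some configs (such as InternLM's) do not define num_key_value_heads, so reading the attribute directly raises AttributeError, while getattr falls back to num_attention_heads, which is equivalent to plain multi-head attention. The config objects below are hypothetical stand-ins for illustration, not the actual transformers config classes.

    from types import SimpleNamespace

    # Hypothetical stand-ins for HF model configs (assumption, not the real
    # transformers classes).
    llama_cfg = SimpleNamespace(num_attention_heads=32, num_key_value_heads=8)
    internlm_cfg = SimpleNamespace(num_attention_heads=32)  # no num_key_value_heads

    def resolve_num_kv_heads(config):
        # Fall back to num_attention_heads when the config does not define
        # num_key_value_heads, mirroring the getattr(...) call in the diff above.
        return getattr(config, "num_key_value_heads", config.num_attention_heads)

    print(resolve_num_kv_heads(llama_cfg))     # 8  (grouped-query attention)
    print(resolve_num_kv_heads(internlm_cfg))  # 32 (falls back to MHA)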