mirror of
https://git.datalinker.icu/vllm-project/vllm.git
synced 2025-12-24 14:35:50 +08:00
Don't assume position_embedding_type will be present for BERT and RoBERTa models (#30770)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
parent
676db55eec
commit
6f15ac5de7
@ -55,7 +55,9 @@ class BertEmbedding(nn.Module):
|
||||
"position_ids",
|
||||
torch.arange(config.max_position_embeddings).unsqueeze(0),
|
||||
)
|
||||
self.position_embedding_type = config.position_embedding_type
|
||||
self.position_embedding_type = getattr(
|
||||
config, "position_embedding_type", "absolute"
|
||||
)
|
||||
if self.position_embedding_type != "absolute":
|
||||
raise ValueError(
|
||||
"Only 'absolute' position_embedding_type" + " is supported"
|
||||
|
||||
@ -57,12 +57,6 @@ class RobertaEmbedding(nn.Module):
|
||||
torch.arange(config.max_position_embeddings).unsqueeze(0),
|
||||
)
|
||||
|
||||
self.position_embedding_type = config.position_embedding_type
|
||||
if self.position_embedding_type != "absolute":
|
||||
raise ValueError(
|
||||
"Only 'absolute' position_embedding_type" + " is supported"
|
||||
)
|
||||
|
||||
def forward(
|
||||
self,
|
||||
input_ids: torch.Tensor,
|
||||
@ -135,12 +129,12 @@ class RobertaEmbeddingModel(BertEmbeddingModel):
|
||||
def _build_model(
|
||||
self, vllm_config: VllmConfig, prefix: str = ""
|
||||
) -> BertModel | BertWithRope:
|
||||
if vllm_config.model_config.hf_config.position_embedding_type == "rotary":
|
||||
return JinaRobertaModel(vllm_config=vllm_config, prefix=prefix)
|
||||
hf_config = vllm_config.model_config.hf_config
|
||||
kwargs = dict(vllm_config=vllm_config, prefix=prefix)
|
||||
if getattr(hf_config, "position_embedding_type", "absolute") == "absolute":
|
||||
return BertModel(**kwargs, embedding_class=RobertaEmbedding)
|
||||
else:
|
||||
return BertModel(
|
||||
vllm_config=vllm_config, prefix=prefix, embedding_class=RobertaEmbedding
|
||||
)
|
||||
return JinaRobertaModel(**kwargs)
|
||||
|
||||
def load_weights(self, weights: Iterable[tuple[str, torch.Tensor]]):
|
||||
weights_list = list(weights)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user