[Misc] Remove redundant hidden_size property in ModelConfig (#29749)

Signed-off-by: Xingyu Liu <charlotteliu12x@gmail.com>
Co-authored-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Xingyu Liu 2025-12-01 01:14:23 +08:00 committed by GitHub
parent 39d28108f4
commit 21c2627934
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 2 additions and 9 deletions

View File

@@ -1726,19 +1726,12 @@ class ModelConfig:
logger.debug_once("head dtype: %s", head_dtype)
return head_dtype
@property
def hidden_size(self):
if hasattr(self.hf_config, "hidden_size"):
return self.hf_config.hidden_size
text_config = self.hf_config.get_text_config()
return text_config.hidden_size
@property
def embedding_size(self):
dense_modules = try_get_dense_modules(self.model, revision=self.revision)
if dense_modules is not None:
return dense_modules[-1]["out_features"]
return self.hidden_size
return self.get_hidden_size()
def get_and_verify_max_len(self, max_model_len: int):
# Consider max_model_len in tokenizer_config only when

View File

@@ -301,7 +301,7 @@ def as_seq_cls_model(cls: _T) -> _T:
quant_config = vllm_config.quant_config
self.score = ReplicatedLinear(
model_config.hidden_size,
model_config.get_hidden_size(),
text_config.num_labels,
bias=False,
params_dtype=vllm_config.model_config.head_dtype,