From ba558c029ad65ab4f040c8320607ebd87612cf08 Mon Sep 17 00:00:00 2001
From: Tova Movshovitz
Date: Wed, 19 Nov 2025 13:37:11 +0200
Subject: [PATCH] [config] Expose `get_total_num_hidden_layers()` in ModelConfig (#28961)

Signed-off-by: tovam
Co-authored-by: Wentao Ye <44945378+yewentao256@users.noreply.github.com>
Co-authored-by: Cyrus Leung
---
 vllm/config/model.py | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/vllm/config/model.py b/vllm/config/model.py
index 3e8790a26e0e3..f61dbb6a695a2 100644
--- a/vllm/config/model.py
+++ b/vllm/config/model.py
@@ -1369,11 +1369,7 @@ class ModelConfig:
         # Coerce to 0 if explicitly set to None
         return num_experts or 0
 
-    def get_layers_start_end_indices(
-        self, parallel_config: ParallelConfig
-    ) -> tuple[int, int]:
-        from vllm.distributed.utils import get_pp_indices
-
+    def get_total_num_hidden_layers(self) -> int:
         if (
             self.hf_text_config.model_type == "deepseek_mtp"
             or self.hf_config.model_type == "mimo_mtp"
@@ -1393,6 +1389,15 @@ class ModelConfig:
         total_num_hidden_layers = getattr(
             self.hf_text_config, "num_hidden_layers", 0
         )
+        return total_num_hidden_layers
+
+    def get_layers_start_end_indices(
+        self, parallel_config: ParallelConfig
+    ) -> tuple[int, int]:
+        from vllm.distributed.utils import get_pp_indices
+
+        total_num_hidden_layers = self.get_total_num_hidden_layers()
+
         # the layout order is: DP x PP x TP
         pp_rank = (
             parallel_config.rank // parallel_config.tensor_parallel_size
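
Usage sketch (not part of the patch): the refactor extracts the hidden-layer counting, including the MTP model-type special cases, into a public `get_total_num_hidden_layers()` helper, and reimplements `get_layers_start_end_indices()` on top of it. The snippet below is a minimal illustration of calling both methods; it assumes an already-constructed `ModelConfig`/`ParallelConfig` pair (e.g. from an engine setup elsewhere) and the hypothetical helper name `log_layer_partition` is ours, not part of vLLM.

```python
# Hedged sketch, not part of the patch. Assumes `model_config` and
# `parallel_config` were built by the surrounding application; only the
# two methods touched by this patch are exercised.
from vllm.config import ModelConfig, ParallelConfig


def log_layer_partition(
    model_config: ModelConfig, parallel_config: ParallelConfig
) -> None:
    # Helper exposed by this patch: total hidden-layer count, with the
    # deepseek_mtp/mimo_mtp special-casing handled internally.
    total = model_config.get_total_num_hidden_layers()
    # Existing method, now implemented on top of the helper above.
    start, end = model_config.get_layers_start_end_indices(parallel_config)
    print(f"this PP rank holds layers [{start}, {end}) of {total}")
```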