Use @property and private field for data_parallel_rank_local (#17053)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Harry Mellor 2025-04-23 16:50:08 +01:00 committed by GitHub
parent f3a21e9c68
commit bdb3660312
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 18 additions and 5 deletions

View File

@@ -1593,8 +1593,21 @@ class ParallelConfig:
the product of the tensor parallel size and data parallel size."""
# Global rank of this process within the data parallel group.
data_parallel_rank: int = 0
"""Rank of the data parallel group."""
# NOTE(review): this Optional field is the pre-change declaration shown by the
# diff; the commit replaces it with the private backing field below plus a
# `data_parallel_rank_local` property of the same name.
data_parallel_rank_local: Optional[int] = None
"""Local rank of the data parallel group, defaults to global rank."""
# Backing store for the `data_parallel_rank_local` property. `init=False`
# keeps it out of the generated __init__; None means "no explicit local rank
# set", in which case the property falls back to the global rank.
_data_parallel_rank_local: Optional[int] = field(default=None, init=False)
"""Private field to store the local rank of the data parallel group."""
@property
def data_parallel_rank_local(self) -> int:
    """Local rank of the data parallel group, defaults to global rank.

    Returns the explicitly-set local rank when one has been stored in the
    private backing field, otherwise falls back to ``data_parallel_rank``.
    """
    local_rank = self._data_parallel_rank_local
    return self.data_parallel_rank if local_rank is None else local_rank

@data_parallel_rank_local.setter
def data_parallel_rank_local(self, value: int) -> None:
    """Record an explicit local rank, overriding the global-rank fallback."""
    self._data_parallel_rank_local = value
# Rendezvous address of the data parallel master; the default targets
# localhost (presumably a single-node setup — confirm for multi-node use).
data_parallel_master_ip: str = "127.0.0.1"
"""IP of the data parallel master."""
# Port the data parallel master is reached on.
data_parallel_master_port: int = 29500

View File

@@ -439,10 +439,10 @@ class MPClient(EngineCoreClient):
) -> None:
# Default case - single core engine.
dp_rank = vllm_config.parallel_config.data_parallel_rank
local_dp_rank = vllm_config.parallel_config.data_parallel_rank_local
core_engine = new_core_engine(
dp_rank, local_dp_rank if local_dp_rank is not None else dp_rank)
vllm_config.parallel_config.data_parallel_rank,
vllm_config.parallel_config.data_parallel_rank_local,
)
core_engines.append(core_engine)
self.core_engine = core_engine