From 561f38dc3cfc54cca7710b033a86a315deb24111 Mon Sep 17 00:00:00 2001
From: Tyler Michael Smith
Date: Tue, 9 Sep 2025 20:32:36 -0400
Subject: [PATCH] [Bugfix] Improve EPLB config validation error message
 (#24524)

Signed-off-by: Tyler Michael Smith
---
 vllm/config/parallel.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/vllm/config/parallel.py b/vllm/config/parallel.py
index 3a74b5fb7e64f..2f8ad5c6b6b04 100644
--- a/vllm/config/parallel.py
+++ b/vllm/config/parallel.py
@@ -368,8 +368,10 @@ class ParallelConfig:
         else:
             if self.eplb_config.num_redundant_experts != 0:
                 raise ValueError(
-                    "num_redundant_experts should be used with EPLB."
-                    f"{self.eplb_config.num_redundant_experts}.")
+                    "num_redundant_experts is set to "
+                    f"{self.eplb_config.num_redundant_experts} but EPLB is not "
+                    "enabled. Either enable EPLB or unset "
+                    "num_redundant_experts.")
         if self.distributed_executor_backend is None and self.world_size > 1:
             # We use multiprocessing by default if world_size fits on the
             # current node and we aren't in a ray placement group.
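
For context, below is a minimal, self-contained sketch of the validation behavior this patch changes. The class and field names here (ParallelConfig, EPLBConfig, enable_eplb) are simplified assumptions for illustration only, not the exact vLLM definitions; only the error text matches the patch.

    # Minimal sketch of the validation this patch improves; names are
    # simplified assumptions, not the real vLLM config classes.
    from dataclasses import dataclass, field


    @dataclass
    class EPLBConfig:
        num_redundant_experts: int = 0


    @dataclass
    class ParallelConfig:
        enable_eplb: bool = False  # assumed flag name for enabling EPLB
        eplb_config: EPLBConfig = field(default_factory=EPLBConfig)

        def __post_init__(self) -> None:
            if (not self.enable_eplb
                    and self.eplb_config.num_redundant_experts != 0):
                # After this patch, the error names the conflicting setting
                # and tells the user how to resolve it.
                raise ValueError(
                    "num_redundant_experts is set to "
                    f"{self.eplb_config.num_redundant_experts} but EPLB is not "
                    "enabled. Either enable EPLB or unset "
                    "num_redundant_experts.")


    # Example: the misconfigured case now raises the clearer message.
    if __name__ == "__main__":
        try:
            ParallelConfig(enable_eplb=False,
                           eplb_config=EPLBConfig(num_redundant_experts=2))
        except ValueError as e:
            print(e)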