From dc1b6af36298c48cffdcb37fe329b9a51f7304f0 Mon Sep 17 00:00:00 2001 From: Sage Moore Date: Tue, 8 Jul 2025 16:45:11 +0000 Subject: [PATCH] Assert should_ubatch_tensor contains only 0/1 values Signed-off-by: Sage Moore --- vllm/forward_context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/vllm/forward_context.py b/vllm/forward_context.py index e87172bc3d79c..76d4801e0c1bb 100644 --- a/vllm/forward_context.py +++ b/vllm/forward_context.py @@ -61,6 +61,7 @@ class DPMetadata: # num_tokens_across_dp. If there's an incorrect ordering of ARs # across DP ranks, this tensor can end up containing the number # of padded tokens for a DP rank. + assert torch.all((should_ubatch_tensor == 0) | (should_ubatch_tensor == 1)) result: bool = bool(torch.all(should_ubatch_tensor == 1).item())