From 8335667c22f1ae53923d85f810581c10df03a125 Mon Sep 17 00:00:00 2001
From: Kseniya Parkhamchuk <43078183+KsuParkhamchuk@users.noreply.github.com>
Date: Sun, 8 Jun 2025 21:22:10 -0500
Subject: [PATCH] [Frontend] Remove unreachable code from llm.py (#19288)

Signed-off-by: KsuParkhamchuk
---
 vllm/entrypoints/llm.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/vllm/entrypoints/llm.py b/vllm/entrypoints/llm.py
index fd28bf39e2d5..7c9ca41dd794 100644
--- a/vllm/entrypoints/llm.py
+++ b/vllm/entrypoints/llm.py
@@ -519,7 +519,6 @@ class LLM:
                       Sequence) and len(lora_request) != len(prompts):
             raise ValueError(
                 "Lora request list should be the same length as the prompts")
-            return lora_request
 
         if lora_request is None or isinstance(lora_request, LoRARequest):
             return [lora_request] * len(prompts)
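
Note on the change: the deleted `return lora_request` sat directly after the `raise ValueError(...)` inside the same branch, so it could never execute. Below is a minimal sketch of the surrounding validation logic, reconstructed from the hunk's context lines; the function name `resolve_lora_requests`, the stub `LoRARequest` class, and the final `return list(lora_request)` branch are assumptions for illustration, not the exact vLLM source.

```python
from collections.abc import Sequence
from typing import Optional, Union


class LoRARequest:
    """Hypothetical stand-in for vllm.lora.request.LoRARequest (illustration only)."""


def resolve_lora_requests(
    lora_request: Optional[Union[Sequence[LoRARequest], LoRARequest]],
    prompts: Sequence[str],
) -> list[Optional[LoRARequest]]:
    """Expand a per-call LoRA request into one entry per prompt."""
    if isinstance(lora_request,
                  Sequence) and len(lora_request) != len(prompts):
        raise ValueError(
            "Lora request list should be the same length as the prompts")
        # The pre-patch `return lora_request` lived here: the `raise` above
        # always exits this branch first, so the line could never run.

    # None or a single request is broadcast across every prompt.
    if lora_request is None or isinstance(lora_request, LoRARequest):
        return [lora_request] * len(prompts)

    # Otherwise it is already a per-prompt list of matching length.
    return list(lora_request)
```

For example, calling `resolve_lora_requests(req, ["a", "b"])` with a single `req` returns `[req, req]`, a two-element list is returned as-is, and a list whose length does not match `prompts` raises the `ValueError`.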