From 384dd1b0a899c6761010b42aefe1159c8062f0a4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Mat=C3=BA=C5=A1=20N=C3=A1me=C5=A1n=C3=BD?=
Date: Tue, 26 Aug 2025 14:13:15 +0200
Subject: [PATCH] [Bugfix] Add missing enable_log_outputs parameter to
 init_app_state function (#23634)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Matúš Námešný
---
 vllm/entrypoints/openai/api_server.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/vllm/entrypoints/openai/api_server.py b/vllm/entrypoints/openai/api_server.py
index 14ba8aa641837..db02767fdfd71 100644
--- a/vllm/entrypoints/openai/api_server.py
+++ b/vllm/entrypoints/openai/api_server.py
@@ -1748,6 +1748,7 @@ async def init_app_state(
         reasoning_parser=args.reasoning_parser,
         enable_prompt_tokens_details=args.enable_prompt_tokens_details,
         enable_force_include_usage=args.enable_force_include_usage,
+        enable_log_outputs=args.enable_log_outputs,
     ) if "generate" in supported_tasks else None
     state.openai_serving_chat = OpenAIServingChat(
         engine_client,
@@ -1765,6 +1766,7 @@ async def init_app_state(
         reasoning_parser=args.reasoning_parser,
         enable_prompt_tokens_details=args.enable_prompt_tokens_details,
         enable_force_include_usage=args.enable_force_include_usage,
+        enable_log_outputs=args.enable_log_outputs,
     ) if "generate" in supported_tasks else None
     state.openai_serving_completion = OpenAIServingCompletion(
         engine_client,
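
The bug the patch addresses is the common "forgotten keyword argument" pattern: when a call site omits a keyword, the parameter silently keeps its default, so the CLI setting never takes effect. The sketch below is a minimal illustration of that failure mode, not vLLM's actual code; `ServingStub`, `finish_request`, and the logging behaviour are hypothetical stand-ins for the real serving classes, and only the `enable_log_outputs` name comes from the patch itself.

```python
# Minimal sketch (assumed names, not vLLM's real classes) of why the missing
# keyword argument disabled output logging regardless of the CLI flag.
from argparse import Namespace


class ServingStub:
    """Hypothetical stand-in for OpenAIServingChat / OpenAIServingResponses."""

    def __init__(self, *, enable_log_outputs: bool = False) -> None:
        self.enable_log_outputs = enable_log_outputs

    def finish_request(self, output: str) -> None:
        # Outputs are only logged when the flag was actually forwarded here.
        if self.enable_log_outputs:
            print(f"Generated: {output!r}")


args = Namespace(enable_log_outputs=True)  # as if the flag was set on the CLI

before_fix = ServingStub()  # keyword omitted -> default False, nothing logged
after_fix = ServingStub(enable_log_outputs=args.enable_log_outputs)

before_fix.finish_request("Hello!")  # prints nothing
after_fix.finish_request("Hello!")   # prints: Generated: 'Hello!'
```

With the two added lines in `init_app_state`, the value parsed from the command line is forwarded to both serving constructors instead of being dropped.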