From fad6e2538ebacbed4ba9cb04f0d143b7daf2be29 Mon Sep 17 00:00:00 2001
From: Reid <61492567+reidliu41@users.noreply.github.com>
Date: Tue, 8 Apr 2025 06:30:35 +0800
Subject: [PATCH] [Misc] add description attribute in CLI (#15921)

Signed-off-by: reidliu41
Co-authored-by: reidliu41
---
 vllm/entrypoints/cli/benchmark/base.py | 1 +
 vllm/entrypoints/cli/benchmark/main.py | 1 +
 vllm/entrypoints/cli/openai.py         | 7 +++++--
 vllm/entrypoints/cli/serve.py          | 3 ++-
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/vllm/entrypoints/cli/benchmark/base.py b/vllm/entrypoints/cli/benchmark/base.py
index c41b2c5867818..94fb415f581f4 100644
--- a/vllm/entrypoints/cli/benchmark/base.py
+++ b/vllm/entrypoints/cli/benchmark/base.py
@@ -32,6 +32,7 @@ class BenchmarkSubcommandBase(CLISubcommand):
         parser = subparsers.add_parser(
             self.name,
             help=self.help,
+            description=self.help,
             usage=f"vllm bench {self.name} [options]")
         self.add_cli_args(parser)
         return parser
diff --git a/vllm/entrypoints/cli/benchmark/main.py b/vllm/entrypoints/cli/benchmark/main.py
index 7583540920d30..1bcb25be2fcae 100644
--- a/vllm/entrypoints/cli/benchmark/main.py
+++ b/vllm/entrypoints/cli/benchmark/main.py
@@ -33,6 +33,7 @@ class BenchmarkSubcommand(CLISubcommand):
         bench_parser = subparsers.add_parser(
             "bench",
             help="vLLM bench subcommand.",
+            description="vLLM bench subcommand.",
             usage="vllm bench [options]")
         bench_subparsers = bench_parser.add_subparsers(required=True,
                                                        dest="bench_type")
diff --git a/vllm/entrypoints/cli/openai.py b/vllm/entrypoints/cli/openai.py
index 21a7d48b75c18..1d1bba1d49ce0 100644
--- a/vllm/entrypoints/cli/openai.py
+++ b/vllm/entrypoints/cli/openai.py
@@ -126,7 +126,8 @@ class ChatCommand(CLISubcommand):
             subparsers: argparse._SubParsersAction) -> FlexibleArgumentParser:
         chat_parser = subparsers.add_parser(
             "chat",
-            help="Generate chat completions via the running API server",
+            help="Generate chat completions via the running API server.",
+            description="Generate chat completions via the running API server.",
             usage="vllm chat [options]")
         _add_query_options(chat_parser)
         chat_parser.add_argument(
@@ -162,7 +163,9 @@ class CompleteCommand(CLISubcommand):
         complete_parser = subparsers.add_parser(
             "complete",
             help=("Generate text completions based on the given prompt "
-                  "via the running API server"),
+                  "via the running API server."),
+            description=("Generate text completions based on the given prompt "
+                         "via the running API server."),
             usage="vllm complete [options]")
         _add_query_options(complete_parser)
         return complete_parser
diff --git a/vllm/entrypoints/cli/serve.py b/vllm/entrypoints/cli/serve.py
index e89ac4e219997..5c8781b50d2ca 100644
--- a/vllm/entrypoints/cli/serve.py
+++ b/vllm/entrypoints/cli/serve.py
@@ -34,7 +34,8 @@ class ServeSubcommand(CLISubcommand):
             subparsers: argparse._SubParsersAction) -> FlexibleArgumentParser:
         serve_parser = subparsers.add_parser(
             "serve",
-            help="Start the vLLM OpenAI Compatible API server",
+            help="Start the vLLM OpenAI Compatible API server.",
+            description="Start the vLLM OpenAI Compatible API server.",
             usage="vllm serve [model_tag] [options]")
         serve_parser.add_argument("model_tag",
                                   type=str,
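
The motivation for passing description alongside help: argparse shows a subparser's
help text only in the parent parser's subcommand listing, while the description is
what appears at the top of the subcommand's own --help page; without an explicit
description, that page has no summary line. Below is a minimal standalone sketch of
this behavior (plain argparse, not the actual vLLM entrypoint wiring; the parser
names are illustrative only):

    import argparse

    # Illustrative top-level parser, not the real vLLM CLI setup.
    parser = argparse.ArgumentParser(prog="vllm")
    subparsers = parser.add_subparsers(dest="subcommand")

    # help is shown by "vllm --help" in the subcommand listing;
    # description is shown at the top of "vllm chat --help".
    chat_parser = subparsers.add_parser(
        "chat",
        help="Generate chat completions via the running API server.",
        description="Generate chat completions via the running API server.",
        usage="vllm chat [options]")

    # Prints the subcommand help page, which now includes the description line.
    chat_parser.print_help()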