[Frontend] Support custom request_id from request (#9550)

Co-authored-by: Yuhong Guo <yuhong.gyh@antgroup.com>
This commit is contained in:
Yuhong Guo 2024-10-23 02:07:30 +08:00 committed by GitHub
parent 32a1ee74a0
commit 434984e665
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 8 additions and 2 deletions

View File

@ -284,6 +284,12 @@ class ChatCompletionRequest(OpenAIBaseModel):
"The priority of the request (lower means earlier handling; "
"default: 0). Any priority other than 0 will raise an error "
"if the served model does not use priority scheduling."))
request_id: str = Field(
default_factory=lambda: f"{random_uuid()}",
description=(
"The request_id related to this request. If the caller does "
"not set it, a random_uuid will be generated. This id is used "
"throughout the inference process and returned in the response."))
# doc: end-chat-completion-extra-params

View File

@ -38,7 +38,7 @@ from vllm.sequence import Logprob
from vllm.tracing import (contains_trace_headers, extract_trace_headers,
log_tracing_disabled_warning)
from vllm.transformers_utils.tokenizer import AnyTokenizer, MistralTokenizer
from vllm.utils import iterate_with_cancellation, random_uuid
from vllm.utils import iterate_with_cancellation
logger = init_logger(__name__)
@ -176,7 +176,7 @@ class OpenAIServingChat(OpenAIServing):
"\"auto\" tool choice requires "
"--enable-auto-tool-choice and --tool-call-parser to be set")
request_id = f"chat-{random_uuid()}"
request_id = f"chat-{request.request_id}"
request_metadata = RequestResponseMetadata(request_id=request_id)
if raw_request: