[Frontend] Support setting logprobs to -1 (#25031)

Signed-off-by: chaunceyjiang <chaunceyjiang@gmail.com>
This commit is contained in:
Chauncey 2025-09-18 18:34:42 +08:00 committed by GitHub
parent abdfcd4f3d
commit cc935fdd7e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 28 additions and 3 deletions

View File

@@ -99,3 +99,26 @@ async def test_prompt_logprobs(client: openai.AsyncOpenAI):
    assert completion.prompt_logprobs is not None
    assert len(completion.prompt_logprobs) > 0
@pytest.mark.asyncio
async def test_top_logprobs(client: openai.AsyncOpenAI):
    """Verify that ``top_logprobs=-1`` is accepted by the frontend.

    Regression test for supporting ``-1`` (meaning "all vocabulary
    logprobs"), which the request validator previously rejected as a
    negative value.
    """
    messages = [{
        "role": "system",
        "content": "You are a helpful assistant."
    }, {
        "role": "user",
        "content": "Beijing is the capital of which country?"
    }]
    completion = await client.chat.completions.create(
        model=MODEL_NAME,
        messages=messages,
        extra_body={
            "top_logprobs": -1,
            # Use a real boolean: the string "true" only worked because
            # pydantic's lax mode coerces it; strict validation or other
            # servers would reject a string here.
            "logprobs": True,
        },
    )
    # The server must return per-token logprob entries for the generation.
    assert completion.choices[0].logprobs is not None
    assert completion.choices[0].logprobs.content is not None
    assert len(completion.choices[0].logprobs.content) > 0

View File

@@ -832,10 +832,12 @@ class ChatCompletionRequest(OpenAIBaseModel):
                 raise ValueError("`prompt_logprobs=-1` is only supported with "
                                  "vLLM engine V1.")
         if (top_logprobs := data.get("top_logprobs")) is not None:
-            if top_logprobs < 0:
-                raise ValueError("`top_logprobs` must be a positive value.")
+            if top_logprobs < 0 and top_logprobs != -1:
+                raise ValueError(
+                    "`top_logprobs` must be a positive value or -1.")

-            if top_logprobs > 0 and not data.get("logprobs"):
+            if (top_logprobs == -1
+                    or top_logprobs > 0) and not data.get("logprobs"):
                 raise ValueError(
                     "when using `top_logprobs`, `logprobs` must be set to true."
                 )