[Frontend] Support setting logprobs to -1 (#25031)
Signed-off-by: chaunceyjiang <chaunceyjiang@gmail.com>
This commit is contained in: parent abdfcd4f3d, commit cc935fdd7e
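For context before the diff: this change lets an OpenAI-compatible client ask for log probabilities over the entire vocabulary by passing top_logprobs=-1 instead of a fixed top-k. Below is a minimal client-side sketch of that usage, not taken from the commit; the base URL, API key, and model name are placeholder assumptions.

# Hypothetical usage sketch; the server URL, API key, and model name below
# are placeholders, not values from this commit.
import asyncio
import openai

client = openai.AsyncOpenAI(base_url="http://localhost:8000/v1",
                            api_key="EMPTY")

async def main() -> None:
    completion = await client.chat.completions.create(
        model="my-model",  # assumed model name served by vLLM
        messages=[{"role": "user", "content": "Hello"}],
        logprobs=True,
        # After this change, -1 requests logprobs for every vocabulary token
        # rather than a fixed top-k; passed via extra_body as in the test below.
        extra_body={"top_logprobs": -1},
    )
    print(completion.choices[0].logprobs.content[0].top_logprobs)

if __name__ == "__main__":
    asyncio.run(main())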
@@ -99,3 +99,26 @@ async def test_prompt_logprobs(client: openai.AsyncOpenAI):
     assert completion.prompt_logprobs is not None
     assert len(completion.prompt_logprobs) > 0
 
+
+
+@pytest.mark.asyncio
+async def test_top_logprobs(client: openai.AsyncOpenAI):
+    messages = [{
+        "role": "system",
+        "content": "You are a helpful assistant."
+    }, {
+        "role": "user",
+        "content": "Beijing is the capital of which country?"
+    }]
+
+    completion = await client.chat.completions.create(
+        model=MODEL_NAME,
+        messages=messages,
+        extra_body={
+            "top_logprobs": -1,
+            "logprobs": "true",
+        },
+    )
+    assert completion.choices[0].logprobs is not None
+    assert completion.choices[0].logprobs.content is not None
+    assert len(completion.choices[0].logprobs.content) > 0
@@ -832,10 +832,12 @@ class ChatCompletionRequest(OpenAIBaseModel):
                 raise ValueError("`prompt_logprobs=-1` is only supported with "
                                  "vLLM engine V1.")
         if (top_logprobs := data.get("top_logprobs")) is not None:
-            if top_logprobs < 0:
-                raise ValueError("`top_logprobs` must be a positive value.")
+            if top_logprobs < 0 and top_logprobs != -1:
+                raise ValueError(
+                    "`top_logprobs` must be a positive value or -1.")
 
-            if top_logprobs > 0 and not data.get("logprobs"):
+            if (top_logprobs == -1
+                    or top_logprobs > 0) and not data.get("logprobs"):
                 raise ValueError(
                     "when using `top_logprobs`, `logprobs` must be set to true."
                 )
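To spell out what the updated validation accepts, here is a standalone sketch of the rules encoded in the hunk above; validate_top_logprobs is a made-up helper for illustration, not a function in vLLM.

# Standalone sketch of the new acceptance rules; `validate_top_logprobs`
# is a hypothetical helper, not part of the vLLM codebase.
def validate_top_logprobs(data: dict) -> None:
    top_logprobs = data.get("top_logprobs")
    if top_logprobs is None:
        return
    # Any negative value other than the new -1 sentinel is still rejected.
    if top_logprobs < 0 and top_logprobs != -1:
        raise ValueError("`top_logprobs` must be a positive value or -1.")
    # Both -1 (all vocabulary logprobs) and a positive top-k require
    # `logprobs` to be enabled.
    if (top_logprobs == -1 or top_logprobs > 0) and not data.get("logprobs"):
        raise ValueError(
            "when using `top_logprobs`, `logprobs` must be set to true.")

validate_top_logprobs({"top_logprobs": -1, "logprobs": True})   # accepted
validate_top_logprobs({"top_logprobs": 5, "logprobs": True})    # accepted
# validate_top_logprobs({"top_logprobs": -2, "logprobs": True}) # ValueError
# validate_top_logprobs({"top_logprobs": -1})                   # ValueError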