mirror of
https://git.datalinker.icu/vllm-project/vllm.git
synced 2025-12-10 04:26:00 +08:00
[Chore] eliminate duplicated and unconditional object serialization in anthropic messages api (#27792)
Signed-off-by: Vico Chu <vico24826@gmail.com>
This commit is contained in:
parent
7a8375f8a0
commit
d4aa65c998
@@ -231,9 +231,11 @@ class AnthropicServingMessages(OpenAIServingChat):
         See https://docs.anthropic.com/en/api/messages
         for the API specification. This API mimics the Anthropic messages API.
         """
-        logger.debug("Received messages request %s", request.model_dump_json())
+        if logger.isEnabledFor(logging.DEBUG):
+            logger.debug("Received messages request %s", request.model_dump_json())
         chat_req = self._convert_anthropic_to_openai_request(request)
-        logger.debug("Convert to OpenAI request %s", request.model_dump_json())
+        if logger.isEnabledFor(logging.DEBUG):
+            logger.debug("Convert to OpenAI request %s", chat_req.model_dump_json())
         generator = await self.create_chat_completion(chat_req, raw_request)
 
         if isinstance(generator, ErrorResponse):
@@ -648,10 +648,9 @@ async def create_messages(request: AnthropicMessagesRequest, raw_request: Request):
         return translate_error_response(generator)
 
     elif isinstance(generator, AnthropicMessagesResponse):
-        logger.debug(
-            "Anthropic Messages Response: %s", generator.model_dump(exclude_none=True)
-        )
-        return JSONResponse(content=generator.model_dump(exclude_none=True))
+        resp = generator.model_dump(exclude_none=True)
+        logger.debug("Anthropic Messages Response: %s", resp)
+        return JSONResponse(content=resp)
 
     return StreamingResponse(content=generator, media_type="text/event-stream")
 
Loading…
x
Reference in New Issue
Block a user