mirror of
https://git.datalinker.icu/vllm-project/vllm.git
synced 2025-12-09 08:55:46 +08:00
[ResponseAPI] Simplify input/output message serialization (#26620)
Signed-off-by: Jialin Ouyang <Jialin.Ouyang@gmail.com>
This commit is contained in:
parent
767c3ab869
commit
4073c82c4e
@ -12,6 +12,8 @@ from openai_harmony import (
|
||||
Message,
|
||||
)
|
||||
|
||||
from vllm.entrypoints.openai.protocol import serialize_message, serialize_messages
|
||||
|
||||
from ...utils import RemoteOpenAIServer
|
||||
|
||||
MODEL_NAME = "openai/gpt-oss-20b"
|
||||
@ -758,3 +760,32 @@ async def test_output_messages_enabled(client: OpenAI, model_name: str, server):
|
||||
assert response.status == "completed"
|
||||
assert len(response.input_messages) > 0
|
||||
assert len(response.output_messages) > 0
|
||||
|
||||
|
||||
def test_serialize_message() -> None:
    """serialize_message passes dicts through and round-trips harmony Messages."""
    # Plain dicts are returned unchanged.
    plain = {"a": 1, "b": "2"}
    assert serialize_message(plain) == plain

    # A harmony Message serializes back to the dict it was built from.
    expected = {
        "role": "assistant",
        "name": None,
        "content": [{"type": "text", "text": "Test 1"}],
        "channel": "analysis",
    }
    assert serialize_message(Message.from_dict(expected)) == expected
|
||||
|
||||
|
||||
def test_serialize_messages() -> None:
    """serialize_messages collapses falsy input to None and handles mixed lists."""
    # None and the empty list both serialize to None.
    for empty in (None, []):
        assert serialize_messages(empty) is None

    # Mixed lists keep order: Messages become dicts, dicts pass through.
    raw = {"a": 3, "b": "4"}
    as_dict = {
        "role": "assistant",
        "name": None,
        "content": [{"type": "text", "text": "Test 2"}],
        "channel": "analysis",
    }
    harmony_msg = Message.from_dict(as_dict)
    assert serialize_messages([harmony_msg, raw]) == [as_dict, raw]
|
||||
|
||||
@ -2065,6 +2065,26 @@ class ResponseUsage(OpenAIBaseModel):
|
||||
total_tokens: int
|
||||
|
||||
|
||||
def serialize_message(msg):
    """Serialize a single message to a plain dict.

    Accepts an already-serialized ``dict``, an openai_harmony ``Message``
    (anything exposing ``to_dict()``), or a pydantic model as a fallback.

    Args:
        msg: The message to serialize.

    Returns:
        A dict representation of the message.
    """
    if isinstance(msg, dict):
        # Already serialized; pass through unchanged.
        return msg
    if hasattr(msg, "to_dict"):
        # openai_harmony Message exposes to_dict(). Checking for to_dict
        # directly (rather than __dict__, which nearly every Python object
        # has) avoids AttributeError on objects without to_dict and keeps
        # the pydantic fallback below reachable.
        return msg.to_dict()
    # Fallback to pydantic dump. model_dump() (not model_dump_json(), which
    # returns a JSON string) so the result is a dict, consistent with the
    # other branches.
    return msg.model_dump()
|
||||
|
||||
|
||||
def serialize_messages(msgs):
    """Serialize a sequence of messages via :func:`serialize_message`.

    Returns ``None`` when *msgs* is ``None`` or empty; otherwise a list of
    serialized messages in the original order.
    """
    if not msgs:
        return None
    return [serialize_message(m) for m in msgs]
|
||||
|
||||
|
||||
class ResponsesResponse(OpenAIBaseModel):
|
||||
id: str = Field(default_factory=lambda: f"resp_{random_uuid()}")
|
||||
created_at: int = Field(default_factory=lambda: int(time.time()))
|
||||
@ -2107,35 +2127,13 @@ class ResponsesResponse(OpenAIBaseModel):
|
||||
# https://github.com/openai/harmony/issues/78
|
||||
@field_serializer("output_messages", when_used="json")
def serialize_output_messages(self, msgs, _info):
    """Serialize output_messages for JSON output via the shared helper."""
    serialized = serialize_messages(msgs)
    return serialized
|
||||
|
||||
# NOTE: openAI harmony doesn't serialize TextContent properly, this fixes it
# https://github.com/openai/harmony/issues/78
@field_serializer("input_messages", when_used="json")
def serialize_input_messages(self, msgs, _info):
    """Serialize input_messages for JSON output via the shared helper."""
    return serialize_messages(msgs)
|
||||
|
||||
@classmethod
|
||||
def from_request(
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user