[gpt-oss][1b] streaming add item id, content id (#24788)

Signed-off-by: Andrew Xia <axia@meta.com>
This commit is contained in:
Andrew Xia 2025-09-16 11:41:12 -07:00 committed by GitHub
parent cd1f885bcf
commit f4d6eb95cf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 31 additions and 2 deletions

View File

@@ -318,6 +318,9 @@ async def test_streaming(client: OpenAI, model_name: str, background: bool):
background=background,
)
current_item_id = ""
current_content_index = -1
events = []
current_event_mode = None
resp_id = None
@@ -329,6 +332,26 @@ async def test_streaming(client: OpenAI, model_name: str, background: bool):
current_event_mode = event.type
print(f"\n[{event.type}] ", end="", flush=True)
# verify current_item_id is correct
if event.type == "response.output_item.added":
assert event.item.id != current_item_id
current_item_id = event.item.id
elif event.type in [
"response.output_text.delta",
"response.reasoning_text.delta"
]:
assert event.item_id == current_item_id
# verify content_index_id is correct
if event.type == "response.content_part.added":
assert event.content_index != current_content_index
current_content_index = event.content_index
elif event.type in [
"response.output_text.delta",
"response.reasoning_text.delta"
]:
assert event.content_index == current_content_index
if "text.delta" in event.type:
print(event.delta, end="", flush=True)
elif "reasoning_text.delta" in event.type:

View File

@@ -1260,9 +1260,9 @@ class OpenAIServingResponses(OpenAIServing):
_increment_sequence_number_and_return: Callable[[BaseModel],
BaseModel],
) -> AsyncGenerator[BaseModel, None]:
current_content_index = 0 # FIXME: this number is never changed
current_content_index = -1
current_output_index = 0
current_item_id = "" # FIXME: this number is never changed
current_item_id: str = ""
sent_output_item_added = False
async for ctx in result_generator:
@@ -1353,6 +1353,7 @@ class OpenAIServingResponses(OpenAIServing):
and ctx.parser.current_recipient is None):
if not sent_output_item_added:
sent_output_item_added = True
current_item_id = f"msg_{random_uuid()}"
yield _increment_sequence_number_and_return(
openai_responses_types.
ResponseOutputItemAddedEvent(
@@ -1368,6 +1369,7 @@ class OpenAIServingResponses(OpenAIServing):
status="in_progress",
),
))
current_content_index += 1
yield _increment_sequence_number_and_return(
openai_responses_types.
ResponseContentPartAddedEvent(
@@ -1398,6 +1400,7 @@ class OpenAIServingResponses(OpenAIServing):
and ctx.parser.current_recipient is None):
if not sent_output_item_added:
sent_output_item_added = True
current_item_id = f"msg_{random_uuid()}"
yield _increment_sequence_number_and_return(
openai_responses_types.
ResponseOutputItemAddedEvent(
@@ -1412,6 +1415,7 @@ class OpenAIServingResponses(OpenAIServing):
status="in_progress",
),
))
current_content_index += 1
yield _increment_sequence_number_and_return(
openai_responses_types.
ResponseContentPartAddedEvent(
@@ -1444,6 +1448,7 @@ class OpenAIServingResponses(OpenAIServing):
) and ctx.parser.current_recipient == "python":
if not sent_output_item_added:
sent_output_item_added = True
current_item_id = f"tool_{random_uuid()}"
yield _increment_sequence_number_and_return(
openai_responses_types.
ResponseOutputItemAddedEvent(
@@ -1516,6 +1521,7 @@ class OpenAIServingResponses(OpenAIServing):
raise ValueError(
f"Unknown function name: {function_name}")
current_item_id = f"tool_{random_uuid()}"
yield _increment_sequence_number_and_return(
openai_responses_types.ResponseOutputItemAddedEvent(
type="response.output_item.added",