diff --git a/examples/online_serving/openai_chat_completion_tool_calls_with_reasoning.py b/examples/online_serving/openai_chat_completion_tool_calls_with_reasoning.py
index 9e7a69c6c87d..8c6470aa3dd4 100644
--- a/examples/online_serving/openai_chat_completion_tool_calls_with_reasoning.py
+++ b/examples/online_serving/openai_chat_completion_tool_calls_with_reasoning.py
@@ -31,14 +31,6 @@ available_tools = {"get_current_weather": get_current_weather}
 openai_api_key = "EMPTY"
 openai_api_base = "http://localhost:8000/v1"
 
-client = OpenAI(
-    api_key=openai_api_key,
-    base_url=openai_api_base,
-)
-
-models = client.models.list()
-model = models.data[0].id
-
 tools = [{
     "type": "function",
     "function": {
@@ -109,69 +101,87 @@ def extract_reasoning_and_calls(chunks: list):
     return reasoning_content, arguments, function_names
 
 
-print("---------Full Generate With Automatic Function Calling-------------")
-tool_calls = client.chat.completions.create(messages=messages,
-                                            model=model,
-                                            tools=tools)
-print(f"reasoning_content: {tool_calls.choices[0].message.reasoning_content}")
-print(f"function name: "
-      f"{tool_calls.choices[0].message.tool_calls[0].function.name}")
-print(f"function arguments: "
-      f"{tool_calls.choices[0].message.tool_calls[0].function.arguments}")
+def main():
+    client = OpenAI(
+        api_key=openai_api_key,
+        base_url=openai_api_base,
+    )
 
-print("----------Stream Generate With Automatic Function Calling-----------")
-tool_calls_stream = client.chat.completions.create(messages=messages,
-                                                   model=model,
-                                                   tools=tools,
-                                                   stream=True)
-chunks = []
-for chunk in tool_calls_stream:
-    chunks.append(chunk)
+    models = client.models.list()
+    model = models.data[0].id
 
-reasoning_content, arguments, function_names = extract_reasoning_and_calls(
-    chunks)
+    print(
+        "---------Full Generate With Automatic Function Calling-------------")
+    tool_calls = client.chat.completions.create(messages=messages,
+                                                model=model,
+                                                tools=tools)
+    print(
+        f"reasoning_content: {tool_calls.choices[0].message.reasoning_content}"
+    )
+    print(f"function name: "
+          f"{tool_calls.choices[0].message.tool_calls[0].function.name}")
+    print(f"function arguments: "
+          f"{tool_calls.choices[0].message.tool_calls[0].function.arguments}")
 
-print(f"reasoning_content: {reasoning_content}")
-print(f"function name: {function_names[0]}")
-print(f"function arguments: {arguments[0]}")
+    print(
+        "----------Stream Generate With Automatic Function Calling-----------")
+    tool_calls_stream = client.chat.completions.create(messages=messages,
+                                                       model=model,
+                                                       tools=tools,
+                                                       stream=True)
 
-print("----------Full Generate With Named Function Calling-----------------")
-tool_calls = client.chat.completions.create(messages=messages,
-                                            model=model,
-                                            tools=tools,
-                                            tool_choice={
-                                                "type": "function",
-                                                "function": {
-                                                    "name":
-                                                    "get_current_weather"
-                                                }
-                                            })
+    chunks = list(tool_calls_stream)
 
-tool_call = tool_calls.choices[0].message.tool_calls[0].function
-print(f"reasoning_content: {tool_calls.choices[0].message.reasoning_content}")
-print(f"function name: {tool_call.name}")
-print(f"function arguments: {tool_call.arguments}")
-print("----------Stream Generate With Named Function Calling--------------")
+    reasoning_content, arguments, function_names = extract_reasoning_and_calls(
+        chunks)
 
-tool_calls_stream = client.chat.completions.create(
-    messages=messages,
-    model=model,
-    tools=tools,
-    tool_choice={
-        "type": "function",
-        "function": {
-            "name": "get_current_weather"
-        }
-    },
-    stream=True)
+    print(f"reasoning_content: {reasoning_content}")
print(f"function name: {function_names[0]}") + print(f"function arguments: {arguments[0]}") -chunks = [] -for chunk in tool_calls_stream: - chunks.append(chunk) + print( + "----------Full Generate With Named Function Calling-----------------") + tool_calls = client.chat.completions.create(messages=messages, + model=model, + tools=tools, + tool_choice={ + "type": "function", + "function": { + "name": + "get_current_weather" + } + }) -reasoning_content, arguments, function_names = extract_reasoning_and_calls( - chunks) -print(f"reasoning_content: {reasoning_content}") -print(f"function name: {function_names[0]}") -print(f"function arguments: {arguments[0]}") -print("\n\n") + tool_call = tool_calls.choices[0].message.tool_calls[0].function + print( + f"reasoning_content: {tool_calls.choices[0].message.reasoning_content}" + ) + print(f"function name: {tool_call.name}") + print(f"function arguments: {tool_call.arguments}") + print( + "----------Stream Generate With Named Function Calling--------------") + + tool_calls_stream = client.chat.completions.create( + messages=messages, + model=model, + tools=tools, + tool_choice={ + "type": "function", + "function": { + "name": "get_current_weather" + } + }, + stream=True) + + chunks = list(tool_calls_stream) + + reasoning_content, arguments, function_names = extract_reasoning_and_calls( + chunks) + print(f"reasoning_content: {reasoning_content}") + print(f"function name: {function_names[0]}") + print(f"function arguments: {arguments[0]}") + print("\n\n") + + +if __name__ == "__main__": + main()