# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
from openai.types.chat import (
    ChatCompletionAssistantMessageParam,
    ChatCompletionMessageToolCallParam,
    ChatCompletionToolMessageParam,
)
from openai.types.chat.chat_completion_message_tool_call_param import (
    Function as FunctionCallTool,
)
from openai.types.responses import ResponseFunctionToolCall
from openai.types.responses.tool import Tool

from vllm import envs
from vllm.entrypoints.openai.protocol import (
    ChatCompletionMessageParam,
    ResponseInputOutputItem,
)


def construct_chat_message_with_tool_call(
    item: ResponseInputOutputItem,
) -> ChatCompletionMessageParam:
    """Convert a Responses API tool-call item into a Chat Completions message."""
    if isinstance(item, ResponseFunctionToolCall):
        # Append the function call as a tool call.
        return ChatCompletionAssistantMessageParam(
            role="assistant",
            tool_calls=[
                ChatCompletionMessageToolCallParam(
                    id=item.call_id,
                    function=FunctionCallTool(
                        name=item.name,
                        arguments=item.arguments,
                    ),
                    type="function",
                )
            ],
        )
    elif item.get("type") == "function_call_output":
        # Append the function call output as a tool message.
        return ChatCompletionToolMessageParam(
            role="tool",
            content=item.get("output"),
            tool_call_id=item.get("call_id"),
        )
    return item  # type: ignore


def extract_tool_types(tools: list[Tool]) -> set[str]:
    """
    Extracts the tool types from the given tools.
    """
    tool_types: set[str] = set()
    for tool in tools:
        if tool.type == "mcp":
            # Allow the MCP tool type to enable built-in tools if the
            # server_label is allowlisted in
            # envs.VLLM_GPT_OSS_SYSTEM_TOOL_MCP_LABELS.
            if tool.server_label in envs.VLLM_GPT_OSS_SYSTEM_TOOL_MCP_LABELS:
                tool_types.add(tool.server_label)
        else:
            tool_types.add(tool.type)
    return tool_types


def convert_tool_responses_to_completions_format(tool: dict) -> dict:
    """
    Convert a flat tool schema:
        {"type": "function", "name": "...", "description": "...",
         "parameters": {...}}
    into:
        {"type": "function", "function": {...}}
    """
    return {
        "type": "function",
        "function": tool,
    }
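
# Usage sketch (illustrative only; the literal dicts below are hypothetical
# inputs, not values produced elsewhere in vLLM):
#
#     # Wrap a flat Responses-style tool schema into the nested Chat
#     # Completions format.
#     flat_tool = {
#         "type": "function",
#         "name": "get_weather",
#         "description": "Look up the weather for a city.",
#         "parameters": {"type": "object", "properties": {}},
#     }
#     chat_tool = convert_tool_responses_to_completions_format(flat_tool)
#     # chat_tool == {"type": "function", "function": flat_tool}
#
#     # Convert a function_call_output item into a tool message.
#     item = {
#         "type": "function_call_output",
#         "call_id": "call_abc",
#         "output": '{"temperature_c": 21}',
#     }
#     message = construct_chat_message_with_tool_call(item)
#     # message == {"role": "tool", "content": '{"temperature_c": 21}',
#     #             "tool_call_id": "call_abc"}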