diff --git a/vllm/entrypoints/chat_utils.py b/vllm/entrypoints/chat_utils.py
index 8485022024a4f..6a7975adeac81 100644
--- a/vllm/entrypoints/chat_utils.py
+++ b/vllm/entrypoints/chat_utils.py
@@ -1629,12 +1629,17 @@ def _postprocess_messages(messages: list[ConversationMessage]) -> None:
     # so, for messages that have tool_calls, parse the string (which we get
     # from openAI format) to dict
     for message in messages:
-        if (
-            message["role"] == "assistant"
-            and "tool_calls" in message
-            and isinstance(message["tool_calls"], list)
-        ):
-            for item in message["tool_calls"]:
+        if message["role"] == "assistant" and "tool_calls" in message:
+            tool_calls = message.get("tool_calls")
+            if not isinstance(tool_calls, list):
+                continue
+
+            if len(tool_calls) == 0:
+                # Drop empty tool_calls to keep templates on the normal assistant path.
+                message.pop("tool_calls", None)
+                continue
+
+            for item in tool_calls:
                 # if arguments is None or empty string, set to {}
                 if content := item["function"].get("arguments"):
                     if not isinstance(content, (dict, list)):
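
A minimal standalone sketch of the behavior this hunk introduces, for illustration only: the helper name `_normalize_tool_calls` and the plain-dict messages are hypothetical stand-ins for vLLM's `_postprocess_messages` and `ConversationMessage`, and the parsing of the `arguments` string mirrors the surrounding context lines rather than being part of this change.

```python
import json


def _normalize_tool_calls(messages: list[dict]) -> None:
    # Hypothetical sketch of the updated post-processing logic.
    for message in messages:
        if message["role"] == "assistant" and "tool_calls" in message:
            tool_calls = message.get("tool_calls")
            if not isinstance(tool_calls, list):
                continue

            if len(tool_calls) == 0:
                # An empty list would steer chat templates onto the tool-call
                # branch; dropping the key keeps them on the plain assistant path.
                message.pop("tool_calls", None)
                continue

            for item in tool_calls:
                # Arguments arrive as a JSON string in the OpenAI format;
                # parse to a dict, treating None or "" as an empty dict.
                if content := item["function"].get("arguments"):
                    if not isinstance(content, (dict, list)):
                        item["function"]["arguments"] = json.loads(content)
                else:
                    item["function"]["arguments"] = {}


# Example: the empty tool_calls list is removed, and string arguments are parsed.
msgs = [
    {"role": "assistant", "content": "hi", "tool_calls": []},
    {"role": "assistant", "tool_calls": [
        {"function": {"name": "get_weather", "arguments": '{"city": "Paris"}'}}
    ]},
]
_normalize_tool_calls(msgs)
assert "tool_calls" not in msgs[0]
assert msgs[1]["tool_calls"][0]["function"]["arguments"] == {"city": "Paris"}
```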