[Refactor] Lazy import tool_parser (#27974)

Signed-off-by: chaunceyjiang <chaunceyjiang@gmail.com>
This commit is contained in:
Chauncey 2025-11-04 10:10:10 +08:00 committed by GitHub
parent 6ddae74054
commit c02fccdbd2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
40 changed files with 266 additions and 158 deletions

View File

@ -407,7 +407,6 @@ Here is a summary of a plugin file:
# the name list in register_module can be used
# in --tool-call-parser. you can define as many
# tool parsers as you want here.
@ToolParserManager.register_module(["example"])
class ExampleToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)
@ -439,6 +438,12 @@ Here is a summary of a plugin file:
return ExtractedToolCallInformation(tools_called=False,
tool_calls=[],
content=text)
# register the tool parser to ToolParserManager
ToolParserManager.register_lazy_module(
name="example",
module_path="vllm.entrypoints.openai.tool_parsers.example",
class_name="ExampleToolParser",
)
```

View File

@ -3,7 +3,9 @@
import pytest
from vllm.entrypoints.openai.tool_parsers import DeepSeekV31ToolParser
from vllm.entrypoints.openai.tool_parsers.deepseekv31_tool_parser import (
DeepSeekV31ToolParser,
)
from vllm.transformers_utils.tokenizer import get_tokenizer
MODEL = "deepseek-ai/DeepSeek-V3.1"

View File

@ -13,7 +13,7 @@ from vllm.entrypoints.openai.protocol import (
FunctionCall,
ToolCall,
)
from vllm.entrypoints.openai.tool_parsers import Ernie45ToolParser
from vllm.entrypoints.openai.tool_parsers.ernie45_tool_parser import Ernie45ToolParser
from vllm.transformers_utils.detokenizer_utils import detokenize_incrementally
from vllm.transformers_utils.tokenizer import AnyTokenizer, get_tokenizer

View File

@ -7,7 +7,9 @@ import json
import pytest
from vllm.entrypoints.openai.protocol import FunctionCall, ToolCall
from vllm.entrypoints.openai.tool_parsers import Glm4MoeModelToolParser
from vllm.entrypoints.openai.tool_parsers.glm4_moe_tool_parser import (
Glm4MoeModelToolParser,
)
from vllm.transformers_utils.tokenizer import get_tokenizer
pytestmark = pytest.mark.cpu_test

View File

@ -9,7 +9,7 @@ import pytest
from partial_json_parser.core.options import Allow
from vllm.entrypoints.openai.protocol import DeltaMessage, FunctionCall, ToolCall
from vllm.entrypoints.openai.tool_parsers import JambaToolParser
from vllm.entrypoints.openai.tool_parsers.jamba_tool_parser import JambaToolParser
from vllm.transformers_utils.detokenizer_utils import detokenize_incrementally
from vllm.transformers_utils.tokenizer import AnyTokenizer, get_tokenizer

View File

@ -7,7 +7,7 @@ import json
import pytest
from vllm.entrypoints.openai.protocol import FunctionCall, ToolCall
from vllm.entrypoints.openai.tool_parsers import KimiK2ToolParser
from vllm.entrypoints.openai.tool_parsers.kimi_k2_tool_parser import KimiK2ToolParser
from vllm.transformers_utils.tokenizer import get_tokenizer
pytestmark = pytest.mark.cpu_test

View File

@ -12,7 +12,7 @@ from vllm.entrypoints.openai.protocol import (
FunctionCall,
ToolCall,
)
from vllm.entrypoints.openai.tool_parsers import MinimaxToolParser
from vllm.entrypoints.openai.tool_parsers.minimax_tool_parser import MinimaxToolParser
from vllm.transformers_utils.tokenizer import get_tokenizer
pytestmark = pytest.mark.cpu_test

View File

@ -15,7 +15,7 @@ from openai_harmony import (
)
from vllm.entrypoints.openai.protocol import FunctionCall, ToolCall
from vllm.entrypoints.openai.tool_parsers import OpenAIToolParser
from vllm.entrypoints.openai.tool_parsers.openai_tool_parser import OpenAIToolParser
from vllm.transformers_utils.tokenizer import get_tokenizer
MODEL = "gpt2"

View File

@ -14,7 +14,7 @@ from vllm.entrypoints.openai.protocol import (
FunctionCall,
ToolCall,
)
from vllm.entrypoints.openai.tool_parsers import SeedOssToolParser
from vllm.entrypoints.openai.tool_parsers.seed_oss_tool_parser import SeedOssToolParser
from vllm.transformers_utils.detokenizer_utils import detokenize_incrementally
from vllm.transformers_utils.tokenizer import AnyTokenizer, get_tokenizer

View File

@ -12,7 +12,7 @@ from vllm.entrypoints.openai.protocol import (
FunctionCall,
ToolCall,
)
from vllm.entrypoints.openai.tool_parsers import xLAMToolParser
from vllm.entrypoints.openai.tool_parsers.xlam_tool_parser import xLAMToolParser
from vllm.transformers_utils.detokenizer_utils import detokenize_incrementally
from vllm.transformers_utils.tokenizer import AnyTokenizer, get_tokenizer

View File

@ -1943,7 +1943,7 @@ def create_server_unix_socket(path: str) -> socket.socket:
def validate_api_server_args(args):
valid_tool_parses = ToolParserManager.tool_parsers.keys()
valid_tool_parses = ToolParserManager.list_registered()
if args.enable_auto_tool_choice and args.tool_call_parser not in valid_tool_parses:
raise KeyError(
f"invalid tool call parser: {args.tool_call_parser} "

View File

@ -219,7 +219,7 @@ class FrontendArgs:
frontend_kwargs["middleware"]["default"] = []
# Special case: Tool call parser shows built-in options.
valid_tool_parsers = list(ToolParserManager.tool_parsers.keys())
valid_tool_parsers = list(ToolParserManager.list_registered())
parsers_str = ",".join(valid_tool_parsers)
frontend_kwargs["tool_call_parser"]["metavar"] = (
f"{{{parsers_str}}} or name registered in --tool-parser-plugin"

View File

@ -1,61 +1,142 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
from .abstract_tool_parser import ToolParser, ToolParserManager
from .deepseekv3_tool_parser import DeepSeekV3ToolParser
from .deepseekv31_tool_parser import DeepSeekV31ToolParser
from .ernie45_tool_parser import Ernie45ToolParser
from .glm4_moe_tool_parser import Glm4MoeModelToolParser
from .granite_20b_fc_tool_parser import Granite20bFCToolParser
from .granite_tool_parser import GraniteToolParser
from .hermes_tool_parser import Hermes2ProToolParser
from .hunyuan_a13b_tool_parser import HunyuanA13BToolParser
from .internlm2_tool_parser import Internlm2ToolParser
from .jamba_tool_parser import JambaToolParser
from .kimi_k2_tool_parser import KimiK2ToolParser
from .llama4_pythonic_tool_parser import Llama4PythonicToolParser
from .llama_tool_parser import Llama3JsonToolParser
from .longcat_tool_parser import LongcatFlashToolParser
from .minimax_m2_tool_parser import MinimaxM2ToolParser
from .minimax_tool_parser import MinimaxToolParser
from .mistral_tool_parser import MistralToolParser
from .olmo3_tool_parser import Olmo3PythonicToolParser
from .openai_tool_parser import OpenAIToolParser
from .phi4mini_tool_parser import Phi4MiniJsonToolParser
from .pythonic_tool_parser import PythonicToolParser
from .qwen3coder_tool_parser import Qwen3CoderToolParser
from .qwen3xml_tool_parser import Qwen3XMLToolParser
from .seed_oss_tool_parser import SeedOssToolParser
from .step3_tool_parser import Step3ToolParser
from .xlam_tool_parser import xLAMToolParser
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
__all__ = [
"ToolParser",
"ToolParserManager",
"Granite20bFCToolParser",
"GraniteToolParser",
"Hermes2ProToolParser",
"MistralToolParser",
"Internlm2ToolParser",
"Llama3JsonToolParser",
"JambaToolParser",
"Llama4PythonicToolParser",
"LongcatFlashToolParser",
"PythonicToolParser",
"Phi4MiniJsonToolParser",
"DeepSeekV3ToolParser",
"DeepSeekV31ToolParser",
"Ernie45ToolParser",
"xLAMToolParser",
"Olmo3PythonicToolParser",
"MinimaxToolParser",
"KimiK2ToolParser",
"HunyuanA13BToolParser",
"Glm4MoeModelToolParser",
"Qwen3CoderToolParser",
"Qwen3XMLToolParser",
"SeedOssToolParser",
"Step3ToolParser",
"OpenAIToolParser",
"MinimaxM2ToolParser",
]
__all__ = ["ToolParser", "ToolParserManager"]
"""
Register a lazy module mapping.
Example:
ToolParserManager.register_lazy_module(
name="kimi_k2",
module_path="vllm.entrypoints.openai.tool_parsers.kimi_k2_parser",
class_name="KimiK2ToolParser",
)
"""
_TOOL_PARSERS_TO_REGISTER = {
"deepseek_v3": ( # name
"deepseekv3_tool_parser", # filename
"DeepSeekV3ToolParser", # class_name
),
"deepseek_v31": (
"deepseekv31_tool_parser",
"DeepSeekV31ToolParser",
),
"ernie45": (
"ernie45_tool_parser",
"Ernie45ToolParser",
),
"glm45": (
"glm4_moe_tool_parser",
"Glm4MoeModelToolParser",
),
"granite-20b-fc": (
"granite_20b_fc_tool_parser",
"Granite20bFCToolParser",
),
"granite": (
"granite_tool_parser",
"GraniteToolParser",
),
"hermes": (
"hermes_tool_parser",
"Hermes2ProToolParser",
),
"hunyuan_a13b": (
"hunyuan_a13b_tool_parser",
"HunyuanA13BToolParser",
),
"internlm": (
"internlm2_tool_parser",
"Internlm2ToolParser",
),
"jamba": (
"jamba_tool_parser",
"JambaToolParser",
),
"kimi_k2": (
"kimi_k2_tool_parser",
"KimiK2ToolParser",
),
"llama3_json": (
"llama_tool_parser",
"Llama3JsonToolParser",
),
"llama4_json": (
"llama_tool_parser",
"Llama4JsonToolParser",
),
"llama4_pythonic": (
"llama4_pythonic_tool_parser",
"Llama4PythonicToolParser",
),
"longcat": (
"longcat_tool_parser",
"LongcatFlashToolParser",
),
"minimax_m2": (
"minimax_m2_tool_parser",
"MinimaxM2ToolParser",
),
"minimax": (
"minimax_tool_parser",
"MinimaxToolParser",
),
"mistral": (
"mistral_tool_parser",
"MistralToolParser",
),
"olmo3": (
"olmo3_tool_parser",
"Olmo3PythonicToolParser",
),
"openai": (
"openai_tool_parser",
"OpenAIToolParser",
),
"phi4_mini_json": (
"phi4mini_tool_parser",
"Phi4MiniJsonToolParser",
),
"pythonic": (
"pythonic_tool_parser",
"PythonicToolParser",
),
"qwen3_coder": (
"qwen3coder_tool_parser",
"Qwen3CoderToolParser",
),
"qwen3_xml": (
"qwen3xml_tool_parser",
"Qwen3XmlToolParser",
),
"seed_oss": (
"seed_oss_tool_parser",
"SeedOsSToolParser",
),
"step3": (
"step3_tool_parser",
"Step3ToolParser",
),
"xlam": (
"xlam_tool_parser",
"xLAMToolParser",
),
}
def register_lazy_tool_parsers():
    """Register every built-in tool parser lazily with ToolParserManager.

    No parser module is imported here; each one is loaded on first lookup.
    """
    package = "vllm.entrypoints.openai.tool_parsers"
    for parser_name, (file_name, class_name) in _TOOL_PARSERS_TO_REGISTER.items():
        ToolParserManager.register_lazy_module(
            parser_name, f"{package}.{file_name}", class_name
        )


register_lazy_tool_parsers()

View File

@ -1,6 +1,7 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
import importlib
import os
from collections.abc import Callable, Sequence
from functools import cached_property
@ -99,89 +100,158 @@ class ToolParser:
class ToolParserManager:
tool_parsers: dict[str, type] = {}
"""
Central registry for ToolParser implementations.
Supports two modes:
- Eager (immediate) registration via `register_module`
- Lazy registration via `register_lazy_module`
"""
tool_parsers: dict[str, type[ToolParser]] = {}
lazy_parsers: dict[str, tuple[str, str]] = {} # name -> (module_path, class_name)
@classmethod
def get_tool_parser(cls, name: str) -> type[ToolParser]:
    """Retrieve a registered or lazily registered ToolParser class.

    If the parser is lazily registered, it is imported and cached on
    first access.

    Args:
        name: registry name of the parser.

    Raises:
        KeyError: if `name` is neither eagerly nor lazily registered.
    """
    # Eagerly registered (or already-loaded-and-cached) parsers win.
    if name in cls.tool_parsers:
        return cls.tool_parsers[name]
    # Fall back to the lazy registry; this imports and caches the class.
    if name in cls.lazy_parsers:
        return cls._load_lazy_parser(name)
    raise KeyError(f"Tool parser '{name}' not found.")
@classmethod
def _load_lazy_parser(cls, name: str) -> type[ToolParser]:
    """Resolve a lazily registered parser: import, validate, cache, return."""
    module_path, class_name = cls.lazy_parsers[name]
    try:
        module = importlib.import_module(module_path)
        parser_cls = getattr(module, class_name)
        if not issubclass(parser_cls, ToolParser):
            raise TypeError(
                f"{class_name} in {module_path} is not a ToolParser subclass."
            )
        # Cache so later lookups bypass the import machinery entirely.
        cls.tool_parsers[name] = parser_cls
        return parser_cls
    except Exception as e:
        logger.exception(
            "Failed to import lazy tool parser '%s' from %s: %s",
            name,
            module_path,
            e,
        )
        raise
@classmethod
def _register_module(
    cls,
    module: type[ToolParser],
    module_name: str | list[str] | None = None,
    force: bool = True,
) -> None:
    """Register a ToolParser class immediately.

    Args:
        module: the ToolParser subclass to register.
        module_name: name(s) to register under; defaults to the class name.
        force: when False, refuse to overwrite an existing registration.

    Raises:
        TypeError: if `module` is not a ToolParser subclass, or
            `module_name` has an unsupported type.
        KeyError: if `force` is False and a name is already taken.
    """
    if not issubclass(module, ToolParser):
        raise TypeError(
            f"module must be subclass of ToolParser, but got {type(module)}"
        )
    if module_name is None:
        module_name = module.__name__
    if isinstance(module_name, str):
        module_names = [module_name]
    elif is_list_of(module_name, str):
        module_names = module_name
    else:
        raise TypeError("module_name must be str, list[str], or None.")
    for name in module_names:
        if not force and name in cls.tool_parsers:
            existed = cls.tool_parsers[name]
            raise KeyError(f"{name} is already registered at {existed.__module__}")
        cls.tool_parsers[name] = module
@classmethod
def register_lazy_module(cls, name: str, module_path: str, class_name: str) -> None:
    """Record a name -> (module_path, class_name) mapping without importing.

    The module is only imported when `get_tool_parser(name)` is first called.

    Example:
        ToolParserManager.register_lazy_module(
            name="kimi_k2",
            module_path="vllm.entrypoints.openai.tool_parsers.kimi_k2_parser",
            class_name="KimiK2ToolParser",
        )
    """
    cls.lazy_parsers[name] = (module_path, class_name)
@classmethod
def register_module(
    cls,
    name: str | list[str] | None = None,
    force: bool = True,
    module: type[ToolParser] | None = None,
) -> type[ToolParser] | Callable[[type[ToolParser]], type[ToolParser]]:
    """Register a parser immediately, or lazily when used as a decorator.

    Usage:
        @ToolParserManager.register_module("kimi_k2")
        class KimiK2ToolParser(ToolParser):
            ...

    Or:
        ToolParserManager.register_module(module=SomeToolParser)

    Args:
        name: registry name(s); defaults to the class name when omitted.
        force: overwrite an existing eager registration instead of raising.
        module: when given, register eagerly and return it.

    Raises:
        TypeError: if `force` is not a bool or `name` has an invalid type.
    """
    if not isinstance(force, bool):
        raise TypeError(f"force must be a boolean, but got {type(force)}")
    # Validate `name` eagerly so decorator misuse fails at decoration time.
    if not (name is None or isinstance(name, str) or is_list_of(name, str)):
        raise TypeError(
            "name must be None, an instance of str, or a sequence of str, "
            f"but got {type(name)}"
        )

    # Immediate registration: x.register_module(module=SomeClass)
    if module is not None:
        cls._register_module(module=module, module_name=name, force=force)
        return module

    # Decorator usage: record a lazy mapping only; do not import now.
    def _decorator(obj: type[ToolParser]) -> type[ToolParser]:
        module_path = obj.__module__
        class_name = obj.__name__
        if isinstance(name, str):
            names = [name]
        elif is_list_of(name, str):
            names = name
        else:
            names = [class_name]
        for n in names:
            cls.lazy_parsers[n] = (module_path, class_name)
        return obj

    return _decorator
@classmethod
def list_registered(cls) -> list[str]:
    """Return all known parser names (eager and lazy), sorted and deduped."""
    names: set[str] = set(cls.tool_parsers)
    names.update(cls.lazy_parsers)
    return sorted(names)
@classmethod
def import_tool_parser(cls, plugin_path: str) -> None:
    """Import a user-defined parser file from an arbitrary path.

    Failures are logged and swallowed so a bad plugin does not crash
    server startup.

    Args:
        plugin_path: filesystem path to the plugin source file.
    """
    # Module name is the file's basename without its extension.
    module_name = os.path.splitext(os.path.basename(plugin_path))[0]
    try:
        import_from_path(module_name, plugin_path)
    except Exception:
        logger.exception(
            "Failed to load module '%s' from %s.", module_name, plugin_path
        )
        return

View File

@ -17,7 +17,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -25,7 +24,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("deepseek_v31")
class DeepSeekV31ToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -17,7 +17,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -25,7 +24,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("deepseek_v3")
class DeepSeekV3ToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -17,7 +17,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -25,7 +24,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("ernie45")
class Ernie45ToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
"""

View File

@ -20,7 +20,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -28,7 +27,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("glm45")
class Glm4MoeModelToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -21,7 +21,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import (
consume_space,
@ -35,7 +34,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("granite-20b-fc")
class Granite20bFCToolParser(ToolParser):
"""
Tool call parser for the granite-20b-functioncalling model intended

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import (
consume_space,
@ -33,7 +32,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("granite")
class GraniteToolParser(ToolParser):
"""
Tool call parser for the granite 3.0 models. Intended

View File

@ -20,7 +20,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer, MistralTokenizer
@ -28,7 +27,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer, MistralTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("hermes")
class Hermes2ProToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import consume_space
from vllm.logger import init_logger
@ -29,7 +28,6 @@ from vllm.utils import random_uuid
logger = init_logger(__name__)
@ToolParserManager.register_module("hunyuan_a13b")
class HunyuanA13BToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import extract_intermediate_diff
from vllm.logger import init_logger
@ -28,7 +27,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module(["internlm"])
class Internlm2ToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -18,7 +18,7 @@ from vllm.entrypoints.openai.protocol import (
FunctionCall,
ToolCall,
)
from vllm.entrypoints.openai.tool_parsers import ToolParser, ToolParserManager
from vllm.entrypoints.openai.tool_parsers import ToolParser
from vllm.entrypoints.openai.tool_parsers.utils import extract_intermediate_diff
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -27,7 +27,6 @@ from vllm.transformers_utils.tokenizers import MistralTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("jamba")
class JambaToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -17,7 +17,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -25,7 +24,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module(["kimi_k2"])
class KimiK2ToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -20,7 +20,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
@ -31,7 +30,6 @@ class _UnexpectedAstError(Exception):
pass
@ToolParserManager.register_module("llama4_pythonic")
class Llama4PythonicToolParser(ToolParser):
"""
Toolcall parser for Llama4 that produce tool calls in a pythonic style

View File

@ -21,7 +21,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import (
find_common_prefix,
@ -33,8 +32,6 @@ from vllm.logger import init_logger
logger = init_logger(__name__)
@ToolParserManager.register_module("llama3_json")
@ToolParserManager.register_module("llama4_json")
class Llama3JsonToolParser(ToolParser):
"""
Tool call parser for Llama 3.x and 4 models intended for use with the

View File

@ -3,12 +3,10 @@
import regex as re
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import ToolParserManager
from vllm.entrypoints.openai.tool_parsers.hermes_tool_parser import Hermes2ProToolParser
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ToolParserManager.register_module("longcat")
class LongcatFlashToolParser(Hermes2ProToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -27,7 +26,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("minimax_m2")
class MinimaxM2ToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import extract_intermediate_diff
from vllm.logger import init_logger
@ -28,7 +27,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("minimax")
class MinimaxToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -22,7 +22,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.entrypoints.openai.tool_parsers.utils import extract_intermediate_diff
from vllm.logger import init_logger
@ -53,7 +52,6 @@ def _is_fn_name_regex_support(model_tokenizer: AnyTokenizer) -> bool:
)
@ToolParserManager.register_module("mistral")
class MistralToolParser(ToolParser):
"""
Tool call parser for Mistral 7B Instruct v0.3, intended for use with

View File

@ -20,7 +20,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
@ -31,7 +30,6 @@ class _UnexpectedAstError(Exception):
pass
@ToolParserManager.register_module("olmo3")
class Olmo3PythonicToolParser(ToolParser):
"""
Tool call parser for Olmo 3 models that produce tool calls as

View File

@ -14,7 +14,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
@ -26,7 +25,6 @@ else:
logger = init_logger(__name__)
@ToolParserManager.register_module("openai")
class OpenAIToolParser(ToolParser):
def __init__(self, tokenizer: "AnyTokenizer"):
super().__init__(tokenizer)

View File

@ -18,14 +18,12 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
logger = init_logger(__name__)
@ToolParserManager.register_module("phi4_mini_json")
class Phi4MiniJsonToolParser(ToolParser):
"""
Tool call parser for phi-4-mini models intended for use with the

View File

@ -21,7 +21,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
@ -32,7 +31,6 @@ class _UnexpectedAstError(Exception):
pass
@ToolParserManager.register_module("pythonic")
class PythonicToolParser(ToolParser):
"""
Tool call parser for models that produce tool calls in a pythonic style,

View File

@ -20,7 +20,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -28,7 +27,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("qwen3_coder")
class Qwen3CoderToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -21,7 +21,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -1165,7 +1164,6 @@ class StreamingXMLToolCallParser:
self.deferred_param_raw_value = ""
@ToolParserManager.register_module("qwen3_xml")
class Qwen3XMLToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)

View File

@ -23,7 +23,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -31,7 +30,6 @@ from vllm.transformers_utils.tokenizer import AnyTokenizer
logger = init_logger(__name__)
@ToolParserManager.register_module("seed_oss")
class SeedOssToolParser(ToolParser):
TOOL_CALL_START = "<seed:tool_call>"
TOOL_CALL_END = "</seed:tool_call>"

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -28,7 +27,6 @@ from vllm.utils import random_uuid
logger = init_logger(__name__)
@ToolParserManager.register_module(["step3"])
class Step3ToolParser(ToolParser):
"""
Tool parser for a model that uses a specific XML-like format for tool calls.

View File

@ -19,7 +19,6 @@ from vllm.entrypoints.openai.protocol import (
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
ToolParser,
ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
@ -28,7 +27,6 @@ from vllm.utils import random_uuid
logger = init_logger(__name__)
@ToolParserManager.register_module("xlam")
class xLAMToolParser(ToolParser):
def __init__(self, tokenizer: AnyTokenizer):
super().__init__(tokenizer)