[Bugfix] Fix processor initialization in transformers 4.53.0 (#20244)

Signed-off-by: Isotr0py <2037008807@qq.com>
This commit is contained in:
Isotr0py 2025-06-30 18:16:16 +08:00 committed by GitHub
parent f5dfa07531
commit e936e401de
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -5,7 +5,9 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, NamedTuple, Optional, Union
 import torch
+from packaging.version import Version
 from transformers import BatchFeature, PretrainedConfig, ProcessorMixin
+from transformers import __version__ as TRANSFORMERS_VERSION
 from typing_extensions import TypeVar
 from vllm.jsontree import JSONTree, json_map_leaves
@@ -128,9 +130,13 @@ class InputProcessingContext(InputContext):
     /,
     **kwargs: object,
 ) -> _P:
+    # Transformers 4.53.0 has issue with passing tokenizer to
+    # initialize processor. We disable it for this version.
+    # See: https://github.com/vllm-project/vllm/issues/20224
+    if Version(TRANSFORMERS_VERSION) != Version("4.53.0"):
+        kwargs["tokenizer"] = self.tokenizer
     return super().get_hf_processor(
         typ,
-        tokenizer=self.tokenizer,
         **kwargs,
     )