From 75a99b98bf8e96b573f0c695291912a4ac8d8180 Mon Sep 17 00:00:00 2001
From: Brayden Zhong
Date: Tue, 15 Jul 2025 22:42:40 -0400
Subject: [PATCH] [Chore] Remove outdated transformers check (#20989)

Signed-off-by: Brayden Zhong
---
 vllm/model_executor/models/idefics3.py | 15 ++++-----------
 1 file changed, 4 insertions(+), 11 deletions(-)

diff --git a/vllm/model_executor/models/idefics3.py b/vllm/model_executor/models/idefics3.py
index 4643468af4ce1..de216a81e9344 100644
--- a/vllm/model_executor/models/idefics3.py
+++ b/vllm/model_executor/models/idefics3.py
@@ -22,8 +22,8 @@ from typing import Literal, Optional, TypedDict, Union
 
 import torch
 from torch import nn
-from transformers import (AddedToken, BatchFeature, Idefics3Config,
-                          Idefics3ImageProcessor, Idefics3Processor)
+from transformers import (BatchFeature, Idefics3Config, Idefics3ImageProcessor,
+                          Idefics3Processor)
 
 from vllm.config import VllmConfig
 from vllm.model_executor.layers.linear import ReplicatedLinear
@@ -199,21 +199,14 @@ class Idefics3ProcessingInfo(BaseProcessingInfo):
 
         return grid_w * grid_h + 1
 
-    # TODO: Remove after requiring transformers>=4.52
-    def _get_content(self, token: Union[AddedToken, str]) -> str:
-        if isinstance(token, str):
-            return token
-
-        return token.content
-
     def _get_image_token(
             self,
             processor: Optional[Idefics3Processor]) -> tuple[str, str, str]:
         if processor is None:
             processor = self.get_hf_processor()
 
-        image_token = self._get_content(processor.image_token)
-        fake_image_token = self._get_content(processor.fake_image_token)
+        image_token = processor.image_token
+        fake_image_token = processor.fake_image_token
         global_image_token = processor.global_image_tag
 
         return image_token, fake_image_token, global_image_token