[Chore] Remove outdated transformers check (#20989)

Signed-off-by: Brayden Zhong <b8zhong@uwaterloo.ca>
Brayden Zhong 2025-07-15 22:42:40 -04:00 committed by GitHub
parent b5c3b68359
commit 75a99b98bf

vllm/model_executor/models/idefics3.py

@@ -22,8 +22,8 @@ from typing import Literal, Optional, TypedDict, Union
 
 import torch
 from torch import nn
-from transformers import (AddedToken, BatchFeature, Idefics3Config,
-                          Idefics3ImageProcessor, Idefics3Processor)
+from transformers import (BatchFeature, Idefics3Config, Idefics3ImageProcessor,
+                          Idefics3Processor)
 
 from vllm.config import VllmConfig
 from vllm.model_executor.layers.linear import ReplicatedLinear
@@ -199,21 +199,14 @@ class Idefics3ProcessingInfo(BaseProcessingInfo):
         return grid_w * grid_h + 1
 
-    # TODO: Remove after requiring transformers>=4.52
-    def _get_content(self, token: Union[AddedToken, str]) -> str:
-        if isinstance(token, str):
-            return token
-
-        return token.content
-
     def _get_image_token(
             self,
             processor: Optional[Idefics3Processor]) -> tuple[str, str, str]:
         if processor is None:
             processor = self.get_hf_processor()
-        image_token = self._get_content(processor.image_token)
-        fake_image_token = self._get_content(processor.fake_image_token)
+        image_token = processor.image_token
+        fake_image_token = processor.fake_image_token
         global_image_token = processor.global_image_tag
         return image_token, fake_image_token, global_image_token
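
For context, a minimal sketch of the assumption this change relies on: with transformers>=4.52, Idefics3Processor exposes image_token and fake_image_token as plain strings rather than AddedToken objects, so the removed _get_content() unwrapping is no longer needed. The checkpoint name below is only an illustrative example, not something prescribed by this change.

# Minimal sketch, assuming transformers>=4.52 is installed; the model ID is
# just an example checkpoint for Idefics3.
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("HuggingFaceM4/Idefics3-8B-Llama3")

# On these versions the special tokens are already plain strings, so they can
# be used directly, as the updated _get_image_token() does.
assert isinstance(processor.image_token, str)
assert isinstance(processor.fake_image_token, str)
print(processor.image_token, processor.fake_image_token,
      processor.global_image_tag)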