From 6aeb1dab4a3585fb0a16f76b7aa2779fffdd5fc8 Mon Sep 17 00:00:00 2001
From: Cyrus Leung
Date: Thu, 11 Sep 2025 16:48:25 +0800
Subject: [PATCH] [Bugfix] Fix incorrect import of CacheConfig (#24631)

Signed-off-by: DarkLight1337
---
 vllm/attention/layers/cross_attention.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/vllm/attention/layers/cross_attention.py b/vllm/attention/layers/cross_attention.py
index 5f814b23888b3..c24fa4e15f679 100644
--- a/vllm/attention/layers/cross_attention.py
+++ b/vllm/attention/layers/cross_attention.py
@@ -6,14 +6,13 @@ from typing import Optional
 
 import numpy as np
 import torch
-from transformers import CacheConfig
 
 from vllm import envs
 from vllm.attention.backends.abstract import (AttentionBackend,
                                               AttentionMetadata, AttentionType)
 from vllm.attention.layer import Attention
 from vllm.attention.selector import get_attn_backend
-from vllm.config import VllmConfig
+from vllm.config import CacheConfig, VllmConfig
 from vllm.logger import init_logger
 from vllm.multimodal import MULTIMODAL_REGISTRY
 from vllm.utils import cdiv