From 65197a5fb37ef4d8b93e0b99ecc8b902fe948e97 Mon Sep 17 00:00:00 2001
From: Jee Jee Li
Date: Sat, 23 Aug 2025 14:05:27 +0800
Subject: [PATCH] [Misc] Modify CacheConfig import (#23459)

Signed-off-by: Jee Jee Li
---
 vllm/attention/layers/encoder_only_attention.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vllm/attention/layers/encoder_only_attention.py b/vllm/attention/layers/encoder_only_attention.py
index 7b3dcbd823c06..cea05df5b96d2 100644
--- a/vllm/attention/layers/encoder_only_attention.py
+++ b/vllm/attention/layers/encoder_only_attention.py
@@ -5,13 +5,13 @@
 from copy import copy
 from typing import Optional
 
 import torch
-from transformers import CacheConfig
 
 from vllm import envs
 from vllm.attention.backends.abstract import (AttentionBackend,
                                               AttentionMetadata, AttentionType)
 from vllm.attention.layer import Attention
 from vllm.attention.selector import get_attn_backend
+from vllm.config import CacheConfig
 from vllm.v1.attention.backends.utils import (CommonAttentionMetadata,
                                               subclass_attention_backend)
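
Note (not part of the patch): the two classes merely share a name. transformers exposes a CacheConfig for Hugging Face KV-cache quantization settings, while vllm.config.CacheConfig is the configuration object vLLM's attention layers actually consume, so the old import resolved without error but bound the wrong type. Below is a minimal sketch of constructing the correct class; the field values are illustrative assumptions, not taken from this patch, and the exact constructor signature may differ across vLLM versions:

    # Sketch only: shows the corrected import in use. Field values are
    # assumptions for illustration, not part of PR #23459.
    from vllm.config import CacheConfig

    cache_config = CacheConfig(
        block_size=16,                # tokens per KV-cache block
        gpu_memory_utilization=0.9,   # fraction of GPU memory reserved for the cache
        swap_space=4,                 # CPU swap space, in GiB
        cache_dtype="auto",           # follow the model's dtype
    )

Passing this object where an attention layer expects a cache_config would fail type expectations if the transformers class were substituted, which is why narrowing the import to vllm.config is the whole fix here.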