From 91ca929dc7aa626c926cd992b62a45b47e610b6a Mon Sep 17 00:00:00 2001
From: Lehua Ding
Date: Fri, 21 Mar 2025 18:54:11 +0800
Subject: [PATCH] [V1] Fix wrong import path of get_flash_attn_version (#15280)

Signed-off-by: Lehua Ding
---
 vllm/v1/attention/backends/mla/common.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vllm/v1/attention/backends/mla/common.py b/vllm/v1/attention/backends/mla/common.py
index 188a425b107e4..31244443108b6 100644
--- a/vllm/v1/attention/backends/mla/common.py
+++ b/vllm/v1/attention/backends/mla/common.py
@@ -195,8 +195,8 @@ from vllm import _custom_ops as ops
 from vllm.attention.backends.abstract import (AttentionBackend, AttentionLayer,
                                               AttentionMetadata,
                                               MLAAttentionImpl)
-from vllm.attention.backends.utils import get_flash_attn_version
 from vllm.attention.ops.triton_merge_attn_states import merge_attn_states
+from vllm.fa_utils import get_flash_attn_version
 from vllm.logger import init_logger
 from vllm.model_executor.layers.linear import (ColumnParallelLinear,
                                                LinearBase, RowParallelLinear,