[V1] Fix wrong import path of get_flash_attn_version (#15280)

Signed-off-by: Lehua Ding <lehuading@tencent.com>
Lehua Ding 2025-03-21 18:54:11 +08:00 committed by GitHub
parent 84e00adc8a
commit 91ca929dc7


@@ -195,8 +195,8 @@ from vllm import _custom_ops as ops
 from vllm.attention.backends.abstract import (AttentionBackend, AttentionLayer,
                                                AttentionMetadata,
                                                MLAAttentionImpl)
-from vllm.attention.backends.utils import get_flash_attn_version
 from vllm.attention.ops.triton_merge_attn_states import merge_attn_states
+from vllm.fa_utils import get_flash_attn_version
 from vllm.logger import init_logger
 from vllm.model_executor.layers.linear import (ColumnParallelLinear,
                                                 LinearBase, RowParallelLinear,
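
For reference, a minimal sketch of exercising the corrected import path (assuming a vLLM build that ships vllm/fa_utils.py, as in the diff above; the return semantics described in the comment are an assumption, not taken from this commit):

from vllm.fa_utils import get_flash_attn_version

# Expected to report the FlashAttention major version vLLM selected
# (e.g. 2 or 3), or None when no compatible flash-attn build is available.
print(get_flash_attn_version())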