From 5210dc3940b0f6554a6db46287281d1be9d187ed Mon Sep 17 00:00:00 2001
From: Xudong Ma
Date: Wed, 15 Oct 2025 01:37:49 -0700
Subject: [PATCH] [Misc] Update TritonLanguagePlaceholder to have attributes
 that are used by Flash Linear Attention ops. (#26853)

Co-authored-by: Xudong Ma
---
 vllm/triton_utils/importing.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/vllm/triton_utils/importing.py b/vllm/triton_utils/importing.py
index e1a509a303c53..f05bc555bfdc3 100644
--- a/vllm/triton_utils/importing.py
+++ b/vllm/triton_utils/importing.py
@@ -98,3 +98,6 @@ class TritonLanguagePlaceholder(types.ModuleType):
         self.int64 = None
         self.int32 = None
         self.tensor = None
+        self.exp = None
+        self.log = None
+        self.log2 = None