Remove torch_xla.tpu.version() from pallas.py. (#21065)

Signed-off-by: Qiliang Cui <derrhein@gmail.com>
This commit is contained in:
QiliangCui 2025-07-16 17:25:26 -07:00 committed by GitHub
parent 01513a334a
commit 72ad273582
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -167,10 +167,6 @@ class PallasAttentionBackendImpl(AttentionImpl):
"are not implemented for "
"PallasAttentionBackendImpl")
tpu_version = torch_xla.tpu.version()
if tpu_version < 4:
raise NotImplementedError("TPU version must be 4 or higher.")
def forward(
self,
layer: AttentionLayer,