[Bug] Fix torch warning of tf32 usage (#29112)

Signed-off-by: yewentao256 <zhyanwentao@126.com>
This commit is contained in:
Wentao Ye 2025-11-20 20:54:59 -05:00 committed by GitHub
parent ed6ae1e36a
commit e1eefa4c40
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -852,5 +852,6 @@ def init_batch_invariance():
     enable_batch_invariant_mode()
     # Disable TF32 for batch invariance - it causes non-deterministic rounding
-    torch.backends.cuda.matmul.allow_tf32 = False
-    torch.backends.cudnn.allow_tf32 = False
+    torch.backends.cuda.matmul.fp32_precision = "ieee"
+    torch.backends.cudnn.conv.fp32_precision = "ieee"
+    torch.backends.cudnn.rnn.fp32_precision = "ieee"