Fix lora tests failure in TPU CI due to the removal of LoRA bias (#26723)

Signed-off-by: Xiongfei Wei <isaacwxf23@gmail.com>
This commit is contained in:
XiongfeiWei 2025-10-13 22:04:23 -07:00 committed by GitHub
parent 2e36cdbe2b
commit 4497c8f821
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -2128,12 +2128,11 @@ def replace_set_lora(model):
lora_a: torch.Tensor,
lora_b: torch.Tensor,
embeddings_tensor: torch.Tensor | None,
-        bias: torch.Tensor | None = None,
):
# TODO: The integer index leads to a recompilation, but converting it
# to a tensor doesn't seem to work anymore. This might be fixed with a
# later release of torch_xla.
-        self._original_set_lora(index, lora_a, lora_b, embeddings_tensor, bias)
+        self._original_set_lora(index, lora_a, lora_b, embeddings_tensor)
torch_xla.sync(wait=False)
def _tpu_reset_lora(self, index: int):