[Bugfix] Fix _synced_weight_loader (#24565)

Signed-off-by: Kyuyeun Kim <kyuyeunk@google.com>
Authored by Kyuyeun Kim, 2025-09-11 01:52:33 -07:00 (committed by GitHub)
parent 0fc36463e0
commit ed5ae4aace


@@ -52,10 +52,11 @@ def set_weight_attrs(
 def _make_synced_weight_loader(original_weight_loader):
     def _synced_weight_loader(param, *args, **kwargs):
-        original_weight_loader(param, *args, **kwargs)
+        out = original_weight_loader(param, *args, **kwargs)
+        # torch._sync doesn't support, and is not needed for, CPU tensors.
+        if param.device != torch.device("cpu"):
+            torch._sync(param)
+        return out
     return _synced_weight_loader
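
For context, a minimal self-contained sketch of how a wrapper like this is typically applied. The `default_weight_loader` helper and the parameter setup below are illustrative assumptions, not part of the commit:

import torch

def default_weight_loader(param, loaded_weight):
    # Hypothetical original loader: copy a checkpoint tensor into the
    # parameter and return the parameter so callers can use the result.
    param.data.copy_(loaded_weight)
    return param

def _make_synced_weight_loader(original_weight_loader):
    def _synced_weight_loader(param, *args, **kwargs):
        # Forward to the wrapped loader and keep its return value (the fix).
        out = original_weight_loader(param, *args, **kwargs)
        # torch._sync doesn't support, and is not needed for, CPU tensors.
        if param.device != torch.device("cpu"):
            torch._sync(param)
        return out
    return _synced_weight_loader

# Wrap the loader; on CPU the sync branch is skipped, while on accelerator
# devices torch._sync(param) flushes pending lazy updates after loading.
loader = _make_synced_weight_loader(default_weight_loader)
param = torch.nn.Parameter(torch.zeros(4, 4))
loader(param, torch.ones(4, 4))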