fix dit lora block loader on loras with diff keys

kijai 2025-08-06 23:15:12 +03:00
parent dafbcae4e6
commit 87d0cf42db


@@ -1935,18 +1935,25 @@ class DiTBlockLoraLoader:
             if ratio == 0:
                 keys_to_delete.append(key)
             else:
-                value = loaded[key].weights
-                weights_list = list(loaded[key].weights)
-                weights_list[2] = ratio
-                loaded[key].weights = tuple(weights_list)
+                # Only modify LoRA adapters, skip diff tuples
+                value = loaded[key]
+                if hasattr(value, 'weights'):
+                    print(f"Modifying LoRA adapter for key: {key}")
+                    weights_list = list(value.weights)
+                    weights_list[2] = ratio
+                    loaded[key].weights = tuple(weights_list)
+                else:
+                    print(f"Skipping non-LoRA entry for key: {key}")
 
         for key in keys_to_delete:
             del loaded[key]
 
         print("loading lora keys:")
         for key, value in loaded.items():
-            print(f"Key: {key}, Alpha: {value.weights[2]}")
+            if hasattr(value, 'weights'):
+                print(f"Key: {key}, Alpha: {value.weights[2]}")
+            else:
+                print(f"Key: {key}, Type: {type(value)}")
 
         if model is not None:
             new_modelpatcher = model.clone()
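The hasattr guard matters because the loaded patch dict can mix adapter-style entries that expose a weights tuple with plain diff tuples that have no such attribute; the old code indexed .weights on every value and failed on LoRAs containing diff keys. Below is a minimal standalone sketch of the pattern only, assuming a made-up FakeAdapter class and sample keys in place of whatever the actual loader returns:

    # Sketch of the hasattr guard used in the fix.
    # FakeAdapter and the sample keys are hypothetical stand-ins,
    # not the real types returned by the LoRA loader.
    class FakeAdapter:
        def __init__(self, weights):
            self.weights = weights  # (up, down, alpha, ...) style tuple

    loaded = {
        "blocks.0.attn.qkv": FakeAdapter(("up", "down", 1.0)),  # LoRA-style entry
        "blocks.0.attn.qkv.diff_b": ("bias_delta",),            # plain diff tuple, no .weights
    }

    ratio = 0.5
    keys_to_delete = []

    for key in list(loaded.keys()):
        if ratio == 0:
            keys_to_delete.append(key)
        else:
            value = loaded[key]
            if hasattr(value, 'weights'):
                # Only adapter objects carry an alpha slot that can be rescaled
                weights_list = list(value.weights)
                weights_list[2] = ratio
                value.weights = tuple(weights_list)
            else:
                # diff tuples are passed through untouched
                pass

    for key in keys_to_delete:
        del loaded[key]

    print(loaded["blocks.0.attn.qkv"].weights)  # ('up', 'down', 0.5)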