Support "transformer." LoRA prefix for Z-Image
Please try to be consistent in your LoRA loading code: Qwen supports the "transformer." prefix. This PR adds it to Z-Image (for some reason the code for Lumina2 is used for Z-Image).
parent bed12674a1
commit 51119d3283
@@ -320,6 +320,7 @@ def model_lora_keys_unet(model, key_map={}):
                 to = diffusers_keys[k]
                 key_lora = k[:-len(".weight")]
                 key_map["diffusion_model.{}".format(key_lora)] = to
+                key_map["transformer.{}".format(key_lora)] = to
                 key_map["lycoris_{}".format(key_lora.replace(".", "_"))] = to
 
     return key_map
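The idea behind the added line: the key map translates every prefix a LoRA file might use into the model's internal weight name, so a "transformer."-prefixed key resolves to the same target as the "diffusion_model."-prefixed one. Below is a minimal sketch of that mapping logic, not the actual ComfyUI loading path; the build_key_map helper and the example diffusers-style key are illustrative only.

# Minimal sketch (assumptions: hypothetical build_key_map helper and example keys).
# It mirrors the pattern in the diff: one internal target, several accepted prefixes.

def build_key_map(diffusers_keys):
    key_map = {}
    for k, to in diffusers_keys.items():
        if not k.endswith(".weight"):
            continue
        key_lora = k[:-len(".weight")]
        key_map["diffusion_model.{}".format(key_lora)] = to
        key_map["transformer.{}".format(key_lora)] = to  # prefix added by this commit
        key_map["lycoris_{}".format(key_lora.replace(".", "_"))] = to
    return key_map

# Hypothetical diffusers-style key mapped to an internal model key.
diffusers_keys = {
    "layers.0.attention.qkv.weight": "diffusion_model.layers.0.attention.qkv.weight",
}
key_map = build_key_map(diffusers_keys)

# A LoRA saved with the "transformer." prefix now resolves to the same weight
# as one saved with the "diffusion_model." prefix.
assert key_map["transformer.layers.0.attention.qkv"] == key_map["diffusion_model.layers.0.attention.qkv"]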