From 51119d3283056817b6260107f535ae356df2ee62 Mon Sep 17 00:00:00 2001
From: dxqb <183307934+dxqb@users.noreply.github.com>
Date: Sat, 6 Dec 2025 01:28:18 +0100
Subject: [PATCH] Support "transformer." LoRA prefix for Z-Image

Please try to be consistent in your LoRA loading code: Qwen already supports
the "transformer." prefix. This PR adds it to Z-Image as well (for some reason
Z-Image goes through the Lumina2 code path).
---
 comfy/lora.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/comfy/lora.py b/comfy/lora.py
index 3a9077869..774be0488 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -320,6 +320,7 @@ def model_lora_keys_unet(model, key_map={}):
                 to = diffusers_keys[k]
                 key_lora = k[:-len(".weight")]
                 key_map["diffusion_model.{}".format(key_lora)] = to
+                key_map["transformer.{}".format(key_lora)] = to
                 key_map["lycoris_{}".format(key_lora.replace(".", "_"))] = to
 
     return key_map
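
For context, a minimal self-contained sketch of what the added line does. The
diffusers_keys entry and the layer name below are hypothetical (not taken from
an actual Z-Image checkpoint); the point is only that a LoRA key written with
the "transformer." prefix now resolves to the same internal weight name as one
written with the "diffusion_model." prefix.

    # Illustrative sketch, not the actual ComfyUI loader.
    # key_map translates the prefix a LoRA file uses for a weight
    # into the weight name the model uses internally.
    def build_key_map(diffusers_keys):
        key_map = {}
        for k in diffusers_keys:
            if k.endswith(".weight"):
                to = diffusers_keys[k]
                key_lora = k[:-len(".weight")]
                key_map["diffusion_model.{}".format(key_lora)] = to  # existing prefix
                key_map["transformer.{}".format(key_lora)] = to      # prefix added by this PR
                key_map["lycoris_{}".format(key_lora.replace(".", "_"))] = to
        return key_map

    # Hypothetical diffusers-style key mapped to an internal weight name.
    diffusers_keys = {
        "layers.0.attention.to_q.weight": "diffusion_model.layers.0.attention.qkv.weight",
    }
    key_map = build_key_map(diffusers_keys)

    # A LoRA saved with "transformer." naming (as Qwen LoRAs are) now hits
    # the same target weight as one saved with "diffusion_model." naming.
    assert key_map["transformer.layers.0.attention.to_q"] == \
           key_map["diffusion_model.layers.0.attention.to_q"]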