Mirror of https://git.datalinker.icu/kijai/ComfyUI-CogVideoXWrapper.git
Allow mixing Fun and non-Fun LoRAs
parent e187cfe22f
commit e5fc7c1bf3
@@ -241,16 +241,21 @@ class DownloadAndLoadCogVideoModel:
         #LoRAs
         if lora is not None:
-            try:
+            dimensionx_loras = ["orbit", "dimensionx"] # for now dimensionx loras need scaling
+            dimensionx_lora = False
             adapter_list = []
             adapter_weights = []
             for l in lora:
+                if any(item in l["path"].lower() for item in dimensionx_loras):
+                    dimensionx_lora = True
                 fuse = True if l["fuse_lora"] else False
                 lora_sd = load_torch_file(l["path"])
+                lora_rank = None
                 for key, val in lora_sd.items():
                     if "lora_B" in key:
                         lora_rank = val.shape[1]
                         break
+                if lora_rank is not None:
                     log.info(f"Merging rank {lora_rank} LoRA weights from {l['path']} with strength {l['strength']}")
                     adapter_name = l['path'].split("/")[-1].split(".")[0]
                     adapter_weight = l['strength']
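The new `lora_rank = None` / `if lora_rank is not None:` pair turns the existing rank probe into a per-file classifier: PEFT/diffusers-style checkpoints store the LoRA up-projection as `lora_B` matrices of shape (out_features, rank), so `shape[1]` of any `lora_B` tensor is the adapter rank, while Fun-trainer checkpoints have no `lora_B` keys and leave `lora_rank` as None. A minimal standalone illustration of that check (the key names and sizes below are hypothetical, not taken from a real checkpoint):

import torch

def detect_lora_rank(lora_sd):
    # PEFT-style LoRA weights: lora_B is (out_features, rank), so dim 1 is the rank
    for key, val in lora_sd.items():
        if "lora_B" in key:
            return val.shape[1]
    return None  # no lora_B keys -> not a PEFT-style LoRA (e.g. a Fun-trainer checkpoint)

# Hypothetical rank-16 LoRA pair for a single attention projection
example_sd = {
    "transformer.blocks.0.attn.to_q.lora_A.weight": torch.zeros(16, 3072),
    "transformer.blocks.0.attn.to_q.lora_B.weight": torch.zeros(3072, 16),
}
print(detect_lora_rank(example_sd))  # 16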
@@ -258,19 +263,21 @@ class DownloadAndLoadCogVideoModel:
                     adapter_list.append(adapter_name)
                     adapter_weights.append(adapter_weight)
-                for l in lora:
+                else:
+                    try: #Fun trainer LoRAs are loaded differently
+                        from .lora_utils import merge_lora
+                        log.info(f"Merging LoRA weights from {l['path']} with strength {l['strength']}")
+                        transformer = merge_lora(transformer, l["path"], l["strength"])
+                    except:
+                        raise ValueError(f"Can't recognize LoRA {l['path']}")
+
             pipe.set_adapters(adapter_list, adapter_weights=adapter_weights)
             if fuse:
                 lora_scale = 1
-                dimension_loras = ["orbit", "dimensionx"] # for now dimensionx loras need scaling
-                if any(item in lora[-1]["path"].lower() for item in dimension_loras):
+                if dimensionx_lora:
                     lora_scale = lora_scale / lora_rank
                 pipe.fuse_lora(lora_scale=lora_scale, components=["transformer"])
-            except: #Fun trainer LoRAs are loaded differently
-                from .lora_utils import merge_lora
-                for l in lora:
-                    log.info(f"Merging LoRA weights from {l['path']} with strength {l['strength']}")
-                    transformer = merge_lora(transformer, l["path"], l["strength"])

         if "fused" in attention_mode:
             from diffusers.models.attention import Attention
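Taken together, the two hunks let one lora list mix both kinds of checkpoint: an entry with `lora_B` keys is registered as a named diffusers adapter, anything else falls back to the Fun-trainer `merge_lora` helper, and the fuse step scales DimensionX/orbit LoRAs by 1/rank. Below is a condensed sketch of the resulting flow; it assumes `load_torch_file` comes from `comfy.utils`, that the adapter registration elided between the hunks is a `pipe.load_lora_weights` call, and the `apply_loras` helper and its signature are illustrative rather than the node's actual code:

from comfy.utils import load_torch_file   # ComfyUI checkpoint loader (assumed import)
from .lora_utils import merge_lora        # Fun-trainer merge helper shown in the diff

def apply_loras(pipe, transformer, lora, log):
    dimensionx_names = ("orbit", "dimensionx")  # these LoRAs need 1/rank scaling on fuse
    adapter_list, adapter_weights = [], []
    dimensionx_lora, fuse, peft_rank = False, False, None

    for l in lora:
        if any(name in l["path"].lower() for name in dimensionx_names):
            dimensionx_lora = True
        fuse = bool(l["fuse_lora"])
        lora_sd = load_torch_file(l["path"])
        # lora_B present -> PEFT/diffusers-style checkpoint; its dim 1 is the rank
        rank = next((v.shape[1] for k, v in lora_sd.items() if "lora_B" in k), None)
        if rank is not None:
            peft_rank = rank
            adapter_name = l["path"].split("/")[-1].split(".")[0]
            log.info(f"Merging rank {rank} LoRA weights from {l['path']} with strength {l['strength']}")
            # registration step (elided between the hunks), assumed to be load_lora_weights
            pipe.load_lora_weights(l["path"], adapter_name=adapter_name)
            adapter_list.append(adapter_name)
            adapter_weights.append(l["strength"])
        else:
            # Fun-trainer LoRA: merge its weights directly into the transformer
            log.info(f"Merging LoRA weights from {l['path']} with strength {l['strength']}")
            transformer = merge_lora(transformer, l["path"], l["strength"])

    if adapter_list:
        pipe.set_adapters(adapter_list, adapter_weights=adapter_weights)
        if fuse:
            # per the diff's comment, DimensionX LoRAs currently need 1/rank scaling
            lora_scale = 1 / peft_rank if (dimensionx_lora and peft_rank) else 1
            pipe.fuse_lora(lora_scale=lora_scale, components=["transformer"])
    return transformer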