Change Flux block lora loader into a generic DiT block lora loader and support Wan with it

This commit is contained in:
kijai 2025-05-28 12:57:17 +03:00
parent 07b804cb3f
commit 37eb7bddcb
4 changed files with 30 additions and 9 deletions

View File

@@ -167,9 +167,10 @@ NODE_CONFIG = {
     "Screencap_mss": {"class": Screencap_mss, "name": "Screencap mss"},
     "WebcamCaptureCV2": {"class": WebcamCaptureCV2, "name": "Webcam Capture CV2"},
     "DifferentialDiffusionAdvanced": {"class": DifferentialDiffusionAdvanced, "name": "Differential Diffusion Advanced"},
-    "FluxBlockLoraLoader": {"class": FluxBlockLoraLoader, "name": "Flux Block Lora Loader"},
+    "DiTBlockLoraLoader": {"class": DiTBlockLoraLoader, "name": "DiT Block Lora Loader"},
     "FluxBlockLoraSelect": {"class": FluxBlockLoraSelect, "name": "Flux Block Lora Select"},
     "HunyuanVideoBlockLoraSelect": {"class": HunyuanVideoBlockLoraSelect, "name": "Hunyuan Video Block Lora Select"},
+    "Wan21BlockLoraSelect": {"class": Wan21BlockLoraSelect, "name": "Wan21 Block Lora Select"},
     "CustomControlNetWeightsFluxFromList": {"class": CustomControlNetWeightsFluxFromList, "name": "Custom ControlNet Weights Flux From List"},
     "CheckpointLoaderKJ": {"class": CheckpointLoaderKJ, "name": "CheckpointLoaderKJ"},
     "DiffusionModelLoaderKJ": {"class": DiffusionModelLoaderKJ, "name": "Diffusion Model Loader KJ"},

View File

@@ -2683,7 +2683,7 @@ class LoadImagesFromFolderKJ:
         if not os.path.isdir(folder):
             raise FileNotFoundError(f"Folder '{folder} cannot be found.'")
-        valid_extensions = ['.jpg', '.jpeg', '.png', '.webp']
+        valid_extensions = ['.jpg', '.jpeg', '.png', '.webp', '.tga']
         image_paths = []
         if include_subfolders:
             for root, _, files in os.walk(folder):
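The only change here is adding .tga to the accepted extensions. A rough sketch of how such a list filters files when scanning a folder (illustrative only; the node's actual method differs in details):

```python
import os

valid_extensions = ['.jpg', '.jpeg', '.png', '.webp', '.tga']

def list_images(folder, include_subfolders=False):
    # Collect paths whose extension (case-insensitive) is in valid_extensions.
    if include_subfolders:
        image_paths = [os.path.join(root, f)
                       for root, _, files in os.walk(folder) for f in files
                       if os.path.splitext(f)[1].lower() in valid_extensions]
    else:
        image_paths = [os.path.join(folder, f) for f in os.listdir(folder)
                       if os.path.splitext(f)[1].lower() in valid_extensions]
    return sorted(image_paths)
```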

View File

@@ -1947,7 +1947,7 @@ class FluxBlockLoraSelect:
         return {"required": arg_dict}
-    RETURN_TYPES = ("SELECTEDBLOCKS", )
+    RETURN_TYPES = ("SELECTEDDITBLOCKS", )
     RETURN_NAMES = ("blocks", )
     OUTPUT_TOOLTIPS = ("The modified diffusion model.",)
     FUNCTION = "load_lora"
@@ -1959,9 +1959,6 @@ class FluxBlockLoraSelect:
         return (kwargs,)
 class HunyuanVideoBlockLoraSelect:
-    def __init__(self):
-        self.loaded_lora = None
     @classmethod
     def INPUT_TYPES(s):
         arg_dict = {}
@@ -1975,7 +1972,29 @@ class HunyuanVideoBlockLoraSelect:
         return {"required": arg_dict}
-    RETURN_TYPES = ("SELECTEDBLOCKS", )
+    RETURN_TYPES = ("SELECTEDDITBLOCKS", )
     RETURN_NAMES = ("blocks", )
     OUTPUT_TOOLTIPS = ("The modified diffusion model.",)
     FUNCTION = "load_lora"
     CATEGORY = "KJNodes/experimental"
     DESCRIPTION = "Select individual block alpha values, value of 0 removes the block altogether"
     def load_lora(self, **kwargs):
         return (kwargs,)
+class Wan21BlockLoraSelect:
+    @classmethod
+    def INPUT_TYPES(s):
+        arg_dict = {}
+        argument = ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1000.0, "step": 0.01})
+        for i in range(40):
+            arg_dict["blocks.{}.".format(i)] = argument
+        return {"required": arg_dict}
+    RETURN_TYPES = ("SELECTEDDITBLOCKS", )
+    RETURN_NAMES = ("blocks", )
+    OUTPUT_TOOLTIPS = ("The modified diffusion model.",)
+    FUNCTION = "load_lora"
@@ -1986,7 +2005,7 @@ class HunyuanVideoBlockLoraSelect:
     def load_lora(self, **kwargs):
         return (kwargs,)
-class FluxBlockLoraLoader:
+class DiTBlockLoraLoader:
     def __init__(self):
         self.loaded_lora = None
@@ -2000,7 +2019,7 @@ class FluxBlockLoraLoader:
             "optional": {
                 "lora_name": (folder_paths.get_filename_list("loras"), {"tooltip": "The name of the LoRA."}),
                 "opt_lora_path": ("STRING", {"forceInput": True, "tooltip": "Absolute path of the LoRA."}),
-                "blocks": ("SELECTEDBLOCKS",),
+                "blocks": ("SELECTEDDITBLOCKS",),
             }
         }
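The body of DiTBlockLoraLoader is not part of this diff; purely as a sketch of what consuming a SELECTEDDITBLOCKS dict generally involves (the helper name and key matching below are assumptions, not the repository's implementation), the per-block alphas are matched against LoRA keys and used to scale or drop them:

```python
def apply_block_alphas(lora_sd, blocks):
    # Sketch only: scale each LoRA tensor by its block's alpha; an alpha of 0 drops the block.
    filtered = {}
    for key, tensor in lora_sd.items():
        # First selected block prefix (e.g. "blocks.12.") found in this LoRA key, if any.
        alpha = next((a for prefix, a in blocks.items() if prefix in key), None)
        if alpha is None:
            filtered[key] = tensor          # key not tied to a selected block: keep unchanged
        elif alpha > 0.0:
            filtered[key] = tensor * alpha  # scale this block's contribution
        # alpha == 0.0: omit the key, removing the block's LoRA weights entirely
    return filtered
```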

View File

@@ -89,6 +89,7 @@ app.registerExtension({
             case "FluxBlockLoraSelect":
             case "HunyuanVideoBlockLoraSelect":
+            case "Wan21BlockLoraSelect":
                 nodeType.prototype.onNodeCreated = function () {
                     this.addWidget("button", "Set all", null, () => {
                         const userInput = prompt("Enter the values to set for widgets (e.g., s0,1,2-7=2.0, d0,1,2-7=2.0, or 1.0):", "");