From e362c926d36cced0c79689062ba768d09e6925d8 Mon Sep 17 00:00:00 2001
From: kijai <40791699+kijai@users.noreply.github.com>
Date: Sat, 31 Aug 2024 12:06:39 +0300
Subject: [PATCH] Update nodes.py

---
 nodes/nodes.py | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/nodes/nodes.py b/nodes/nodes.py
index 26d1702..d24f47e 100644
--- a/nodes/nodes.py
+++ b/nodes/nodes.py
@@ -1853,12 +1853,22 @@ class FluxBlockLoraLoader:
         #print(filtered_dict)
         last_arg_size = 0
         for arg in kwargs:
-            for key in loaded:
+            for key in list(loaded.keys()):  # Convert keys to a list to avoid runtime error due to size change
                 if arg in key and last_arg_size < len(arg):
                     ratio = kwargs[arg]
-                    value = loaded[key]
-                    last_arg_size = len(arg)
-                    loaded[key] = (value[0], value[1][:-3] + (ratio, value[1][-2], value[1][-1]))
+                    if ratio == 0:
+                        del loaded[key]  # Remove the key if ratio is 0
+                    else:
+                        value = loaded[key]
+                        last_arg_size = len(arg)
+                        loaded[key] = (value[0], value[1][:-3] + (ratio, value[1][-2], value[1][-1]))
+        print("loading lora keys:")
+        for key, value in loaded.items():
+            if len(value) > 1 and len(value[1]) > 2:
+                alpha = value[1][-3]  # Assuming the alpha value is the third last element in the tuple
+            else:
+                alpha = None
+            print(f"Key: {key}, Alpha: {alpha}")
         if model is not None:
             new_modelpatcher = model.clone()
             k = new_modelpatcher.add_patches(loaded, strength_model)
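
Note: below is a minimal standalone sketch of the pattern this patch relies on; the helper name apply_block_ratios and the docstring are illustrative assumptions, not the node's actual API. Deleting from a dict while iterating its live view raises "RuntimeError: dictionary changed size during iteration", so the keys are snapshotted with list() first, and a block ratio of 0 now removes the patch entirely instead of writing a zero alpha.

    def apply_block_ratios(loaded, **kwargs):
        """Scale or drop LoRA patches per block ratio (hypothetical helper).

        Each value in 'loaded' is assumed to be a 2-tuple whose second element
        is a weights tuple with alpha as its third-from-last entry, matching
        the comment in the diff above.
        """
        last_arg_size = 0
        for arg, ratio in kwargs.items():
            # list() snapshots the keys, so deleting entries inside the loop
            # cannot raise "dictionary changed size during iteration"
            for key in list(loaded.keys()):
                if arg in key and last_arg_size < len(arg):
                    if ratio == 0:
                        del loaded[key]  # a zero ratio removes the patch outright
                    else:
                        value = loaded[key]
                        last_arg_size = len(arg)
                        # replace the assumed alpha slot with the requested ratio
                        loaded[key] = (value[0], value[1][:-3] + (ratio, value[1][-2], value[1][-1]))
        return loaded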