From 6eb25e6b3d7d9858d846e9ab928cbcfd3648a5bb Mon Sep 17 00:00:00 2001
From: kijai <40791699+kijai@users.noreply.github.com>
Date: Mon, 8 Sep 2025 10:28:05 +0300
Subject: [PATCH] Update lora_nodes.py

---
 nodes/lora_nodes.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nodes/lora_nodes.py b/nodes/lora_nodes.py
index 2344550..a358ddd 100644
--- a/nodes/lora_nodes.py
+++ b/nodes/lora_nodes.py
@@ -463,7 +463,7 @@ def resize_lora_model(lora_sd, new_rank, save_dtype, device, dynamic_method, dyn
     total_keys = len([k for k in lora_sd if k.endswith(".weight")])
     pbar = comfy.utils.ProgressBar(total_keys)
 
-    for key, value in tqdm(lora_sd.items()):
+    for key, value in tqdm(lora_sd.items(), leave=True, desc="Resizing LoRA weights", total=total_keys):
         key_parts = key.split(".")
         block_down_name = None
         for _format in LORA_DOWN_UP_FORMATS:
@@ -521,7 +521,7 @@ def resize_lora_model(lora_sd, new_rank, save_dtype, device, dynamic_method, dyn
             if not np.isnan(fro_retained):
                 fro_list.append(float(fro_retained))
             log_str = f"{block_down_name:75} | sum(S) retained: {sum_retained:.1%}, fro retained: {fro_retained:.1%}, max(S) ratio: {max_ratio:0.1f}"
-            print(log_str)
+            tqdm.write(log_str)
             verbose_str += log_str
 
             if verbose and dynamic_method: