diff --git a/nodes/image_nodes.py b/nodes/image_nodes.py
index 4180468..16bf4af 100644
--- a/nodes/image_nodes.py
+++ b/nodes/image_nodes.py
@@ -2475,7 +2475,7 @@ class ImageResizeKJv2:
                 "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1, }),
                 "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1, }),
                 "upscale_method": (s.upscale_methods,),
-                "keep_proportion": (["stretch", "resize", "pad", "pad_edge", "pad_edge_pixel", "crop", "pillarbox_blur"], { "default": False }),
+                "keep_proportion": (["stretch", "resize", "pad", "pad_edge", "pad_edge_pixel", "crop", "pillarbox_blur", "total_pixels"], { "default": False }),
                 "pad_color": ("STRING", { "default": "0, 0, 0", "tooltip": "Color to use for padding."}),
                 "crop_position": (["center", "top", "bottom", "left", "right"], { "default": "center" }),
                 "divisible_by": ("INT", { "default": 2, "min": 0, "max": 512, "step": 1, }),
@@ -2512,19 +2512,23 @@ highest dimension.
         else:
             device = torch.device("cpu")
 
-        if width == 0:
-            width = W
-        if height == 0:
-            height = H
-
         pillarbox_blur = keep_proportion == "pillarbox_blur"
 
         # Initialize padding variables
         pad_left = pad_right = pad_top = pad_bottom = 0
-
-        if keep_proportion == "resize" or keep_proportion.startswith("pad") or pillarbox_blur:
+
+        if keep_proportion in ["resize", "total_pixels"] or keep_proportion.startswith("pad") or pillarbox_blur:
+            if keep_proportion == "total_pixels":
+                total_pixels = width * height
+                aspect_ratio = W / H
+                new_height = int(math.sqrt(total_pixels / aspect_ratio))
+                new_width = int(math.sqrt(total_pixels * aspect_ratio))
+
             # If one of the dimensions is zero, calculate it to maintain the aspect ratio
-            if width == 0 and height != 0:
+            elif width == 0 and height == 0:
+                new_width = W
+                new_height = H
+            elif width == 0 and height != 0:
                 ratio = height / H
                 new_width = round(W * ratio)
                 new_height = height
@@ -2570,6 +2574,11 @@ highest dimension.
             width = new_width
             height = new_height
+        else:
+            if width == 0:
+                width = W
+            if height == 0:
+                height = H
 
         if divisible_by > 1:
             width = width - (width % divisible_by)
             height = height - (height % divisible_by)
diff --git a/nodes/model_optimization_nodes.py b/nodes/model_optimization_nodes.py
index 152d62f..68fbd55 100644
--- a/nodes/model_optimization_nodes.py
+++ b/nodes/model_optimization_nodes.py
@@ -709,6 +709,7 @@ class TorchCompileModelFluxAdvancedV2:
            },
            "optional": {
                "dynamo_cache_size_limit": ("INT", {"default": 64, "min": 0, "max": 1024, "step": 1, "tooltip": "torch._dynamo.config.cache_size_limit"}),
+                "force_parameter_static_shapes": ("BOOLEAN", {"default": True, "tooltip": "torch._dynamo.config.force_parameter_static_shapes"}),
            }
        }
    RETURN_TYPES = ("MODEL",)
@@ -717,11 +718,12 @@ class TorchCompileModelFluxAdvancedV2:
    CATEGORY = "KJNodes/torchcompile"
    EXPERIMENTAL = True
 
-    def patch(self, model, backend, mode, fullgraph, single_blocks, double_blocks, dynamic, dynamo_cache_size_limit):
+    def patch(self, model, backend, mode, fullgraph, single_blocks, double_blocks, dynamic, dynamo_cache_size_limit=64, force_parameter_static_shapes=True):
        from comfy_api.torch_helpers import set_torch_compile_wrapper
        m = model.clone()
        diffusion_model = m.get_model_object("diffusion_model")
        torch._dynamo.config.cache_size_limit = dynamo_cache_size_limit
+        torch._dynamo.config.force_parameter_static_shapes = force_parameter_static_shapes
 
        compile_key_list = []
 
@@ -872,6 +874,10 @@ class TorchCompileModelWanVideoV2:
                "dynamic": ("BOOLEAN", {"default": False, "tooltip": "Enable dynamic mode"}),
                "compile_transformer_blocks_only": ("BOOLEAN", {"default": True, "tooltip": "Compile only transformer blocks, faster compile and less error prone"}),
                "dynamo_cache_size_limit": ("INT", {"default": 64, "min": 0, "max": 1024, "step": 1, "tooltip": "torch._dynamo.config.cache_size_limit"}),
+
+            },
+            "optional": {
+                "force_parameter_static_shapes": ("BOOLEAN", {"default": True, "tooltip": "torch._dynamo.config.force_parameter_static_shapes"}),
            },
        }
    RETURN_TYPES = ("MODEL",)
@@ -880,11 +886,12 @@ class TorchCompileModelWanVideoV2:
    CATEGORY = "KJNodes/torchcompile"
    EXPERIMENTAL = True
 
-    def patch(self, model, backend, fullgraph, mode, dynamic, dynamo_cache_size_limit, compile_transformer_blocks_only):
+    def patch(self, model, backend, fullgraph, mode, dynamic, dynamo_cache_size_limit, compile_transformer_blocks_only, force_parameter_static_shapes=True):
        from comfy_api.torch_helpers import set_torch_compile_wrapper
        m = model.clone()
        diffusion_model = m.get_model_object("diffusion_model")
-        torch._dynamo.config.cache_size_limit = dynamo_cache_size_limit
+        torch._dynamo.config.cache_size_limit = dynamo_cache_size_limit
+        torch._dynamo.config.force_parameter_static_shapes = force_parameter_static_shapes
        try:
            if compile_transformer_blocks_only:
                compile_key_list = []
diff --git a/web/js/setgetnodes.js b/web/js/setgetnodes.js
index c453188..5c62f31 100644
--- a/web/js/setgetnodes.js
+++ b/web/js/setgetnodes.js
@@ -103,11 +103,9 @@ app.registerExtension({
					}
					//On Connect
					if (link_info && node.graph && slotType == 1 && isChangeConnect) {
-						const fromNode = node.graph._nodes.find((otherNode) => otherNode.id == link_info.origin_id);
-
-						if (fromNode && fromNode.outputs && fromNode.outputs[link_info.origin_slot]) {
-							const type = fromNode.outputs[link_info.origin_slot].type;
-
+						const resolve = link_info.resolve(node.graph)
+						const type = (resolve?.subgraphInput ?? resolve?.output)?.type
+						if (type) {
							if (this.title === "Set"){
								this.title = (!disablePrefix ? "Set_" : "") + type;
							}
@@ -470,9 +468,8 @@ app.registerExtension({
				};
 
				this.goToSetter = function() {
-					const setter = this.findSetter(this.graph);
-					this.canvas.centerOnNode(setter);
-					this.canvas.selectNode(setter, false);
+					this.canvas.centerOnNode(this.currentSetter);
+					this.canvas.selectNode(this.currentSetter, false);
				};
 
				// This node is purely frontend and does not impact the resulting prompt so should not be serialized
@@ -496,7 +493,8 @@ app.registerExtension({
			}
			getExtraMenuOptions(_, options) {
				let menuEntry = this.drawConnection ? "Hide connections" : "Show connections";
-
+				this.currentSetter = this.findSetter(this.graph)
+				if (!this.currentSetter) return
				options.unshift(
					{
						content: "Go to setter",
@@ -507,12 +505,9 @@ app.registerExtension({
					{
						content: menuEntry,
						callback: () => {
-							this.currentSetter = this.findSetter(this.graph);
-							if (this.currentSetter.length == 0) return;
-							let linkType = (this.currentSetter.inputs[0].type);
+							let linkType = (this.currentSetter.inputs[0].type);
							this.drawConnection = !this.drawConnection;
							this.slotColor = this.canvas.default_connection_color_byType[linkType]
-							menuEntry = this.drawConnection ? "Hide connections" : "Show connections";
							this.canvas.setDirty(true, true);
						},
					},