Mirror of https://git.datalinker.icu/kijai/ComfyUI-KJNodes.git (synced 2025-12-09 12:54:40 +08:00)
Merge branch 'kijai:main' into main
This commit is contained in: commit 2260159a5c

nodes.py (63 changed lines)
@@ -4,7 +4,7 @@ import torch.nn.functional as F
 from torchvision.transforms import Resize, CenterCrop, InterpolationMode
 from torchvision.transforms import functional as TF
 import scipy.ndimage
-from scipy.spatial import Voronoi, voronoi_plot_2d
+from scipy.spatial import Voronoi
 import matplotlib.pyplot as plt
 import numpy as np
 from PIL import ImageFilter, Image, ImageDraw, ImageFont
@@ -638,17 +638,10 @@ class GrowMaskWithBlur:
                 "incremental_expandrate": ("INT", {"default": 0, "min": 0, "max": 100, "step": 1}),
                 "tapered_corners": ("BOOLEAN", {"default": True}),
                 "flip_input": ("BOOLEAN", {"default": False}),
-                "use_cuda": ("BOOLEAN", {"default": True}),
-                "blur_radius": ("INT", {
-                    "default": 0,
-                    "min": 0,
-                    "max": 999,
-                    "step": 1
-                }),
-                "sigma": ("FLOAT", {
-                    "default": 1.0,
-                    "min": 0.1,
-                    "max": 10.0,
+                "blur_radius": ("FLOAT", {
+                    "default": 0.0,
+                    "min": 0.0,
+                    "max": 100,
                     "step": 0.1
                 }),
                 "lerp_alpha": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}),
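Note: the hunk above replaces the old integer blur_radius widget and the separate sigma widget with a single FLOAT blur_radius. As a minimal, hypothetical sketch of the ComfyUI INPUT_TYPES convention used here (ExampleBlurNode and its "mask" input are illustrative names, not part of the commit):

class ExampleBlurNode:
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "mask": ("MASK",),
                # One FLOAT widget replaces the old INT radius + sigma pair,
                # so fractional radii such as 2.5 can be entered directly.
                "blur_radius": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100, "step": 0.1}),
            }
        }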
@@ -662,7 +655,7 @@ class GrowMaskWithBlur:
     RETURN_NAMES = ("mask", "mask_inverted",)
     FUNCTION = "expand_mask"
 
-    def expand_mask(self, mask, expand, tapered_corners, flip_input, blur_radius, sigma, incremental_expandrate, use_cuda, lerp_alpha, decay_factor):
+    def expand_mask(self, mask, expand, tapered_corners, flip_input, blur_radius, incremental_expandrate, lerp_alpha, decay_factor):
         alpha = lerp_alpha
         decay = decay_factor
         if( flip_input ):
@@ -696,22 +689,18 @@ class GrowMaskWithBlur:
             previous_output = output
             out.append(output)
 
-        blurred = torch.stack(out, dim=0).reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3)
-        if use_cuda:
-            device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-            blurred = blurred.to(device) # Move blurred tensor to the GPU
-
-        channels = blurred.shape[-1]
         if blur_radius != 0:
-            blurkernel_size = blur_radius * 2 + 1
-            blurkernel = gaussian_kernel(blurkernel_size, sigma, device=blurred.device).repeat(channels, 1, 1).unsqueeze(1)
-            blurred = blurred.permute(0, 3, 1, 2) # Torch wants (B, C, H, W) we use (B, H, W, C)
-            padded_image = F.pad(blurred, (blur_radius,blur_radius,blur_radius,blur_radius), 'reflect')
-            blurred = F.conv2d(padded_image, blurkernel, padding=blurkernel_size // 2, groups=channels)[:,:,blur_radius:-blur_radius, blur_radius:-blur_radius]
-            blurred = blurred.permute(0, 2, 3, 1)
-            blurred = blurred[:, :, :, 0]
-            return (blurred.cpu(), 1.0 - blurred.cpu(),)
-        return (torch.stack(out, dim=0), 1.0 -torch.stack(out, dim=0),)
+            # Convert the tensor list to PIL images, apply blur, and convert back
+            for idx, tensor in enumerate(out):
+                # Convert tensor to PIL image
+                pil_image = TF.to_pil_image(tensor.cpu().detach())
+                # Apply Gaussian blur
+                pil_image = pil_image.filter(ImageFilter.GaussianBlur(blur_radius))
+                # Convert back to tensor
+                out[idx] = TF.to_tensor(pil_image)
+            blurred = torch.stack(out, dim=0)
+
+            return (blurred, 1.0 - blurred)
 
 
 
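The hunk above drops the hand-rolled conv2d Gaussian blur (and its use_cuda/sigma knobs) in favour of blurring each mask through PIL's ImageFilter.GaussianBlur. A self-contained sketch of that approach, not the node's exact code (blur_mask is a hypothetical helper, and the conversion to an 8-bit 'L' image is my assumption to keep the PIL filter unambiguous):

import numpy as np
import torch
from PIL import Image, ImageFilter

def blur_mask(mask: torch.Tensor, blur_radius: float) -> torch.Tensor:
    # mask: (H, W) float tensor in [0, 1]; returns a blurred copy on the CPU
    pil_image = Image.fromarray((mask.clamp(0, 1) * 255).byte().cpu().numpy(), mode="L")
    pil_image = pil_image.filter(ImageFilter.GaussianBlur(blur_radius))  # radius may be fractional
    return torch.from_numpy(np.array(pil_image)).float() / 255.0

mask = torch.zeros(64, 64)
mask[16:48, 16:48] = 1.0
blurred = blur_mask(mask, blur_radius=4.5)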
@@ -1104,7 +1093,6 @@ class EmptyLatentImagePresets:
         '960 x 512',
         '1024 x 512',
         '1536 x 640',
-        '1536 x 640',
         '1344 x 768',
         '1216 x 832',
         '1152 x 896',
@@ -2260,6 +2248,12 @@ class OffsetMask:
 
         return mask,
 
+class AnyType(str):
+    """A special class that is always equal in not equal comparisons. Credit to pythongosssss"""
+
+    def __ne__(self, __value: object) -> bool:
+        return False
+any = AnyType("*")
 class WidgetToString:
     @classmethod
     def IS_CHANGED(cls, **kwargs):
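The AnyType helper added above is the usual wildcard-input trick: the common reading is that ComfyUI validates links with a != comparison between the declared and connected type names, and since __ne__ always returns False, an input declared with any = AnyType("*") never rejects a connection (it is used for the optional "source" input in the next hunk). A tiny standalone illustration of the comparison behaviour (not the node's code):

class AnyType(str):
    """Always compares as equal in != checks."""
    def __ne__(self, __value: object) -> bool:
        return False

wildcard = AnyType("*")
print(wildcard != "IMAGE")   # False -> looks like a matching type
print(wildcard != "STRING")  # False
print(str(wildcard))         # "*" (otherwise behaves like a normal str)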
@@ -2273,6 +2267,9 @@ class WidgetToString:
                 "widget_name": ("STRING", {"multiline": False}),
                 "return_all": ("BOOLEAN", {"default": False}),
             },
+            "optional": {
+                "source": (any, {}),
+            },
             "hidden": {"extra_pnginfo": "EXTRA_PNGINFO",
                        "prompt": "PROMPT"},
         }
@@ -3064,7 +3061,7 @@ class ImageBatchRepeatInterleaving:
 
         repeated_images = torch.repeat_interleave(images, repeats=repeats, dim=0)
         return (repeated_images, )
 
 
 NODE_CLASS_MAPPINGS = {
     "INTConstant": INTConstant,
     "FloatConstant": FloatConstant,
@@ -3121,7 +3118,7 @@ NODE_CLASS_MAPPINGS = {
     "GenerateNoise": GenerateNoise,
     "StableZero123_BatchSchedule": StableZero123_BatchSchedule,
     "GetImagesFromBatchIndexed": GetImagesFromBatchIndexed,
-    "ImageBatchRepeatInterleaving": ImageBatchRepeatInterleaving
+    "ImageBatchRepeatInterleaving": ImageBatchRepeatInterleaving,
 }
 NODE_DISPLAY_NAME_MAPPINGS = {
     "INTConstant": "INT Constant",
@@ -3148,7 +3145,7 @@ NODE_DISPLAY_NAME_MAPPINGS = {
     "ReverseImageBatch": "ReverseImageBatch",
     "ImageGridComposite2x2": "ImageGridComposite2x2",
     "ImageGridComposite3x3": "ImageGridComposite3x3",
-    "ImageConcanate": "ImageConcanate",
+    "ImageConcanate": "ImageConcatenate",
     "ImageBatchTestPattern": "ImageBatchTestPattern",
     "ReplaceImagesInBatch": "ReplaceImagesInBatch",
     "BatchCropFromMask": "BatchCropFromMask",
@@ -3178,5 +3175,5 @@ NODE_DISPLAY_NAME_MAPPINGS = {
     "GenerateNoise": "GenerateNoise",
     "StableZero123_BatchSchedule": "StableZero123_BatchSchedule",
     "GetImagesFromBatchIndexed": "GetImagesFromBatchIndexed",
-    "ImageBatchRepeatInterleaving": "ImageBatchRepeatInterleaving"
+    "ImageBatchRepeatInterleaving": "ImageBatchRepeatInterleaving",
 }