mirror of
https://git.datalinker.icu/kijai/ComfyUI-KJNodes.git
synced 2025-12-09 21:04:41 +08:00
Remake ColorToMask node
that code was atrocious
This commit is contained in:
parent
76c536d156
commit
171b70bfa5
46
nodes.py
46
nodes.py
@@ -1000,35 +1000,41 @@ Converts chosen RGB value to a mask
|
|||||||
"green": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}),
|
"green": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}),
|
||||||
"blue": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}),
|
"blue": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}),
|
||||||
"threshold": ("INT", {"default": 10,"min": 0, "max": 255, "step": 1}),
|
"threshold": ("INT", {"default": 10,"min": 0, "max": 255, "step": 1}),
|
||||||
|
"per_batch": ("INT", {"default": 16, "min": 1, "max": 4096, "step": 1}),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
def clip(self, images, red, green, blue, threshold, invert, per_batch):
    """Convert pixels close to a chosen RGB color into a mask.

    Args:
        images: image batch tensor; assumed float in [0, 1] with shape
            (B, H, W, 3) given the `* 255` rescale and channel-wise
            distance below — TODO confirm against callers.
        red, green, blue: target color components, 0-255.
        threshold: maximum Euclidean RGB distance (0-255 scale) for a
            pixel to count as matching the target color.
        invert: when truthy, matched pixels become 0 and the rest 1.
        per_batch: number of images processed per iteration, bounding
            peak memory on large batches.

    Returns:
        One-element tuple with a float mask tensor of shape (B, H, W),
        values in {0.0, 1.0} (1.0 where the color matched, unless
        inverted).
    """
    color = torch.tensor([red, green, blue], dtype=torch.uint8)
    black = torch.tensor([0, 0, 0], dtype=torch.uint8)
    white = torch.tensor([255, 255, 255], dtype=torch.uint8)
    if invert:
        black, white = white, black

    steps = images.shape[0]
    pbar = comfy.utils.ProgressBar(steps)
    tensors_out = []

    for start_idx in range(0, images.shape[0], per_batch):
        # Euclidean distance of every pixel from the target color,
        # computed on the 0-255 scale to match `threshold`.
        color_distances = torch.norm(images[start_idx:start_idx+per_batch] * 255 - color, dim=-1)

        # Pixels within the threshold are considered a match.
        mask = color_distances <= threshold

        # Paint matches white, the rest black, then collapse the channel
        # dimension. FIX: normalize by 255.0 so the mask is in [0, 1] —
        # the previous implementation divided by 255.0; without it this
        # returned values of 0/255 instead of 0/1.
        mask_out = torch.where(mask.unsqueeze(-1), white, black).float() / 255.0
        mask_out = mask_out.mean(dim=-1)

        tensors_out.append(mask_out.cpu())
        batch_count = mask_out.shape[0]
        pbar.update(batch_count)

    tensors_out = torch.cat(tensors_out, dim=0)
    return tensors_out,
|
|
||||||
class ConditioningMultiCombine:
|
class ConditioningMultiCombine:
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -5058,7 +5064,7 @@ Each mask is generated with the specified width and height.
|
|||||||
mask = torch.ones((height, width), dtype=torch.float32) * value
|
mask = torch.ones((height, width), dtype=torch.float32) * value
|
||||||
masks.append(mask)
|
masks.append(mask)
|
||||||
masks_out = torch.stack(masks, dim=0)
|
masks_out = torch.stack(masks, dim=0)
|
||||||
print(masks_out.shape)
|
|
||||||
return(masks_out,)
|
return(masks_out,)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user