Mirror of https://git.datalinker.icu/kijai/ComfyUI-KJNodes.git
New nodes
parent 26ca2925c6
commit d165ae2542
@@ -101,6 +101,9 @@ NODE_CONFIG = {
    #curve nodes
    "SplineEditor": {"class": SplineEditor, "name": "Spline Editor"},
    "CreateShapeMaskOnPath": {"class": CreateShapeMaskOnPath, "name": "Create Shape Mask On Path"},
    "CreateTextOnPath": {"class": CreateTextOnPath, "name": "Create Text On Path"},
    "CreateGradientFromCoords": {"class": CreateGradientFromCoords, "name": "Create Gradient From Coords"},
    "GradientToFloat": {"class": GradientToFloat, "name": "Gradient To Float"},
    "WeightScheduleExtend": {"class": WeightScheduleExtend, "name": "Weight Schedule Extend"},
    "MaskOrImageToWeight": {"class": MaskOrImageToWeight, "name": "Mask Or Image To Weight"},
    "WeightScheduleConvert": {"class": WeightScheduleConvert, "name": "Weight Schedule Convert"},
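For context, ComfyUI discovers custom nodes through the NODE_CLASS_MAPPINGS and NODE_DISPLAY_NAME_MAPPINGS dictionaries exported by a node package, and a config table like NODE_CONFIG above is typically expanded into those two mappings at import time. A minimal sketch of that expansion (editorial, not part of this commit; the repo's actual helper may be named and structured differently):

def generate_node_mappings(node_config):
    # Hypothetical helper: split the single config table into the two dicts ComfyUI reads.
    node_class_mappings = {key: value["class"] for key, value in node_config.items()}
    node_display_name_mappings = {key: value.get("name", key) for key, value in node_config.items()}
    return node_class_mappings, node_display_name_mappings

NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS = generate_node_mappings(NODE_CONFIG)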
@@ -1,7 +1,7 @@
 import torch
 from torchvision import transforms
 import json
-from PIL import Image, ImageDraw, ImageFont
+from PIL import Image, ImageDraw, ImageFont, ImageColor
 import numpy as np
 from ..utility.utility import pil2tensor
 import folder_paths
@@ -388,6 +388,116 @@ Locations are center locations.
        out_images = torch.cat(image_list, dim=0).cpu().float()
        out_masks = torch.cat(mask_list, dim=0)
        return (out_images, out_masks, 1.0 - out_masks,)

class CreateGradientFromCoords:

    RETURN_TYPES = ("IMAGE", )
    RETURN_NAMES = ("image", )
    FUNCTION = "generate"
    CATEGORY = "KJNodes/image"
    DESCRIPTION = """
Creates a gradient image from coordinates.
"""

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "coordinates": ("STRING", {"forceInput": True}),
                "frame_width": ("INT", {"default": 512, "min": 16, "max": 4096, "step": 1}),
                "frame_height": ("INT", {"default": 512, "min": 16, "max": 4096, "step": 1}),
                "start_color": ("STRING", {"default": 'white'}),
                "end_color": ("STRING", {"default": 'black'}),
                "multiplier": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 100.0, "step": 0.01}),
            },
        }

    def generate(self, coordinates, frame_width, frame_height, start_color, end_color, multiplier):
        # Parse the coordinate string (single-quoted JSON) into a list of dicts
        coordinates = json.loads(coordinates.replace("'", '"'))

        # Create the output image
        image = Image.new("RGB", (frame_width, frame_height))
        draw = ImageDraw.Draw(image)

        # Extract start and end points for the gradient
        start_coord = coordinates[0]
        end_coord = coordinates[1]

        start_color = ImageColor.getrgb(start_color)
        end_color = ImageColor.getrgb(end_color)

        # Calculate the gradient direction (vector) and its length
        gradient_direction = (end_coord['x'] - start_coord['x'], end_coord['y'] - start_coord['y'])
        gradient_length = (gradient_direction[0] ** 2 + gradient_direction[1] ** 2) ** 0.5

        # Iterate over each pixel in the image
        for y in range(frame_height):
            for x in range(frame_width):
                # Project the pixel onto the gradient line
                point_vector = (x - start_coord['x'], y - start_coord['y'])
                projection = (point_vector[0] * gradient_direction[0] + point_vector[1] * gradient_direction[1]) / gradient_length
                projection = max(min(projection, gradient_length), 0)  # Clamp the projection value

                # Blend factor for the current pixel
                blend = projection * multiplier / gradient_length

                # Interpolate the color of the current pixel
                color = (
                    int(start_color[0] + (end_color[0] - start_color[0]) * blend),
                    int(start_color[1] + (end_color[1] - start_color[1]) * blend),
                    int(start_color[2] + (end_color[2] - start_color[2]) * blend)
                )

                # Set the pixel color
                draw.point((x, y), fill=color)

        # Convert the PIL image to a tensor
        image_tensor = pil2tensor(image)

        return (image_tensor,)
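The per-pixel double loop above is easy to follow, but it runs in Python for every one of frame_width * frame_height pixels, which gets slow at larger resolutions. A vectorized NumPy equivalent of the same projection-and-blend math, as an editorial sketch only (not part of this commit; the function name and the explicit clamp to [0, 255] are mine):

import numpy as np
from PIL import Image, ImageColor

def gradient_from_coords_vectorized(start_coord, end_coord, start_color, end_color,
                                    frame_width, frame_height, multiplier=1.0):
    # Project every pixel onto the start->end axis in one shot instead of looping.
    ys, xs = np.mgrid[0:frame_height, 0:frame_width]
    dx = end_coord['x'] - start_coord['x']
    dy = end_coord['y'] - start_coord['y']
    length = (dx * dx + dy * dy) ** 0.5
    projection = ((xs - start_coord['x']) * dx + (ys - start_coord['y']) * dy) / length
    blend = np.clip(projection, 0, length) * multiplier / length
    start_rgb = np.array(ImageColor.getrgb(start_color), dtype=np.float32)
    end_rgb = np.array(ImageColor.getrgb(end_color), dtype=np.float32)
    pixels = start_rgb + (end_rgb - start_rgb) * blend[..., None]
    return Image.fromarray(np.clip(pixels, 0, 255).astype(np.uint8), mode="RGB")

Either way, the coordinates input needs at least two points in the single-quoted list-of-dicts form that generate() parses, e.g. "[{'x': 0, 'y': 0}, {'x': 512, 'y': 512}]".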
class GradientToFloat:

    RETURN_TYPES = ("FLOAT", "FLOAT",)
    RETURN_NAMES = ("float_x", "float_y", )
    FUNCTION = "sample"
    CATEGORY = "KJNodes/image"
    DESCRIPTION = """
Calculates a list of floats from the image.
"""

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "image": ("IMAGE", ),
                "steps": ("INT", {"default": 10, "min": 2, "max": 10000, "step": 1}),
            },
        }

    def sample(self, image, steps):
        # image is a tensor with shape [B, H, W, C]
        B, H, W, C = image.shape

        # Sample along the width axis (W), using the first batch and the first channel
        w_intervals = torch.linspace(0, W - 1, steps=steps, dtype=torch.int64)
        w_sampled = image[0, :, w_intervals, 0]

        # Sample along the height axis (H), using the first batch and the first channel
        h_intervals = torch.linspace(0, H - 1, steps=steps, dtype=torch.int64)
        h_sampled = image[0, h_intervals, :, 0]

        # Mean across the height for the width samples, and across the width for the height samples
        w_values = w_sampled.mean(dim=0).tolist()
        h_values = h_sampled.mean(dim=1).tolist()

        print("Sampled width axis values:", w_values)
        print("Sampled height axis values:", h_values)

        return (w_values, h_values)


class MaskOrImageToWeight:
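A quick way to sanity-check the sampling above, as an editorial usage sketch (not part of this commit): feed the node a horizontal ramp and the width samples should rise from 0.0 toward 1.0 while the height samples stay flat near 0.5.

import torch

# Build a [B, H, W, C] image whose value ramps 0 -> 1 from left to right.
ramp = torch.linspace(0, 1, steps=512).view(1, 1, 512, 1).expand(1, 512, 512, 3)

node = GradientToFloat()
w_values, h_values = node.sample(ramp, steps=10)
# w_values: 10 floats increasing from 0.0 toward 1.0
# h_values: 10 floats, each close to 0.5, since every row of the ramp is identical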