ComfyUI-KJNodes/web/js/jsnodes.js
kijai 1959a4de55 Squashed commit of the following:
commit d5e37ff797ecfe975bed982f25cbfbfd1e60c7ee
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Sat Apr 20 21:22:29 2024 +0300

    Update spline_editor.js

commit bdade789ee3650cbd2b42bb844b16e175267575d
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Sat Apr 20 21:21:34 2024 +0300

    spline editor fixes

commit 1aef2b8e43397eddc49dca304d59bd7a6674f55d
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Sat Apr 20 18:20:02 2024 +0300

    spline editor updates

commit cb1e98abf1d9fcacc9b4912971df175c5a8ea9ac
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Sat Apr 20 11:27:50 2024 +0300

    Update spline_editor.js

commit 891daeb4389318e9880f1ff68f53eba068f46739
Merge: d9712b0 6e1fa8d
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Fri Apr 19 18:40:44 2024 +0300

    Merge branch 'main' into develop

commit d9712b0e04c70c6299fa61ce5c128ba56228ea11
Author: kijai <40791699+kijai@users.noreply.github.com>
Date:   Thu Apr 18 01:45:32 2024 +0300

    spline editor work

commit 92711dc7625da578cf59520a41462a845069ac56
Author: Kijai <40791699+kijai@users.noreply.github.com>
Date:   Wed Apr 17 19:43:49 2024 +0300

    Update spline_editor.js

commit 6f256423afa6d3e8a0ca2add461328d9f9b4ce82
Author: Kijai <40791699+kijai@users.noreply.github.com>
Date:   Wed Apr 17 19:21:32 2024 +0300

    Update spline_editor.js

commit 47c23d5a19c84d22d94668f5b5ba2ceab5b94988
Author: Kijai <40791699+kijai@users.noreply.github.com>
Date:   Tue Apr 16 19:19:16 2024 +0300

    reworking spline editor (not functional yet)
2024-04-20 21:22:44 +03:00

135 lines
5.4 KiB
JavaScript

import { app } from "../../../scripts/app.js";
app.registerExtension({
  name: "KJNodes.jsnodes",
  async beforeRegisterNodeDef(nodeType, nodeData, app) {
    switch (nodeData.name) {
      case "ConditioningMultiCombine":
        // Adds an "Update inputs" button that grows or shrinks the node's
        // CONDITIONING input slots to match the "inputcount" widget value.
        nodeType.prototype.onNodeCreated = function () {
          this.cond_type = "CONDITIONING";
          // "selective" node variants reserve one leading input slot.
          this.inputs_offset = nodeData.name.includes("selective") ? 1 : 0;
          this.addWidget("button", "Update inputs", null, () => {
            if (!this.inputs) {
              this.inputs = [];
            }
            const target_number_of_inputs = this.widgets.find((w) => w.name === "inputcount").value;
            if (target_number_of_inputs === this.inputs.length) return; // already set, do nothing
            if (target_number_of_inputs < this.inputs.length) {
              // Remove surplus slots from the end. Start at length - 1, the
              // last valid slot index (the original started at `length`,
              // making the first removeInput call a no-op on a nonexistent slot).
              for (let i = this.inputs.length - 1; i >= this.inputs_offset + target_number_of_inputs; i--) {
                this.removeInput(i);
              }
            } else {
              // Append new slots; display names are 1-based (conditioning_1, ...).
              for (let i = this.inputs.length + 1 - this.inputs_offset; i <= target_number_of_inputs; ++i) {
                this.addInput(`conditioning_${i}`, this.cond_type);
              }
            }
          });
        };
        break;
      case "SoundReactive":
        // Captures microphone audio and continuously writes the smoothed
        // average level of a chosen frequency band into the "sound_level" widget.
        nodeType.prototype.onNodeCreated = function () {
          let audioContext;
          let microphoneStream;
          let animationFrameId;
          let analyser;
          let dataArray;
          let startRangeHz;
          let endRangeHz;
          let smoothingFactor = 0.5;
          let smoothedSoundLevel = 0;

          // Runs once per animation frame while capture is active: samples the
          // analyser, averages the configured Hz band, smooths it (EMA), and
          // pushes the result into the "sound_level" widget.
          const updateWidgetValueInRealTime = () => {
            // Ensure analyser and dataArray are defined before using them
            // (they are cleared by stopMicrophoneCapture).
            if (analyser && dataArray) {
              analyser.getByteFrequencyData(dataArray);

              // Re-read the band and smoothing widgets every frame so edits
              // take effect live without restarting capture.
              const startRangeHzWidget = this.widgets.find((w) => w.name === "start_range_hz");
              if (startRangeHzWidget) startRangeHz = startRangeHzWidget.value;
              const endRangeHzWidget = this.widgets.find((w) => w.name === "end_range_hz");
              if (endRangeHzWidget) endRangeHz = endRangeHzWidget.value;
              const smoothingFactorWidget = this.widgets.find((w) => w.name === "smoothing_factor");
              if (smoothingFactorWidget) smoothingFactor = smoothingFactorWidget.value;

              // Frequency resolution of each FFT bin, then convert the
              // widget values from Hz to bin indices.
              const frequencyBinWidth = audioContext.sampleRate / analyser.fftSize;
              const startRangeIndex = Math.floor(startRangeHz / frequencyBinWidth);
              const endRangeIndex = Math.floor(endRangeHz / frequencyBinWidth);

              // Average the byte magnitudes over [start, end) and fold into
              // an exponential moving average.
              const calculateAverage = (start, end) => {
                // Guard: an empty/inverted range would divide by zero and
                // poison smoothedSoundLevel with NaN forever.
                if (end <= start) return smoothedSoundLevel;
                const sum = dataArray.slice(start, end).reduce((acc, val) => acc + val, 0);
                const average = sum / (end - start);
                smoothedSoundLevel = average * (1 - smoothingFactor) + smoothedSoundLevel * smoothingFactor;
                return smoothedSoundLevel;
              };

              const soundLevel = calculateAverage(startRangeIndex, endRangeIndex);

              const lowLevelWidget = this.widgets.find((w) => w.name === "sound_level");
              if (lowLevelWidget) lowLevelWidget.value = soundLevel;

              animationFrameId = requestAnimationFrame(updateWidgetValueInRealTime);
            }
          };

          // Starts (or restarts) microphone capture and the update loop.
          const startMicrophoneCapture = () => {
            // Only create the audio context and analyser once per capture
            // session; stopMicrophoneCapture resets them so restart works.
            if (!audioContext) {
              audioContext = new (window.AudioContext || window.webkitAudioContext)();
              console.log(`Sample rate: ${audioContext.sampleRate}Hz`);
              analyser = audioContext.createAnalyser();
              analyser.fftSize = 2048;
              dataArray = new Uint8Array(analyser.frequencyBinCount);
              // Seed the band from widgets (assumed to be in Hz).
              // NOTE(review): these names (low_range_hz/mid_range_hz) differ
              // from the ones the realtime loop reads
              // (start_range_hz/end_range_hz) — confirm against the Python
              // node's widget definitions; one set may never match.
              const lowRangeWidget = this.widgets.find((w) => w.name === "low_range_hz");
              if (lowRangeWidget) startRangeHz = lowRangeWidget.value;
              const midRangeWidget = this.widgets.find((w) => w.name === "mid_range_hz");
              if (midRangeWidget) endRangeHz = midRangeWidget.value;
            }
            navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
              microphoneStream = stream;
              const microphone = audioContext.createMediaStreamSource(stream);
              microphone.connect(analyser);
              updateWidgetValueInRealTime();
            }).catch((error) => {
              console.error('Access to microphone was denied or an error occurred:', error);
            });
          };

          // Stops the update loop, releases the mic tracks, and tears down
          // the audio graph so a later start creates everything fresh.
          const stopMicrophoneCapture = () => {
            if (animationFrameId) {
              cancelAnimationFrame(animationFrameId);
              animationFrameId = null;
            }
            if (microphoneStream) {
              microphoneStream.getTracks().forEach((track) => track.stop());
              microphoneStream = null;
            }
            if (audioContext) {
              audioContext.close();
              // Reset everything tied to the closed context so the update
              // loop's guard fails and a restart rebuilds the graph (the
              // original left analyser/dataArray as stale references).
              audioContext = null;
              analyser = null;
              dataArray = null;
            }
          };

          // Add start button
          this.addWidget("button", "Start mic capture", null, startMicrophoneCapture);
          // Add stop button
          this.addWidget("button", "Stop mic capture", null, stopMicrophoneCapture);
        };
        break;
    }
  },
});