update DB

Dr.Lt.Data 2023-09-12 01:06:35 +09:00
parent 6a9a2f52c2
commit 128f6b15e1
6 changed files with 84 additions and 1 deletion

View File

@@ -149,6 +149,11 @@
"id":"https://github.com/ssitu/ComfyUI_fabric",
"tags":"fabric",
"description": "Similar to sd-webui-fabric, this custom nodes provide the functionality of <a href='https://github.com/sd-fabric/fabric' target='blank'>FABRIC</a>."
},
{
"id":"https://github.com/Zuellni/ComfyUI-ExLlama",
"tags":"ExLlama, prompt, language model",
"description": "Similar to text-generation-webui, this custom nodes provide the functionality of <a href='https://github.com/turboderp/exllama' target='blank'>exllama</a>."
}
]
}

View File

@@ -457,6 +457,17 @@
"install_type": "git-clone",
"description": "Nodes: DeepFloyd, Filter, Select, Save, Decode, Encode, Repeat, Noise, Noise"
},
{
"author": "Zuellni",
"title": "ComfyUI-ExLlama",
"reference": "https://github.com/Zuellni/ComfyUI-ExLlama",
"files": [
"https://github.com/Zuellni/ComfyUI-ExLlama"
],
"pip": ["sentencepiece", "https://github.com/jllllll/exllama/releases/download/0.0.17/exllama-0.0.17+cu118-cp310-cp310-win_amd64.whl"],
"install_type": "git-clone",
"description": "Nodes: ExLlama Loader, ExLlama Generator. <BR>Used to load 4-bit GPTQ Llama/2 models. You can find a lot of them over at <a href='https://huggingface.co/TheBloke'>https://huggingface.co/TheBloke</a><p style='background-color: black; color: red;'>NOTE: You need to manually install a pip package that suits your system. For example. If your system is 'Python3.10 + Windows + CUDA 11.8' then you need to install 'exllama-0.0.17+cu118-cp310-cp310-win_amd64.whl'. Available package files are <a href='https://github.com/jllllll/exllama/releases'>here</a>."
},
{
"author": "AlekPet",
"title": "AlekPet/ComfyUI_Custom_Nodes_AlekPet",
@@ -1731,6 +1742,16 @@
"install_type": "git-clone",
"description": "ComfyUI custom user.css and some script stuff. mainly for web interface."
},
{
"author": "budihartono",
"title": "Otonx's Custom Nodes",
"reference": "https://github.com/budihartono/comfyui_otonx_nodes",
"files": [
"https://github.com/budihartono/comfyui_otonx_nodes"
],
"install_type": "git-clone",
"description": "Nodes: OTX Multiple Values, OTX KSampler Feeder. This extension provides custom nodes for ComfyUI created for personal projects. Made available for reference. Nodes may be updated or changed intermittently or not at all. Review & test before use."
},
{
"author": "taabata",
"title": "Syrian Falcon Nodes",

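The ComfyUI-ExLlama entry above asks the user to hand-pick the exllama wheel matching their Python version, OS, and CUDA build. A minimal sketch of that selection step in Python follows. Only the exllama-0.0.17+cu118-cp310-cp310-win_amd64.whl wheel is named in this commit; the CUDA tag, the non-Windows platform tag, and the existence of any other wheel are assumptions to verify against the jllllll/exllama releases page before running.

# Hedged sketch: build a wheel name following the pattern of the one wheel
# named in the entry above, then install it together with sentencepiece.
# CUDA_TAG and the non-Windows platform tag are assumptions; check that the
# resulting file actually exists under the jllllll/exllama 0.0.17 release.
import platform
import subprocess
import sys

VERSION = "0.0.17"   # release tag used in the entry above
CUDA_TAG = "cu118"   # assumption: change to match your installed CUDA toolkit

def wheel_url() -> str:
    py_tag = f"cp{sys.version_info.major}{sys.version_info.minor}"                # e.g. cp310
    plat_tag = "win_amd64" if platform.system() == "Windows" else "linux_x86_64"  # assumed tag on Linux
    name = f"exllama-{VERSION}+{CUDA_TAG}-{py_tag}-{py_tag}-{plat_tag}.whl"
    return f"https://github.com/jllllll/exllama/releases/download/{VERSION}/{name}"

if __name__ == "__main__":
    subprocess.check_call([sys.executable, "-m", "pip", "install", "sentencepiece", wheel_url()])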
View File

@@ -1384,6 +1384,15 @@
"title_aux": "ImageReward"
}
],
"https://github.com/Zuellni/ComfyUI-ExLlama": [
[
"ZuellniExLlamaGenerator",
"ZuellniExLlamaLoader"
],
{
"title_aux": "ComfyUI-ExLlama"
}
],
"https://github.com/adieyal/comfyui-dynamicprompts": [
[
"DPCombinatorialGenerator",
@@ -1618,6 +1627,15 @@
"title_aux": "braintacles-nodes"
}
],
"https://github.com/budihartono/comfyui_otonx_nodes": [
[
"OTX KSampler Feeder",
"OTX Multiple Values"
],
{
"title_aux": "Otonx's Custom Nodes"
}
],
"https://github.com/bvhari/ComfyUI_ImageProcessing": [
[
"BilateralFilter",

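For reference, each node-map entry added above (and repeated in the node_db copy further down) maps a repository URL to a pair: the list of node class names the pack registers, and a dictionary carrying title_aux. Below is a minimal sketch of looking one up, assuming the map is stored as a plain JSON object keyed by repository URL; the file name used here is hypothetical, not taken from this diff.

# Hedged sketch: load a node map shaped like the entries above and return the
# node class names plus the auxiliary title for one repository URL.
# "extension-node-map.json" is an assumed file name.
import json

def nodes_for_repo(map_path, repo_url):
    with open(map_path, encoding="utf-8") as f:
        node_map = json.load(f)
    node_names, meta = node_map[repo_url]   # [names], {"title_aux": ...}
    return node_names, meta.get("title_aux")

# Example, using an entry added in this commit:
# nodes_for_repo("extension-node-map.json", "https://github.com/Zuellni/ComfyUI-ExLlama")
# -> (["ZuellniExLlamaGenerator", "ZuellniExLlamaLoader"], "ComfyUI-ExLlama")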
View File

@@ -1948,7 +1948,7 @@ app.registerExtension({
nodeType.prototype.onDrawForeground = function (ctx) {
const r = onDrawForeground?.apply?.(this, arguments);
if(!this.flags.collapsed && badge_mode != 'none') {
if(!this.flags.collapsed && badge_mode != 'none' && this.size[1] > LiteGraph.NODE_TITLE_HEIGHT) {
let text = "";
if(badge_mode == 'id_nick')
text = `#${this.id} `;

View File

@@ -1,5 +1,26 @@
{
"custom_nodes": [
{
"author": "Zuellni",
"title": "ComfyUI-ExLlama",
"reference": "https://github.com/Zuellni/ComfyUI-ExLlama",
"files": [
"https://github.com/Zuellni/ComfyUI-ExLlama"
],
"pip": ["sentencepiece", "https://github.com/jllllll/exllama/releases/download/0.0.17/exllama-0.0.17+cu118-cp310-cp310-win_amd64.whl"],
"install_type": "git-clone",
"description": "Nodes: ExLlama Loader, ExLlama Generator. <BR>Used to load 4-bit GPTQ Llama/2 models. You can find a lot of them over at <a href='https://huggingface.co/TheBloke'>https://huggingface.co/TheBloke</a><p style='background-color: black; color: red;'>NOTE: You need to manually install a pip package that suits your system. For example. If your system is 'Python3.10 + Windows + CUDA 11.8' then you need to install 'exllama-0.0.17+cu118-cp310-cp310-win_amd64.whl'. Available package files are <a href='https://github.com/jllllll/exllama/releases'>here</a>."
},
{
"author": "budihartono",
"title": "Otonx's Custom Nodes",
"reference": "https://github.com/budihartono/comfyui_otonx_nodes",
"files": [
"https://github.com/budihartono/comfyui_otonx_nodes"
],
"install_type": "git-clone",
"description": "Nodes: OTX Multiple Values, OTX KSampler Feeder. This extension provides custom nodes for ComfyUI created for personal projects. Made available for reference. Nodes may be updated or changed intermittently or not at all. Review & test before use."
},
{
"author": "bvhari",
"title": "ComfyUI_PerpWeight",

View File

@@ -1384,6 +1384,15 @@
"title_aux": "ImageReward"
}
],
"https://github.com/Zuellni/ComfyUI-ExLlama": [
[
"ZuellniExLlamaGenerator",
"ZuellniExLlamaLoader"
],
{
"title_aux": "ComfyUI-ExLlama"
}
],
"https://github.com/adieyal/comfyui-dynamicprompts": [
[
"DPCombinatorialGenerator",
@@ -1618,6 +1627,15 @@
"title_aux": "braintacles-nodes"
}
],
"https://github.com/budihartono/comfyui_otonx_nodes": [
[
"OTX KSampler Feeder",
"OTX Multiple Values"
],
{
"title_aux": "Otonx's Custom Nodes"
}
],
"https://github.com/bvhari/ComfyUI_ImageProcessing": [
[
"BilateralFilter",