Mirror of https://git.datalinker.icu/ltdrdata/ComfyUI-Manager (synced 2025-12-09 06:04:31 +08:00)
restructuring

The existing cache-based implementation will be retained as a fallback under legacy/..., while glob/... will be updated to a cacheless implementation.
This commit is contained in: parent 0146655f0f, commit 86c7482048
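For orientation, here is a minimal, standalone sketch (not part of the commit) of the frontend selection that the updated start() performs, condensed from the __init__ changes below. The ENABLE_LEGACY_COMFYUI_MANAGER_FRONT environment variable and the ENABLE_LEGACY_COMFYUI_MANAGER_FRONT_DEFAULT flag appear in the diff; the default value and the way the disable flag reaches start() are assumptions here.

```python
# Minimal sketch of the selection logic introduced by this commit (condensed, not verbatim).
import os

ENABLE_LEGACY_COMFYUI_MANAGER_FRONT_DEFAULT = False  # assumption: the real default flag is defined in the package


def choose_frontend(disable_manager: bool) -> str:
    """Return which implementation would be loaded: 'legacy' (cache-based) or 'glob' (cacheless)."""
    should_show_legacy = (
        os.environ.get('ENABLE_LEGACY_COMFYUI_MANAGER_FRONT', 'false') == 'true'
        or ENABLE_LEGACY_COMFYUI_MANAGER_FRONT_DEFAULT
    )
    if not disable_manager and should_show_legacy:
        # start() imports .legacy.manager_server / .legacy.share_3rdparty and registers
        # the 'comfyui-manager-legacy' web dir in nodes.EXTENSION_WEB_DIRS
        return 'legacy'
    # otherwise start() imports .glob.manager_server / .glob.share_3rdparty (cacheless)
    return 'glob'


print(choose_frontend(disable_manager=False))  # 'glob' unless the env var opts into the legacy front
```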
@@ -11,13 +11,20 @@ def prestartup():

def start():
    logging.info('[START] ComfyUI-Manager')
    from .glob import manager_server # noqa: F401
    from .glob import share_3rdparty # noqa: F401
    from .glob import cm_global # noqa: F401
    from .common import cm_global # noqa: F401

    if os.environ.get('ENABLE_LEGACY_COMFYUI_MANAGER_FRONT', 'false') == 'true':
        import nodes
        nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
    should_show_legacy_manager_front = os.environ.get('ENABLE_LEGACY_COMFYUI_MANAGER_FRONT', 'false') == 'true' or ENABLE_LEGACY_COMFYUI_MANAGER_FRONT_DEFAULT
    if not args.disable_manager and should_show_legacy_manager_front:
        try:
            from .legacy import manager_server # noqa: F401
            from .legacy import share_3rdparty # noqa: F401
            import nodes
            nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
        except Exception as e:
            print("Error enabling legacy ComfyUI Manager frontend:", e)
    else:
        from .glob import manager_server # noqa: F401
        from .glob import share_3rdparty # noqa: F401


def should_be_disabled(fullpath:str) -> bool:
@@ -15,7 +15,7 @@ import git
import importlib


import manager_util
from .common import manager_util

# read env vars
# COMFYUI_FOLDERS_BASE_PATH is not required in cm-cli.py

@@ -35,10 +35,11 @@ if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):


import utils.extra_config
from .glob import cm_global
from .common import cm_global
from .glob import manager_core as core
from .common import context
from .glob.manager_core import unified_manager
from .glob import cnr_utils
from .common import cnr_utils

comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))

@@ -84,7 +85,7 @@ def read_downgrade_blacklist():
    try:
        import configparser
        config = configparser.ConfigParser(strict=False)
        config.read(core.manager_config.path)
        config.read(context.manager_config_path)
        default_conf = config['default']

        if 'downgrade_blacklist' in default_conf:

@@ -145,17 +146,17 @@ class Ctx:
        if os.path.exists(extra_model_paths_yaml):
            utils.extra_config.load_extra_path_config(extra_model_paths_yaml)

        core.update_user_directory(user_directory)
        context.update_user_directory(user_directory)

        if os.path.exists(core.manager_pip_overrides_path):
            with open(core.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
        if os.path.exists(context.manager_pip_overrides_path):
            with open(context.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
                cm_global.pip_overrides = json.load(json_file)

        if sys.version_info < (3, 13):
            cm_global.pip_overrides = {'numpy': 'numpy<2'}

        if os.path.exists(core.manager_pip_blacklist_path):
            with open(core.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
        if os.path.exists(context.manager_pip_blacklist_path):
            with open(context.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
                for x in f.readlines():
                    y = x.strip()
                    if y != '':

@@ -168,15 +169,15 @@ class Ctx:

    @staticmethod
    def get_startup_scripts_path():
        return os.path.join(core.manager_startup_script_path, "install-scripts.txt")
        return os.path.join(context.manager_startup_script_path, "install-scripts.txt")

    @staticmethod
    def get_restore_snapshot_path():
        return os.path.join(core.manager_startup_script_path, "restore-snapshot.json")
        return os.path.join(context.manager_startup_script_path, "restore-snapshot.json")

    @staticmethod
    def get_snapshot_path():
        return core.manager_snapshot_path
        return context.manager_snapshot_path

    @staticmethod
    def get_custom_nodes_paths():

@@ -701,7 +702,7 @@ def reinstall(
    cmd_ctx.set_channel_mode(channel, mode)
    cmd_ctx.set_no_deps(no_deps)

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    for_each_nodes(nodes, act=reinstall_node)
    pip_fixer.fix_broken()

@@ -755,7 +756,7 @@ def update(
    if 'all' in nodes:
        asyncio.run(auto_save_snapshot())

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)

    for x in nodes:
        if x.lower() in ['comfyui', 'comfy', 'all']:

@@ -856,7 +857,7 @@ def fix(
    if 'all' in nodes:
        asyncio.run(auto_save_snapshot())

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    for_each_nodes(nodes, fix_node, allow_all=True)
    pip_fixer.fix_broken()

@@ -1133,7 +1134,7 @@ def restore_snapshot(
        print(f"[bold red]ERROR: `{snapshot_path}` is not exists.[/bold red]")
        exit(1)

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    try:
        asyncio.run(core.restore_snapshot(snapshot_path, extras))
    except Exception:

@@ -1165,7 +1166,7 @@ def restore_dependencies(
    total = len(node_paths)
    i = 1

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    for x in node_paths:
        print("----------------------------------------------------------------------------------------------------")
        print(f"Restoring [{i}/{total}]: {x}")

@@ -1184,7 +1185,7 @@ def post_install(
):
    path = os.path.expanduser(path)

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    unified_manager.execute_install_script('', path, instant_execution=True)
    pip_fixer.fix_broken()

@@ -1228,7 +1229,7 @@ def install_deps(
        print(f"[bold red]Invalid json file: {deps}[/bold red]")
        exit(1)

    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    for k in json_obj['custom_nodes'].keys():
        state = core.simple_check_custom_node(k)
        if state == 'installed':
comfyui_manager/common/__init__.py (new file, 0 lines)
@@ -6,7 +6,7 @@ import time
from dataclasses import dataclass
from typing import List

from . import manager_core
from . import context
from . import manager_util

import requests

@@ -48,9 +48,9 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
    # Get ComfyUI version tag
    if is_desktop:
        # extract version from pyproject.toml instead of git tag
        comfyui_ver = manager_core.get_current_comfyui_ver() or 'unknown'
        comfyui_ver = context.get_current_comfyui_ver() or 'unknown'
    else:
        comfyui_ver = manager_core.get_comfyui_tag() or 'unknown'
        comfyui_ver = context.get_comfyui_tag() or 'unknown'

    if is_desktop:
        if is_windows:
comfyui_manager/common/context.py (new file, 109 lines)
@@ -0,0 +1,109 @@
import sys
import os
import logging
from . import manager_util
import toml
import git


# read env vars
comfy_path: str = os.environ.get('COMFYUI_PATH')
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')

if comfy_path is None:
    try:
        comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
        os.environ['COMFYUI_PATH'] = comfy_path
    except:
        logging.error("[ComfyUI-Manager] environment variable 'COMFYUI_PATH' is not specified.")
        exit(-1)

if comfy_base_path is None:
    comfy_base_path = comfy_path

channel_list_template_path = os.path.join(manager_util.comfyui_manager_path, 'channels.list.template')
git_script_path = os.path.join(manager_util.comfyui_manager_path, "git_helper.py")

manager_files_path = None
manager_config_path = None
manager_channel_list_path = None
manager_startup_script_path:str = None
manager_snapshot_path = None
manager_pip_overrides_path = None
manager_pip_blacklist_path = None
manager_components_path = None
manager_batch_history_path = None


def update_user_directory(user_dir):
    global manager_files_path
    global manager_config_path
    global manager_channel_list_path
    global manager_startup_script_path
    global manager_snapshot_path
    global manager_pip_overrides_path
    global manager_pip_blacklist_path
    global manager_components_path
    global manager_batch_history_path

    manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
    if not os.path.exists(manager_files_path):
        os.makedirs(manager_files_path)

    manager_snapshot_path = os.path.join(manager_files_path, "snapshots")
    if not os.path.exists(manager_snapshot_path):
        os.makedirs(manager_snapshot_path)

    manager_startup_script_path = os.path.join(manager_files_path, "startup-scripts")
    if not os.path.exists(manager_startup_script_path):
        os.makedirs(manager_startup_script_path)

    manager_config_path = os.path.join(manager_files_path, 'config.ini')
    manager_channel_list_path = os.path.join(manager_files_path, 'channels.list')
    manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
    manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
    manager_components_path = os.path.join(manager_files_path, "components")
    manager_util.cache_dir = os.path.join(manager_files_path, "cache")
    manager_batch_history_path = os.path.join(manager_files_path, "batch_history")

    if not os.path.exists(manager_util.cache_dir):
        os.makedirs(manager_util.cache_dir)

    if not os.path.exists(manager_batch_history_path):
        os.makedirs(manager_batch_history_path)


try:
    import folder_paths
    update_user_directory(folder_paths.get_user_directory())

except Exception:
    # fallback:
    # This case is only possible when running with cm-cli, and in practice, this case is not actually used.
    update_user_directory(os.path.abspath(manager_util.comfyui_manager_path))


def get_current_comfyui_ver():
    """
    Extract version from pyproject.toml
    """
    toml_path = os.path.join(comfy_path, 'pyproject.toml')
    if not os.path.exists(toml_path):
        return None
    else:
        try:
            with open(toml_path, "r", encoding="utf-8") as f:
                data = toml.load(f)

            project = data.get('project', {})
            return project.get('version')
        except:
            return None


def get_comfyui_tag():
    try:
        with git.Repo(comfy_path) as repo:
            return repo.git.describe('--tags')
    except:
        return None
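The new common/context.py gathers the ComfyUI/manager path globals that glob/manager_core.py previously owned (compare the removed get_comfyui_tag()/get_current_comfyui_ver() further down). A minimal usage sketch, assuming the comfyui_manager package is importable and folder_paths is available as in the try block above; the directory shown is hypothetical:

```python
# Sketch only: how other modules are expected to read paths from the shared context module.
from comfyui_manager.common import context

# update_user_directory() runs at import time via folder_paths.get_user_directory();
# cm-cli can re-point it explicitly before use (hypothetical directory below):
context.update_user_directory('/path/to/ComfyUI/user')

print(context.manager_config_path)    # .../user/default/ComfyUI-Manager/config.ini
print(context.manager_snapshot_path)  # .../user/default/ComfyUI-Manager/snapshots
print(context.get_comfyui_tag() or context.get_current_comfyui_ver() or 'unknown')
```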
comfyui_manager/glob/__init__.py (new file, 0 lines)
@@ -23,7 +23,6 @@ import yaml
import zipfile
import traceback
from concurrent.futures import ThreadPoolExecutor, as_completed
import toml

orig_print = print

@@ -32,13 +31,15 @@ from packaging import version

import uuid

from . import cm_global
from . import cnr_utils
from . import manager_util
from . import git_utils
from . import manager_downloader
from .node_package import InstalledNodePackage
from .enums import NetworkMode, SecurityLevel, DBMode
from ..common import cm_global
from ..common import cnr_utils
from ..common import manager_util
from ..common import git_utils
from ..common import manager_downloader
from ..common.node_package import InstalledNodePackage
from ..common.enums import NetworkMode, SecurityLevel, DBMode
from ..common import context


version_code = [4, 0]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')

@@ -77,32 +78,6 @@ def get_custom_nodes_paths():
    return [custom_nodes_path]


def get_comfyui_tag():
    try:
        repo = git.Repo(comfy_path)
        return repo.git.describe('--tags')
    except:
        return None


def get_current_comfyui_ver():
    """
    Extract version from pyproject.toml
    """
    toml_path = os.path.join(comfy_path, 'pyproject.toml')
    if not os.path.exists(toml_path):
        return None
    else:
        try:
            with open(toml_path, "r", encoding="utf-8") as f:
                data = toml.load(f)

            project = data.get('project', {})
            return project.get('version')
        except:
            return None


def get_script_env():
    new_env = os.environ.copy()
    git_exe = get_config().get('git_exe')

@@ -110,10 +85,10 @@ def get_script_env():
        new_env['GIT_EXE_PATH'] = git_exe

    if 'COMFYUI_PATH' not in new_env:
        new_env['COMFYUI_PATH'] = comfy_path
        new_env['COMFYUI_PATH'] = context.comfy_path

    if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
        new_env['COMFYUI_FOLDERS_BASE_PATH'] = comfy_path
        new_env['COMFYUI_FOLDERS_BASE_PATH'] = context.comfy_path

    return new_env

@@ -137,10 +112,10 @@ def check_invalid_nodes():
        import folder_paths
    except:
        try:
            sys.path.append(comfy_path)
            sys.path.append(context.comfy_path)
            import folder_paths
        except:
            raise Exception(f"Invalid COMFYUI_FOLDERS_BASE_PATH: {comfy_path}")
            raise Exception(f"Invalid COMFYUI_FOLDERS_BASE_PATH: {context.comfy_path}")

    def check(root):
        global invalid_nodes

@@ -735,6 +710,8 @@ class UnifiedManager:
        return latest

    async def reload(self, cache_mode, dont_wait=True, update_cnr_map=True):
        import folder_paths

        self.custom_node_map_cache = {}
        self.cnr_inactive_nodes = {}  # node_id -> node_version -> fullpath
        self.nightly_inactive_nodes = {}  # node_id -> fullpath

@@ -868,7 +845,7 @@ class UnifiedManager:
        else:
            if os.path.exists(requirements_path) and not no_deps:
                print("Install: pip packages")
                pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
                pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
                lines = manager_util.robust_readlines(requirements_path)
                for line in lines:
                    package_name = remap_pip_package(line.strip())

@@ -890,7 +867,7 @@ class UnifiedManager:
        return res

    def reserve_cnr_switch(self, target, zip_url, from_path, to_path, no_deps):
        script_path = os.path.join(manager_startup_script_path, "install-scripts.txt")
        script_path = os.path.join(context.manager_startup_script_path, "install-scripts.txt")
        with open(script_path, "a") as file:
            obj = [target, "#LAZY-CNR-SWITCH-SCRIPT", zip_url, from_path, to_path, no_deps, get_default_custom_nodes_path(), sys.executable]
            file.write(f"{obj}\n")

@@ -1296,7 +1273,7 @@ class UnifiedManager:
        print(f"Download: git clone '{clone_url}'")

        if not instant_execution and platform.system() == 'Windows':
            res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
            res = manager_funcs.run_script([sys.executable, context.git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
            if res != 0:
                return result.fail(f"Failed to clone repo: {clone_url}")
        else:

@@ -1577,10 +1554,10 @@ def get_channel_dict():
    if channel_dict is None:
        channel_dict = {}

        if not os.path.exists(manager_channel_list_path):
            shutil.copy(channel_list_template_path, manager_channel_list_path)
        if not os.path.exists(context.manager_channel_list_path):
            shutil.copy(context.channel_list_template_path, context.manager_channel_list_path)

        with open(manager_channel_list_path, 'r') as file:
        with open(context.manager_channel_list_path, 'r') as file:
            channels = file.read()
            for x in channels.split('\n'):
                channel_info = x.split("::")

@@ -1644,18 +1621,18 @@ def write_config():
        'db_mode': get_config()['db_mode'],
    }

    directory = os.path.dirname(manager_config_path)
    directory = os.path.dirname(context.manager_config_path)
    if not os.path.exists(directory):
        os.makedirs(directory)

    with open(manager_config_path, 'w') as configfile:
    with open(context.manager_config_path, 'w') as configfile:
        config.write(configfile)


def read_config():
    try:
        config = configparser.ConfigParser(strict=False)
        config.read(manager_config_path)
        config.read(context.manager_config_path)
        default_conf = config['default']
        manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False

@@ -1775,10 +1752,10 @@ def switch_to_default_branch(repo):


def reserve_script(repo_path, install_cmds):
    if not os.path.exists(manager_startup_script_path):
        os.makedirs(manager_startup_script_path)
    if not os.path.exists(context.manager_startup_script_path):
        os.makedirs(context.manager_startup_script_path)

    script_path = os.path.join(manager_startup_script_path, "install-scripts.txt")
    script_path = os.path.join(context.manager_startup_script_path, "install-scripts.txt")
    with open(script_path, "a") as file:
        obj = [repo_path] + install_cmds
        file.write(f"{obj}\n")

@@ -1833,11 +1810,11 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
# use subprocess to avoid file system lock by git (Windows)
def __win_check_git_update(path, do_fetch=False, do_update=False):
    if do_fetch:
        command = [sys.executable, git_script_path, "--fetch", path]
        command = [sys.executable, context.git_script_path, "--fetch", path]
    elif do_update:
        command = [sys.executable, git_script_path, "--pull", path]
        command = [sys.executable, context.git_script_path, "--pull", path]
    else:
        command = [sys.executable, git_script_path, "--check", path]
        command = [sys.executable, context.git_script_path, "--check", path]

    new_env = get_script_env()
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=get_default_custom_nodes_path(), env=new_env)

@@ -1891,7 +1868,7 @@ def __win_check_git_update(path, do_fetch=False, do_update=False):


def __win_check_git_pull(path):
    command = [sys.executable, git_script_path, "--pull", path]
    command = [sys.executable, context.git_script_path, "--pull", path]
    process = subprocess.Popen(command, env=get_script_env(), cwd=get_default_custom_nodes_path())
    process.wait()

@@ -1907,7 +1884,7 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
    else:
        if os.path.exists(requirements_path) and not no_deps:
            print("Install: pip packages")
            pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
            pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
            with open(requirements_path, "r") as requirements_file:
                for line in requirements_file:
                    #handle comments

@@ -2143,7 +2120,7 @@ async def gitclone_install(url, instant_execution=False, msg_prefix='', no_deps=
    clone_url = git_utils.get_url_for_clone(url)

    if not instant_execution and platform.system() == 'Windows':
        res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
        res = manager_funcs.run_script([sys.executable, context.git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
        if res != 0:
            return result.fail(f"Failed to clone '{clone_url}' into '{repo_path}'")
    else:

@@ -2305,7 +2282,7 @@ def gitclone_uninstall(files):
        url = url[:-1]
        try:
            for custom_nodes_dir in get_custom_nodes_paths():
                dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
                dir_name:str = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
                dir_path = os.path.join(custom_nodes_dir, dir_name)

                # safety check

@@ -2353,7 +2330,7 @@ def gitclone_set_active(files, is_disable):
        url = url[:-1]
        try:
            for custom_nodes_dir in get_custom_nodes_paths():
                dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
                dir_name:str = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
                dir_path = os.path.join(custom_nodes_dir, dir_name)

                # safety check

@@ -2626,7 +2603,7 @@ async def get_current_snapshot(custom_nodes_only = False):
        await unified_manager.get_custom_nodes('default', 'cache')

    # Get ComfyUI hash
    repo_path = comfy_path
    repo_path = context.comfy_path

    comfyui_commit_hash = None
    if not custom_nodes_only:

@@ -2702,7 +2679,7 @@ async def save_snapshot_with_postfix(postfix, path=None, custom_nodes_only = Fal
        date_time_format = now.strftime("%Y-%m-%d_%H-%M-%S")
        file_name = f"{date_time_format}_{postfix}"

        path = os.path.join(manager_snapshot_path, f"{file_name}.json")
        path = os.path.join(context.manager_snapshot_path, f"{file_name}.json")
    else:
        file_name = path.replace('\\', '/').split('/')[-1]
        file_name = file_name.split('.')[-2]

@@ -3284,7 +3261,7 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):

def get_comfyui_versions(repo=None):
    if repo is None:
        repo = git.Repo(comfy_path)
        repo = git.Repo(context.comfy_path)

    try:
        remote = get_remote_name(repo)

@@ -3318,7 +3295,7 @@ def get_comfyui_versions(repo=None):


def switch_comfyui(tag):
    repo = git.Repo(comfy_path)
    repo = git.Repo(context.comfy_path)

    if tag == 'nightly':
        repo.git.checkout('master')
@@ -19,9 +19,10 @@ import asyncio
from collections import deque

from . import manager_core as core
from . import manager_util
from . import cm_global
from . import manager_downloader
from ..common import manager_util
from ..common import cm_global
from ..common import manager_downloader
from ..common import context


logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")

@@ -160,10 +161,10 @@ class ManagerFuncsInComfyUI(core.ManagerFuncs):

core.manager_funcs = ManagerFuncsInComfyUI()

from .manager_downloader import download_url, download_url_with_agent
from comfyui_manager.common.manager_downloader import download_url, download_url_with_agent

core.comfy_path = os.path.dirname(folder_paths.__file__)
core.js_path = os.path.join(core.comfy_path, "web", "extensions")
context.comfy_path = os.path.dirname(folder_paths.__file__)
core.js_path = os.path.join(context.comfy_path, "web", "extensions")

local_db_model = os.path.join(manager_util.comfyui_manager_path, "model-list.json")
local_db_alter = os.path.join(manager_util.comfyui_manager_path, "alter-list.json")

@@ -214,7 +215,7 @@ def print_comfyui_version():
        is_detached = repo.head.is_detached
        current_branch = repo.active_branch.name

        comfyui_tag = core.get_comfyui_tag()
        comfyui_tag = context.get_comfyui_tag()

        try:
            if not os.environ.get('__COMFYUI_DESKTOP_VERSION__') and core.comfy_ui_commit_datetime.date() < core.comfy_ui_required_commit_datetime.date():

@@ -428,7 +429,7 @@ class TaskBatch:

    def finalize(self):
        if self.batch_id is not None:
            batch_path = os.path.join(core.manager_batch_history_path, self.batch_id+".json")
            batch_path = os.path.join(context.manager_batch_history_path, self.batch_id+".json")
            json_obj = {
                "batch": self.batch_json,
                "nodepack_result": self.nodepack_result,

@@ -810,7 +811,7 @@ async def queue_batch(request):

@routes.get("/v2/manager/queue/history_list")
async def get_history_list(request):
    history_path = core.manager_batch_history_path
    history_path = context.manager_batch_history_path

    try:
        files = [os.path.join(history_path, f) for f in os.listdir(history_path) if os.path.isfile(os.path.join(history_path, f))]

@@ -827,7 +828,7 @@ async def get_history_list(request):
async def get_history(request):
    try:
        json_name = request.rel_url.query["id"]+'.json'
        batch_path = os.path.join(core.manager_batch_history_path, json_name)
        batch_path = os.path.join(context.manager_batch_history_path, json_name)

        with open(batch_path, 'r', encoding='utf-8') as file:
            json_str = file.read()

@@ -1136,7 +1137,7 @@ async def fetch_externalmodel_list(request):

@PromptServer.instance.routes.get("/v2/snapshot/getlist")
async def get_snapshot_list(request):
    items = [f[:-5] for f in os.listdir(core.manager_snapshot_path) if f.endswith('.json')]
    items = [f[:-5] for f in os.listdir(context.manager_snapshot_path) if f.endswith('.json')]
    items.sort(reverse=True)
    return web.json_response({'items': items}, content_type='application/json')

@@ -1150,7 +1151,7 @@ async def remove_snapshot(request):
    try:
        target = request.rel_url.query["target"]

        path = os.path.join(core.manager_snapshot_path, f"{target}.json")
        path = os.path.join(context.manager_snapshot_path, f"{target}.json")
        if os.path.exists(path):
            os.remove(path)

@@ -1168,12 +1169,12 @@ async def restore_snapshot(request):
    try:
        target = request.rel_url.query["target"]

        path = os.path.join(core.manager_snapshot_path, f"{target}.json")
        path = os.path.join(context.manager_snapshot_path, f"{target}.json")
        if os.path.exists(path):
            if not os.path.exists(core.manager_startup_script_path):
                os.makedirs(core.manager_startup_script_path)
            if not os.path.exists(context.manager_startup_script_path):
                os.makedirs(context.manager_startup_script_path)

            target_path = os.path.join(core.manager_startup_script_path, "restore-snapshot.json")
            target_path = os.path.join(context.manager_startup_script_path, "restore-snapshot.json")
            shutil.copy(path, target_path)

            logging.info(f"Snapshot restore scheduled: `{target}`")

@@ -1729,7 +1730,7 @@ async def get_notice(request):
        if version_tag is not None:
            markdown_content += f"<HR>ComfyUI: {version_tag} [Desktop]"
        else:
            version_tag = core.get_comfyui_tag()
            version_tag = context.get_comfyui_tag()
            if version_tag is None:
                markdown_content += f"<HR>ComfyUI: {core.comfy_ui_revision}[{comfy_ui_hash[:6]}]({core.comfy_ui_commit_datetime.date()})"
            else:

@@ -1806,15 +1807,15 @@ async def save_component(request):
    name = data['name']
    workflow = data['workflow']

    if not os.path.exists(core.manager_components_path):
        os.mkdir(core.manager_components_path)
    if not os.path.exists(context.manager_components_path):
        os.mkdir(context.manager_components_path)

    if 'packname' in workflow and workflow['packname'] != '':
        sanitized_name = manager_util.sanitize_filename(workflow['packname']) + '.pack'
    else:
        sanitized_name = manager_util.sanitize_filename(name) + '.json'

    filepath = os.path.join(core.manager_components_path, sanitized_name)
    filepath = os.path.join(context.manager_components_path, sanitized_name)
    components = {}
    if os.path.exists(filepath):
        with open(filepath) as f:

@@ -1831,14 +1832,14 @@ async def save_component(request):

@routes.post("/v2/manager/component/loads")
async def load_components(request):
    if os.path.exists(core.manager_components_path):
    if os.path.exists(context.manager_components_path):
        try:
            json_files = [f for f in os.listdir(core.manager_components_path) if f.endswith('.json')]
            pack_files = [f for f in os.listdir(core.manager_components_path) if f.endswith('.pack')]
            json_files = [f for f in os.listdir(context.manager_components_path) if f.endswith('.json')]
            pack_files = [f for f in os.listdir(context.manager_components_path) if f.endswith('.pack')]

            components = {}
            for json_file in json_files + pack_files:
                file_path = os.path.join(core.manager_components_path, json_file)
                file_path = os.path.join(context.manager_components_path, json_file)
                with open(file_path, 'r') as file:
                    try:
                        # When there is a conflict between the .pack and the .json, the pack takes precedence and overrides.

@@ -1926,7 +1927,7 @@ async def default_cache_update():

threading.Thread(target=lambda: asyncio.run(default_cache_update())).start()

if not os.path.exists(core.manager_config_path):
if not os.path.exists(context.manager_config_path):
    core.get_config()
    core.write_config()
@@ -1,4 +1,5 @@
import mimetypes
from ..common import context
from . import manager_core as core

import os

@@ -66,10 +67,10 @@ async def share_option(request):


def get_openart_auth():
    if not os.path.exists(os.path.join(core.manager_files_path, ".openart_key")):
    if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
        return None
    try:
        with open(os.path.join(core.manager_files_path, ".openart_key"), "r") as f:
        with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
            openart_key = f.read().strip()
            return openart_key if openart_key else None
    except:

@@ -77,10 +78,10 @@ def get_openart_auth():


def get_matrix_auth():
    if not os.path.exists(os.path.join(core.manager_files_path, "matrix_auth")):
    if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
        return None
    try:
        with open(os.path.join(core.manager_files_path, "matrix_auth"), "r") as f:
        with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
            matrix_auth = f.read()
            homeserver, username, password = matrix_auth.strip().split("\n")
            if not homeserver or not username or not password:

@@ -95,10 +96,10 @@ def get_matrix_auth():


def get_comfyworkflows_auth():
    if not os.path.exists(os.path.join(core.manager_files_path, "comfyworkflows_sharekey")):
    if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
        return None
    try:
        with open(os.path.join(core.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
        with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
            share_key = f.read()
            if not share_key.strip():
                return None

@@ -108,10 +109,10 @@ def get_comfyworkflows_auth():


def get_youml_settings():
    if not os.path.exists(os.path.join(core.manager_files_path, ".youml")):
    if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
        return None
    try:
        with open(os.path.join(core.manager_files_path, ".youml"), "r") as f:
        with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
            youml_settings = f.read().strip()
            return youml_settings if youml_settings else None
    except:

@@ -119,7 +120,7 @@ def get_youml_settings():


def set_youml_settings(settings):
    with open(os.path.join(core.manager_files_path, ".youml"), "w") as f:
    with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
        f.write(settings)


@@ -136,7 +137,7 @@ async def api_get_openart_auth(request):
async def api_set_openart_auth(request):
    json_data = await request.json()
    openart_key = json_data['openart_key']
    with open(os.path.join(core.manager_files_path, ".openart_key"), "w") as f:
    with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
        f.write(openart_key)
    return web.Response(status=200)

@@ -179,14 +180,14 @@ async def api_get_comfyworkflows_auth(request):
@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
    json_data = await request.json()
    with open(os.path.join(core.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
        json.dump(json_data, file, indent=4)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
    with open(os.path.join(core.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
        data = json.load(file)
    return web.Response(status=200, text=json.dumps(data))

@@ -195,12 +196,12 @@ def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
    password = json_data['password']
    with open(os.path.join(core.manager_files_path, "matrix_auth"), "w") as f:
    with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
        f.write("\n".join([homeserver, username, password]))


def set_comfyworkflows_auth(comfyworkflows_sharekey):
    with open(os.path.join(core.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
    with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
        f.write(comfyworkflows_sharekey)
comfyui_manager/legacy/__init__.py (new file, 0 lines)

comfyui_manager/legacy/manager_core.py (new file, 3248 lines; file diff suppressed because it is too large)

comfyui_manager/legacy/manager_server.py (new file, 1937 lines; file diff suppressed because it is too large)

comfyui_manager/legacy/share_3rdparty.py (new file, 386 lines)
@@ -0,0 +1,386 @@
import mimetypes
from ..common import context
from . import manager_core as core

import os
from aiohttp import web
import aiohttp
import json
import hashlib

import folder_paths
from server import PromptServer


def extract_model_file_names(json_data):
    """Extract unique file names from the input JSON data."""
    file_names = set()
    model_filename_extensions = {'.safetensors', '.ckpt', '.pt', '.pth', '.bin'}

    # Recursively search for file names in the JSON data
    def recursive_search(data):
        if isinstance(data, dict):
            for value in data.values():
                recursive_search(value)
        elif isinstance(data, list):
            for item in data:
                recursive_search(item)
        elif isinstance(data, str) and '.' in data:
            file_names.add(os.path.basename(data))  # file_names.add(data)

    recursive_search(json_data)
    return [f for f in list(file_names) if os.path.splitext(f)[1] in model_filename_extensions]


def find_file_paths(base_dir, file_names):
    """Find the paths of the files in the base directory."""
    file_paths = {}

    for root, dirs, files in os.walk(base_dir):
        # Exclude certain directories
        dirs[:] = [d for d in dirs if d not in ['.git']]

        for file in files:
            if file in file_names:
                file_paths[file] = os.path.join(root, file)
    return file_paths


def compute_sha256_checksum(filepath):
    """Compute the SHA256 checksum of a file, in chunks"""
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            sha256.update(chunk)
    return sha256.hexdigest()


@PromptServer.instance.routes.get("/v2/manager/share_option")
async def share_option(request):
    if "value" in request.rel_url.query:
        core.get_config()['share_option'] = request.rel_url.query['value']
        core.write_config()
    else:
        return web.Response(text=core.get_config()['share_option'], status=200)

    return web.Response(status=200)


def get_openart_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
            openart_key = f.read().strip()
            return openart_key if openart_key else None
    except:
        return None


def get_matrix_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
            matrix_auth = f.read()
            homeserver, username, password = matrix_auth.strip().split("\n")
            if not homeserver or not username or not password:
                return None
            return {
                "homeserver": homeserver,
                "username": username,
                "password": password,
            }
    except:
        return None


def get_comfyworkflows_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
            share_key = f.read()
            if not share_key.strip():
                return None
            return share_key
    except:
        return None


def get_youml_settings():
    if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
            youml_settings = f.read().strip()
            return youml_settings if youml_settings else None
    except:
        return None


def set_youml_settings(settings):
    with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
        f.write(settings)


@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
async def api_get_openart_auth(request):
    # print("Getting stored Matrix credentials...")
    openart_key = get_openart_auth()
    if not openart_key:
        return web.Response(status=404)
    return web.json_response({"openart_key": openart_key})


@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
async def api_set_openart_auth(request):
    json_data = await request.json()
    openart_key = json_data['openart_key']
    with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
        f.write(openart_key)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
    # print("Getting stored Matrix credentials...")
    matrix_auth = get_matrix_auth()
    if not matrix_auth:
        return web.Response(status=404)
    return web.json_response(matrix_auth)


@PromptServer.instance.routes.get("/v2/manager/youml/settings")
async def api_get_youml_settings(request):
    youml_settings = get_youml_settings()
    if not youml_settings:
        return web.Response(status=404)
    return web.json_response(json.loads(youml_settings))


@PromptServer.instance.routes.post("/v2/manager/youml/settings")
async def api_set_youml_settings(request):
    json_data = await request.json()
    set_youml_settings(json.dumps(json_data))
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
    # Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
    # in the same directory as the ComfyUI base folder
    # print("Getting stored Comfyworkflows.com auth...")
    comfyworkflows_auth = get_comfyworkflows_auth()
    if not comfyworkflows_auth:
        return web.Response(status=404)
    return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})


@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
    json_data = await request.json()
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
        json.dump(json_data, file, indent=4)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
        data = json.load(file)
    return web.Response(status=200, text=json.dumps(data))


def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
    password = json_data['password']
    with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
        f.write("\n".join([homeserver, username, password]))


def set_comfyworkflows_auth(comfyworkflows_sharekey):
    with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
        f.write(comfyworkflows_sharekey)


def has_provided_matrix_auth(matrix_auth):
    return matrix_auth['homeserver'].strip() and matrix_auth['username'].strip() and matrix_auth['password'].strip()


def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
    return comfyworkflows_sharekey.strip()


@PromptServer.instance.routes.post("/v2/manager/share")
async def share_art(request):
    # get json data
    json_data = await request.json()

    matrix_auth = json_data['matrix_auth']
    comfyworkflows_sharekey = json_data['cw_auth']['cw_sharekey']

    set_matrix_auth(matrix_auth)
    set_comfyworkflows_auth(comfyworkflows_sharekey)

    share_destinations = json_data['share_destinations']
    credits = json_data['credits']
    title = json_data['title']
    description = json_data['description']
    is_nsfw = json_data['is_nsfw']
    prompt = json_data['prompt']
    potential_outputs = json_data['potential_outputs']
    selected_output_index = json_data['selected_output_index']

    try:
        output_to_share = potential_outputs[int(selected_output_index)]
    except:
        # for now, pick the first output
        output_to_share = potential_outputs[0]

    assert output_to_share['type'] in ('image', 'output')
    output_dir = folder_paths.get_output_directory()

    if output_to_share['type'] == 'image':
        asset_filename = output_to_share['image']['filename']
        asset_subfolder = output_to_share['image']['subfolder']

        if output_to_share['image']['type'] == 'temp':
            output_dir = folder_paths.get_temp_directory()
    else:
        asset_filename = output_to_share['output']['filename']
        asset_subfolder = output_to_share['output']['subfolder']

    if asset_subfolder:
        asset_filepath = os.path.join(output_dir, asset_subfolder, asset_filename)
    else:
        asset_filepath = os.path.join(output_dir, asset_filename)

    # get the mime type of the asset
    assetFileType = mimetypes.guess_type(asset_filepath)[0]

    share_website_host = "UNKNOWN"
    if "comfyworkflows" in share_destinations:
        share_website_host = "https://comfyworkflows.com"
        share_endpoint = f"{share_website_host}/api"

        # get presigned urls
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.post(
                f"{share_endpoint}/get_presigned_urls",
                json={
                    "assetFileName": asset_filename,
                    "assetFileType": assetFileType,
                    "workflowJsonFileName": 'workflow.json',
                    "workflowJsonFileType": 'application/json',
                },
            ) as resp:
                assert resp.status == 200
                presigned_urls_json = await resp.json()
                assetFilePresignedUrl = presigned_urls_json["assetFilePresignedUrl"]
                assetFileKey = presigned_urls_json["assetFileKey"]
                workflowJsonFilePresignedUrl = presigned_urls_json["workflowJsonFilePresignedUrl"]
                workflowJsonFileKey = presigned_urls_json["workflowJsonFileKey"]

        # upload asset
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(assetFilePresignedUrl, data=open(asset_filepath, "rb")) as resp:
                assert resp.status == 200

        # upload workflow json
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(workflowJsonFilePresignedUrl, data=json.dumps(prompt['workflow']).encode('utf-8')) as resp:
                assert resp.status == 200

        model_filenames = extract_model_file_names(prompt['workflow'])
        model_file_paths = find_file_paths(folder_paths.base_path, model_filenames)

        models_info = {}
        for filename, filepath in model_file_paths.items():
            models_info[filename] = {
                "filename": filename,
                "sha256_checksum": compute_sha256_checksum(filepath),
                "relative_path": os.path.relpath(filepath, folder_paths.base_path),
            }

        # make a POST request to /api/upload_workflow with form data key values
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            form = aiohttp.FormData()
            if comfyworkflows_sharekey:
                form.add_field("shareKey", comfyworkflows_sharekey)
            form.add_field("source", "comfyui_manager")
            form.add_field("assetFileKey", assetFileKey)
            form.add_field("assetFileType", assetFileType)
            form.add_field("workflowJsonFileKey", workflowJsonFileKey)
            form.add_field("sharedWorkflowWorkflowJsonString", json.dumps(prompt['workflow']))
            form.add_field("sharedWorkflowPromptJsonString", json.dumps(prompt['output']))
            form.add_field("shareWorkflowCredits", credits)
            form.add_field("shareWorkflowTitle", title)
            form.add_field("shareWorkflowDescription", description)
            form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
            form.add_field("currentSnapshot", json.dumps(await core.get_current_snapshot()))
            form.add_field("modelsInfo", json.dumps(models_info))

            async with session.post(
                f"{share_endpoint}/upload_workflow",
                data=form,
            ) as resp:
                assert resp.status == 200
                upload_workflow_json = await resp.json()
                workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            from matrix_client.api import MatrixHttpApi
            from matrix_client.client import MatrixClient

            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
            homeserver = homeserver.replace("http://", "https://")
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = MatrixClient(homeserver)
            try:
                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
                if not token:
                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
            except:
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            matrix = MatrixHttpApi(homeserver, token=token)
            with open(asset_filepath, 'rb') as f:
                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']

            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']

            text_content = ""
            if title:
                text_content += f"{title}\n"
            if description:
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            matrix.send_message(comfyui_share_room_id, text_content)
            matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
            matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
        except:
            import traceback
            traceback.print_exc()
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)

    return web.json_response({
        "comfyworkflows": {
            "url": None if "comfyworkflows" not in share_destinations else f"{share_website_host}/workflows/{workflowId}",
        },
        "matrix": {
            "success": None if "matrix" not in share_destinations else True
        }
    }, content_type='application/json', status=200)
@@ -12,10 +12,10 @@ import ast
import logging
import traceback

from .glob import security_check
from .glob import manager_util
from .glob import cm_global
from .glob import manager_downloader
from .common import security_check
from .common import manager_util
from .common import cm_global
from .common import manager_downloader
import folder_paths

manager_util.add_python_path_to_env()