Modify the structure to be installable via pip.
This commit is contained in:
117
comfyui_manager/glob/cm_global.py
Normal file
117
comfyui_manager/glob/cm_global.py
Normal file
@@ -0,0 +1,117 @@
|
||||
import traceback
|
||||
|
||||
#
|
||||
# Global Var
|
||||
#
|
||||
# Usage:
|
||||
# import cm_global
|
||||
# cm_global.variables['comfyui.revision'] = 1832
|
||||
# print(f"log mode: {cm_global.variables['logger.enabled']}")
|
||||
#
|
||||
variables = {}
|
||||
|
||||
|
||||
#
|
||||
# Global API
|
||||
#
|
||||
# Usage:
|
||||
# [register API]
|
||||
# import cm_global
|
||||
#
|
||||
# def api_hello(msg):
|
||||
# print(f"hello: {msg}")
|
||||
# return msg
|
||||
#
|
||||
# cm_global.register_api('hello', api_hello)
|
||||
#
|
||||
# [use API]
|
||||
# import cm_global
|
||||
#
|
||||
# test = cm_global.try_call(api='hello', msg='an example')
|
||||
# print(f"'{test}' is returned")
|
||||
#
|
||||
|
||||
# Registry mapping API names to the callables extensions have exposed.
APIs = {}


def register_api(k, f):
    """Expose callable *f* under the name *k* for cross-extension use."""
    APIs[k] = f


def try_call(**kwargs):
    """Invoke a registered API by name.

    The target is selected via the ``api`` keyword; every remaining keyword
    argument is forwarded to the registered callable. Returns the callable's
    result, or None when no ``api`` keyword was given or the name is unknown.
    Exceptions raised by the callable are logged and re-raised.
    """
    if 'api' not in kwargs:
        return None

    api_name = kwargs['api']
    try:
        handler = APIs.get(api_name)
        if handler is None:
            print(f"WARN: The '{kwargs['api']}' API has not been registered.")
        else:
            # Drop the routing keyword so the handler sees only its own args.
            del kwargs['api']
            return handler(**kwargs)
    except Exception as e:
        print(f"ERROR: An exception occurred while calling the '{api_name}' API.")
        raise e
|
||||
|
||||
|
||||
#
|
||||
# Extension Info
|
||||
#
|
||||
# Usage:
|
||||
# import cm_global
|
||||
#
|
||||
# cm_global.extension_infos['my_extension'] = {'version': [0, 1], 'name': 'me', 'description': 'example extension', }
|
||||
#
|
||||
# Metadata published by each extension: name -> info dict
# (e.g. {'version': [0, 1], 'name': 'me', 'description': '...'}).
extension_infos = {}

# Callbacks waiting for an extension that has not registered yet:
# extension name -> list of (handler-key, callable) pairs.
on_extension_registered_handlers = {}


def register_extension(extension_name, v):
    """Record extension metadata *v* and fire any callbacks waiting for it.

    Each pending callback is invoked as ``f(extension_name, v)``; failures are
    logged and do not stop the remaining callbacks. The pending list for this
    extension is discarded afterwards.
    """
    extension_infos[extension_name] = v

    pending = on_extension_registered_handlers.pop(extension_name, None)
    if pending is None:
        return

    for k, f in pending:
        try:
            f(extension_name, v)
        except Exception:
            print(f"[ERROR] '{k}' on_extension_registered_handlers")
            traceback.print_exc()


def add_on_extension_registered(k, extension_name, f):
    """Call ``f(extension_name, info)`` once *extension_name* is registered.

    If the extension is already known the callback fires immediately
    (errors are logged, not raised); otherwise it is queued under
    handler-key *k* until register_extension() runs.
    """
    if extension_name in extension_infos:
        # Already registered: fire right away with the stored metadata.
        try:
            f(extension_name, extension_infos[extension_name])
        except Exception:
            print(f"[ERROR] '{k}' on_extension_registered_handler")
            traceback.print_exc()
        return

    on_extension_registered_handlers.setdefault(extension_name, []).append((k, f))
|
||||
|
||||
|
||||
def add_on_revision_detected(k, f):
    """Call ``f(revision)`` when the ComfyUI revision is (or becomes) known.

    If ``variables['comfyui.revision']`` is already set the callback fires
    immediately, with errors logged rather than raised. Otherwise the
    (key, callable) pair is queued under
    ``variables['cm.on_revision_detected_handler']`` for later dispatch.
    """
    if 'comfyui.revision' in variables:
        try:
            f(variables['comfyui.revision'])
        except Exception:
            print(f"[ERROR] '{k}' on_revision_detected_handler")
            traceback.print_exc()
    else:
        # BUGFIX: the handler list is never initialized in this module, so a
        # plain indexed append raised KeyError when the manager had not
        # pre-seeded it; create the list on demand instead.
        variables.setdefault('cm.on_revision_detected_handler', []).append((k, f))
|
||||
|
||||
|
||||
error_dict = {}
|
||||
|
||||
disable_front = False
|
||||
254
comfyui_manager/glob/cnr_utils.py
Normal file
254
comfyui_manager/glob/cnr_utils.py
Normal file
@@ -0,0 +1,254 @@
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from typing import List
|
||||
|
||||
from . import manager_core
|
||||
from . import manager_util
|
||||
|
||||
import requests
|
||||
import toml
|
||||
|
||||
base_url = "https://api.comfy.org"
|
||||
|
||||
|
||||
lock = asyncio.Lock()
|
||||
|
||||
is_cache_loading = False
|
||||
|
||||
async def get_cnr_data(cache_mode=True, dont_wait=True):
    """Fetch the ComfyRegistry node listing, with a cache fallback on timeout.

    Delegates to _get_cnr_data(); if that fetch times out, retries in
    cache-only mode (cache_mode=True, dont_wait=True) so stale data is
    served instead of failing.
    """
    try:
        return await _get_cnr_data(cache_mode, dont_wait)
    except asyncio.TimeoutError:
        print("A timeout occurred during the fetch process from ComfyRegistry.")
        return await _get_cnr_data(cache_mode=True, dont_wait=True)  # timeout fallback
|
||||
|
||||
async def _get_cnr_data(cache_mode=True, dont_wait=True):
    """Retrieve the full node listing from the ComfyRegistry API.

    cache_mode: when True, consult the on-disk cache before hitting the
        network and persist a fresh fetch back to it.
    dont_wait: when True and the cache is being refreshed, serve the stale
        cache (or {} if nothing is cached yet) instead of fetching.

    Returns the list of node dicts (or {} in the not-cached/dont_wait case,
    or an empty dict when the registry is unreachable).
    """
    global is_cache_loading

    uri = f'{base_url}/nodes'

    async def fetch_all():
        # Page through /nodes until totalPages is reached, keyed by node id
        # so duplicates across pages collapse.
        remained = True
        page = 1

        full_nodes = {}

        # Determine form factor based on environment and platform
        is_desktop = bool(os.environ.get('__COMFYUI_DESKTOP_VERSION__'))
        system = platform.system().lower()
        is_windows = system == 'windows'
        is_mac = system == 'darwin'
        is_linux = system == 'linux'

        # Get ComfyUI version tag
        if is_desktop:
            # extract version from pyproject.toml instead of git tag
            comfyui_ver = manager_core.get_current_comfyui_ver() or 'unknown'
        else:
            comfyui_ver = manager_core.get_comfyui_tag() or 'unknown'

        if is_desktop:
            if is_windows:
                form_factor = 'desktop-win'
            elif is_mac:
                form_factor = 'desktop-mac'
            else:
                form_factor = 'other'
        else:
            if is_windows:
                form_factor = 'git-windows'
            elif is_mac:
                form_factor = 'git-mac'
            elif is_linux:
                form_factor = 'git-linux'
            else:
                form_factor = 'other'

        while remained:
            # Add comfyui_version and form_factor to the API request
            sub_uri = f'{base_url}/nodes?page={page}&limit=30&comfyui_version={comfyui_ver}&form_factor={form_factor}'
            sub_json_obj = await asyncio.wait_for(manager_util.get_data_with_cache(sub_uri, cache_mode=False, silent=True, dont_cache=True), timeout=30)
            remained = page < sub_json_obj['totalPages']

            for x in sub_json_obj['nodes']:
                full_nodes[x['id']] = x

            if page % 5 == 0:
                print(f"FETCH ComfyRegistry Data: {page}/{sub_json_obj['totalPages']}")

            page += 1
            # BUGFIX: time.sleep() blocked the event loop for every page of
            # the paged fetch; yield to the loop instead.
            await asyncio.sleep(0.5)

        print("FETCH ComfyRegistry Data [DONE]")

        # Nodes without a published version are treated as nightly builds.
        for v in full_nodes.values():
            if 'latest_version' not in v:
                v['latest_version'] = dict(version='nightly')

        return {'nodes': list(full_nodes.values())}

    if cache_mode:
        # NOTE(review): the early cache returns below leave is_cache_loading
        # True; presumably another task performs the real refresh — confirm.
        is_cache_loading = True
        cache_state = manager_util.get_cache_state(uri)

        if dont_wait:
            if cache_state == 'not-cached':
                return {}
            else:
                print("[ComfyUI-Manager] The ComfyRegistry cache update is still in progress, so an outdated cache is being used.")
                with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
                    return json.load(json_file)['nodes']

        if cache_state == 'cached':
            with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
                return json.load(json_file)['nodes']

    try:
        json_obj = await fetch_all()
        manager_util.save_to_cache(uri, json_obj)
        return json_obj['nodes']
    # BUGFIX: a bare `except:` also swallowed asyncio.CancelledError,
    # preventing task cancellation from propagating.
    except Exception:
        res = {}
        print("Cannot connect to comfyregistry.")
    finally:
        if cache_mode:
            is_cache_loading = False

    return res
|
||||
|
||||
|
||||
@dataclass
class NodeVersion:
    """A single published version of a registry node pack."""
    changelog: str            # release notes; "" when the API omitted them
    dependencies: List[str]   # pip requirement strings
    deprecated: bool
    id: str                   # node pack identifier
    version: str
    download_url: str         # archive URL; "" when the API omitted it


def map_node_version(api_node_version):
    """
    Build a NodeVersion from the 'node_version' part of an API response.

    'id' and 'version' are mandatory — a missing key raises KeyError —
    while every other field falls back to a benign default.
    """
    return NodeVersion(
        changelog=api_node_version.get("changelog", ""),
        dependencies=api_node_version.get("dependencies", []),
        deprecated=api_node_version.get("deprecated", False),
        id=api_node_version["id"],
        version=api_node_version["version"],
        download_url=api_node_version.get("downloadUrl", ""),
    )
|
||||
|
||||
|
||||
def install_node(node_id, version=None):
    """
    Ask the registry which version of a node should be installed.

    Args:
        node_id (str): The unique identifier of the node.
        version (str, optional): Specific version to retrieve; the latest
            version is returned when omitted.

    Returns:
        NodeVersion on HTTP 200, otherwise None.
    """
    url = f"{base_url}/nodes/{node_id}/install"
    if version is not None:
        url = f"{url}?version={version}"

    response = requests.get(url)
    if response.status_code != 200:
        return None

    # Convert the API response to a NodeVersion object
    return map_node_version(response.json())
|
||||
|
||||
|
||||
def all_versions_of_node(node_id):
    """Return every active/pending version of *node_id* from the registry.

    Returns the decoded JSON list on HTTP 200, otherwise None.
    """
    url = (
        f"{base_url}/nodes/{node_id}/versions"
        "?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
    )

    response = requests.get(url)
    return response.json() if response.status_code == 200 else None
|
||||
|
||||
|
||||
def read_cnr_info(fullpath):
    """Read ComfyRegistry (CNR) pack metadata from an installed node pack.

    A directory counts as a CNR pack only when it has both a
    `pyproject.toml` and a `.tracking` marker. Returns
    {'id': name, 'version': normalized-version, 'url': repository-or-None}
    or None when the directory is not a valid CNR pack (any parse error is
    deliberately converted to None by the broad except).
    """
    try:
        toml_path = os.path.join(fullpath, 'pyproject.toml')
        tracking_path = os.path.join(fullpath, '.tracking')

        if not os.path.exists(toml_path) or not os.path.exists(tracking_path):
            return None  # not valid CNR node pack

        with open(toml_path, "r", encoding="utf-8") as f:
            data = toml.load(f)

        project = data.get('project', {})
        # NOTE(review): a missing 'name' makes .strip() raise AttributeError,
        # which the except below turns into None — presumably intentional.
        name = project.get('name').strip().lower()

        # normalize version
        # for example: 2.5 -> 2.5.0
        version = str(manager_util.StrictVersion(project.get('version')))

        urls = project.get('urls', {})
        repository = urls.get('Repository')

        if name and version:  # repository is optional
            return {
                "id": name,
                "version": version,
                "url": repository
            }

        return None
    except Exception:
        return None  # not valid CNR node pack
|
||||
|
||||
|
||||
def generate_cnr_id(fullpath, cnr_id):
    """Persist *cnr_id* into `<repo>/.git/.cnr-id`, once.

    An existing marker file is left untouched; write failures are logged,
    never raised.
    """
    cnr_id_path = os.path.join(fullpath, '.git', '.cnr-id')
    try:
        if os.path.exists(cnr_id_path):
            return
        with open(cnr_id_path, "w") as marker:
            return marker.write(cnr_id)
    except:
        print(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
|
||||
|
||||
|
||||
def read_cnr_id(fullpath):
    """Return the CNR id stored in `<repo>/.git/.cnr-id`, or None.

    Missing files and read errors both yield None.
    """
    cnr_id_path = os.path.join(fullpath, '.git', '.cnr-id')
    try:
        if os.path.exists(cnr_id_path):
            with open(cnr_id_path) as marker:
                return marker.read().strip()
    except:
        pass

    return None
|
||||
|
||||
526
comfyui_manager/glob/git_helper.py
Normal file
526
comfyui_manager/glob/git_helper.py
Normal file
@@ -0,0 +1,526 @@
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
import traceback
|
||||
|
||||
import git
|
||||
import json
|
||||
import yaml
|
||||
import requests
|
||||
from tqdm.auto import tqdm
|
||||
from git.remote import RemoteProgress
|
||||
|
||||
|
||||
comfy_path = os.environ.get('COMFYUI_PATH')
|
||||
git_exe_path = os.environ.get('GIT_EXE_PATH')
|
||||
|
||||
if comfy_path is None:
|
||||
print("git_helper: environment variable 'COMFYUI_PATH' is not specified.")
|
||||
exit(-1)
|
||||
|
||||
if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):
|
||||
print("git_helper: '{comfy_path}' is not a valid 'COMFYUI_PATH' location.")
|
||||
exit(-1)
|
||||
|
||||
def download_url(url, dest_folder, filename=None):
    """Stream *url* into *dest_folder*/*filename*.

    The folder is created on demand; when *filename* is omitted it is taken
    from the URL's last path component. A non-200 response is logged, not
    raised.
    """
    # Ensure the destination folder exists
    if not os.path.exists(dest_folder):
        os.makedirs(dest_folder)

    # Extract filename from URL if not provided
    if filename is None:
        filename = os.path.basename(url)

    dest_path = os.path.join(dest_folder, filename)

    response = requests.get(url, stream=True)
    if response.status_code != 200:
        print(f"Failed to download file from {url}")
        return

    # 1 KiB chunks keep memory flat for large files; skip keep-alive chunks.
    with open(dest_path, 'wb') as out:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                out.write(chunk)
|
||||
|
||||
|
||||
nodelist_path = os.path.join(os.path.dirname(__file__), "custom-node-list.json")
|
||||
working_directory = os.getcwd()
|
||||
|
||||
if os.path.basename(working_directory) != 'custom_nodes':
|
||||
print("WARN: This script should be executed in custom_nodes dir")
|
||||
print(f"DBG: INFO {working_directory}")
|
||||
print(f"DBG: INFO {sys.argv}")
|
||||
# exit(-1)
|
||||
|
||||
|
||||
class GitProgress(RemoteProgress):
    """GitPython progress handler that renders clone/fetch progress with tqdm."""

    def __init__(self):
        super().__init__()
        # ascii=True keeps the bar renderable on consoles without Unicode.
        self.pbar = tqdm(ascii=True)

    def update(self, op_code, cur_count, max_count=None, message=''):
        # Callback invoked by GitPython; mirror the counters into the bar
        # and force a redraw on the first line (pos=0).
        self.pbar.total = max_count
        self.pbar.n = cur_count
        self.pbar.pos = 0
        self.pbar.refresh()
|
||||
|
||||
|
||||
def gitclone(custom_nodes_path, url, target_hash=None, repo_path=None):
    """Clone *url* (with submodules) under *custom_nodes_path*.

    repo_path overrides the destination; otherwise the repo name derived
    from the URL is used. When target_hash is given, that commit is checked
    out after the clone.
    """
    repo_name = os.path.splitext(os.path.basename(url))[0]

    if repo_path is None:
        repo_path = os.path.join(custom_nodes_path, repo_name)

    # Clone the repository from the remote URL
    repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress())

    if target_hash is not None:
        print(f"CHECKOUT: {repo_name} [{target_hash}]")
        repo.git.checkout(target_hash)

    # Release cached git processes/handles (important on Windows).
    repo.git.clear_cache()
    repo.close()
|
||||
|
||||
|
||||
def gitcheck(path, do_fetch=False):
    """Report whether the repo at *path* has upstream updates.

    Communicates by printing "CUSTOM NODE CHECK: True|False|Error" to
    stdout, which the parent ComfyUI-Manager process parses. Detached HEADs
    and branches without a matching remote ref are reported as updatable.
    """
    try:
        # Fetch the latest commits from the remote repository
        repo = git.Repo(path)

        if repo.head.is_detached:
            print("CUSTOM NODE CHECK: True")
            return

        current_branch = repo.active_branch
        branch_name = current_branch.name

        remote_name = current_branch.tracking_branch().remote_name
        remote = repo.remote(name=remote_name)

        if do_fetch:
            remote.fetch()

        # Get the current commit hash and the commit hash of the remote branch
        commit_hash = repo.head.commit.hexsha

        if f'{remote_name}/{branch_name}' in repo.refs:
            remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha
        else:
            print("CUSTOM NODE CHECK: True")  # non default branch is treated as updatable
            return

        # Compare the commit hashes to determine if the local repository is behind the remote repository
        # NOTE(review): when the hashes are equal nothing is printed — confirm
        # the caller treats missing output as "no update".
        if commit_hash != remote_commit_hash:
            # Get the commit dates
            commit_date = repo.head.commit.committed_datetime
            remote_commit_date = repo.refs[f'{remote_name}/{branch_name}'].object.committed_datetime

            # Compare the commit dates to determine if the local repository is behind the remote repository
            if commit_date < remote_commit_date:
                print("CUSTOM NODE CHECK: True")
            else:
                print("CUSTOM NODE CHECK: False")
    except Exception as e:
        print(e)
        print("CUSTOM NODE CHECK: Error")
|
||||
|
||||
|
||||
def get_remote_name(repo):
    """Choose the remote to track for *repo*.

    Preference order: 'origin', then 'upstream', then the first configured
    remote. Returns the remote's name, or None (with a log message) when the
    repository has no remotes at all.
    """
    available_remotes = [remote.name for remote in repo.remotes]
    if 'origin' in available_remotes:
        return 'origin'
    if 'upstream' in available_remotes:
        return 'upstream'
    if available_remotes:
        return available_remotes[0]

    # BUGFIX: the old "list the available remotes" else-branch was
    # unreachable — any non-empty remote list already returned above — so
    # only the no-remotes case remains.
    print(f"[ComfyUI-Manager] No remotes are configured for this repository: {repo.working_dir}")
    return None
|
||||
|
||||
|
||||
def switch_to_default_branch(repo):
    """Best-effort checkout of the repo's default branch.

    Tries, in order: the remote's HEAD symbolic ref, a local 'master',
    'master' created from the remote, a local 'main', then 'main' created
    from the remote. Returns True on the first success, False otherwise.
    """
    remote_name = get_remote_name(repo)

    try:
        if remote_name is None:
            return False

        # Resolve e.g. refs/remotes/origin/HEAD -> 'main' and check it out.
        default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
        repo.git.checkout(default_branch)
        return True
    except:
        # try checkout master
        # try checkout main if failed
        try:
            repo.git.checkout(repo.heads.master)
            return True
        except:
            try:
                if remote_name is not None:
                    repo.git.checkout('-b', 'master', f'{remote_name}/master')
                    return True
            except:
                try:
                    repo.git.checkout(repo.heads.main)
                    return True
                except:
                    try:
                        if remote_name is not None:
                            repo.git.checkout('-b', 'main', f'{remote_name}/main')
                            return True
                    except:
                        pass

    print("[ComfyUI Manager] Failed to switch to the default branch")
    return False
|
||||
|
||||
|
||||
def gitpull(path):
    """Update the repo at *path* and report the result on stdout.

    Prints "CUSTOM NODE PULL: Success|Fail|None" for the parent process to
    parse ("None" = already up to date). Dirty work trees are stashed first;
    detached or non-tracking branches are switched to the default branch
    before pulling. Raises ValueError when *path* is not a git repository.
    """
    # Check if the path is a git repository
    if not os.path.exists(os.path.join(path, '.git')):
        raise ValueError('Not a git repository')

    # Pull the latest changes from the remote repository
    repo = git.Repo(path)
    if repo.is_dirty():
        # Stash local edits so the pull cannot conflict with them.
        print(f"STASH: '{path}' is dirty.")
        repo.git.stash()

    commit_hash = repo.head.commit.hexsha
    try:
        if repo.head.is_detached:
            switch_to_default_branch(repo)

        current_branch = repo.active_branch
        branch_name = current_branch.name

        remote_name = current_branch.tracking_branch().remote_name
        remote = repo.remote(name=remote_name)

        # Branch has no matching remote ref: fall back to the default branch.
        if f'{remote_name}/{branch_name}' not in repo.refs:
            switch_to_default_branch(repo)
            current_branch = repo.active_branch
            branch_name = current_branch.name

        remote.fetch()
        if f'{remote_name}/{branch_name}' in repo.refs:
            remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha
        else:
            print("CUSTOM NODE PULL: Fail")  # update fail
            return

        if commit_hash == remote_commit_hash:
            print("CUSTOM NODE PULL: None")  # there is no update
            repo.close()
            return

        remote.pull()

        repo.git.submodule('update', '--init', '--recursive')
        new_commit_hash = repo.head.commit.hexsha

        if commit_hash != new_commit_hash:
            print("CUSTOM NODE PULL: Success")  # update success
        else:
            print("CUSTOM NODE PULL: Fail")  # update fail
    except Exception as e:
        print(e)
        print("CUSTOM NODE PULL: Fail")  # unknown git error

    repo.close()
|
||||
|
||||
|
||||
def checkout_comfyui_hash(target_hash):
    """Check out the ComfyUI core repository at *target_hash*.

    No-op when the repo is already at that commit; checkout errors are
    logged, not raised.
    """
    repo = git.Repo(comfy_path)
    commit_hash = repo.head.commit.hexsha

    if commit_hash != target_hash:
        try:
            print(f"CHECKOUT: ComfyUI [{target_hash}]")
            repo.git.checkout(target_hash)
        except git.GitCommandError as e:
            print(f"Error checking out the ComfyUI: {str(e)}")
|
||||
|
||||
|
||||
def checkout_custom_node_hash(git_custom_node_infos):
    """Restore every git-based custom node to the state a snapshot records.

    git_custom_node_infos maps a repo URL to {'hash': ..., 'disabled': ...}.
    For each directory under the working `custom_nodes` dir this:
      - disables packs absent from the snapshot (rename to *.disabled),
      - toggles the ".disabled" suffix to match the snapshot,
      - checks out the recorded commit when the pack stays enabled,
      - finally clones any enabled pack that is missing on disk.
    Per-pack failures are logged and skipped.
    """
    # Map repo directory name -> snapshot URL (strip a trailing ".git").
    repo_name_to_url = {}

    for url in git_custom_node_infos.keys():
        repo_name = url.split('/')[-1]

        if repo_name.endswith('.git'):
            repo_name = repo_name[:-4]

        repo_name_to_url[repo_name] = url

    for path in os.listdir(working_directory):
        # Never touch the manager itself.
        if path.endswith("ComfyUI-Manager"):
            continue

        fullpath = os.path.join(working_directory, path)

        if os.path.isdir(fullpath):
            is_disabled = path.endswith(".disabled")

            try:
                git_dir = os.path.join(fullpath, '.git')
                if not os.path.exists(git_dir):
                    continue

                need_checkout = False
                repo_name = os.path.basename(fullpath)

                if repo_name.endswith('.disabled'):
                    repo_name = repo_name[:-9]

                if repo_name not in repo_name_to_url:
                    if not is_disabled:
                        # should be disabled
                        print(f"DISABLE: {repo_name}")
                        new_path = fullpath + ".disabled"
                        os.rename(fullpath, new_path)
                        need_checkout = False
                else:
                    item = git_custom_node_infos[repo_name_to_url[repo_name]]
                    if item['disabled'] and is_disabled:
                        pass  # already in the desired (disabled) state
                    elif item['disabled'] and not is_disabled:
                        # disable
                        print(f"DISABLE: {repo_name}")
                        new_path = fullpath + ".disabled"
                        os.rename(fullpath, new_path)

                    elif not item['disabled'] and is_disabled:
                        # enable
                        print(f"ENABLE: {repo_name}")
                        new_path = fullpath[:-9]
                        os.rename(fullpath, new_path)
                        fullpath = new_path
                        need_checkout = True
                    else:
                        need_checkout = True

                if need_checkout:
                    repo = git.Repo(fullpath)
                    commit_hash = repo.head.commit.hexsha

                    if commit_hash != item['hash']:
                        print(f"CHECKOUT: {repo_name} [{item['hash']}]")
                        repo.git.checkout(item['hash'])

            except Exception:
                print(f"Failed to restore snapshots for the custom node '{path}'")

    # clone missing
    for k, v in git_custom_node_infos.items():
        if 'ComfyUI-Manager' in k:
            continue

        if not v['disabled']:
            repo_name = k.split('/')[-1]
            if repo_name.endswith('.git'):
                repo_name = repo_name[:-4]

            path = os.path.join(working_directory, repo_name)
            if not os.path.exists(path):
                print(f"CLONE: {path}")
                gitclone(working_directory, k, target_hash=v['hash'])
|
||||
|
||||
|
||||
def invalidate_custom_node_file(file_custom_node_infos):
    """Sync single-file (.py) custom nodes with a snapshot's enabled set.

    file_custom_node_infos is a list of {'filename': ..., 'disabled': ...}.
    Files not in the enabled set gain a ".disabled" suffix and vice versa;
    enabled files missing from disk are re-downloaded, but only for
    'copy'-style entries listed in custom-node-list.json.
    """
    global nodelist_path

    enabled_set = set()
    for item in file_custom_node_infos:
        if not item['disabled']:
            enabled_set.add(item['filename'])

    for path in os.listdir(working_directory):
        fullpath = os.path.join(working_directory, path)

        if not os.path.isdir(fullpath) and fullpath.endswith('.py'):
            if path not in enabled_set:
                print(f"DISABLE: {path}")
                new_path = fullpath+'.disabled'
                os.rename(fullpath, new_path)

        elif not os.path.isdir(fullpath) and fullpath.endswith('.py.disabled'):
            # Strip the ".disabled" suffix to recover the logical filename.
            path = path[:-9]
            if path in enabled_set:
                print(f"ENABLE: {path}")
                new_path = fullpath[:-9]
                os.rename(fullpath, new_path)

    # download missing: just support for 'copy' style
    py_to_url = {}

    with open(nodelist_path, 'r', encoding="UTF-8") as json_file:
        info = json.load(json_file)
        for item in info['custom_nodes']:
            if item['install_type'] == 'copy':
                for url in item['files']:
                    if url.endswith('.py'):
                        py = url.split('/')[-1]
                        py_to_url[py] = url

    for item in file_custom_node_infos:
        filename = item['filename']
        if not item['disabled']:
            target_path = os.path.join(working_directory, filename)

            if not os.path.exists(target_path) and filename in py_to_url:
                url = py_to_url[filename]
                # NOTE(review): f-string has no placeholder — looks like it
                # was meant to interpolate the filename; confirm intent.
                print(f"DOWNLOAD: (unknown)")
                download_url(url, working_directory)
|
||||
|
||||
|
||||
def apply_snapshot(path):
    """Apply a .json/.yaml snapshot file: ComfyUI hash, git packs, file packs.

    Prints "APPLY SNAPSHOT: True|False" for the parent process to parse.
    Returns the snapshot's 'pips' mapping when present (so the caller can
    restore pip packages), otherwise None. All failures return None.
    """
    try:
        if os.path.exists(path):
            if not path.endswith('.json') and not path.endswith('.yaml'):
                print(f"Snapshot file not found: `{path}`")
                print("APPLY SNAPSHOT: False")
                return None

            with open(path, 'r', encoding="UTF-8") as snapshot_file:
                if path.endswith('.json'):
                    info = json.load(snapshot_file)
                elif path.endswith('.yaml'):
                    # NOTE(review): yaml snapshots are unwrapped from a
                    # 'custom_nodes' top-level key before the same fields are
                    # read — confirm the yaml schema nests them that way.
                    info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
                    info = info['custom_nodes']
                else:
                    # impossible case
                    print("APPLY SNAPSHOT: False")
                    return None

                comfyui_hash = info['comfyui']
                git_custom_node_infos = info['git_custom_nodes']
                file_custom_node_infos = info['file_custom_nodes']

                if comfyui_hash:
                    checkout_comfyui_hash(comfyui_hash)
                checkout_custom_node_hash(git_custom_node_infos)
                invalidate_custom_node_file(file_custom_node_infos)

                print("APPLY SNAPSHOT: True")
                if 'pips' in info and info['pips']:
                    return info['pips']
                else:
                    return None

        print(f"Snapshot file not found: `{path}`")
        print("APPLY SNAPSHOT: False")

        return None
    except Exception as e:
        print(e)
        traceback.print_exc()
        print("APPLY SNAPSHOT: False")

        return None
|
||||
|
||||
|
||||
def _pip_install(packages):
    """Run `pip install <packages...>` in the current interpreter.

    Returns pip's exit code (0 on success) or 1 when the call raised.
    """
    try:
        return subprocess.check_call([sys.executable, '-m', 'pip', 'install'] + packages)
    except Exception:
        return 1


def restore_pip_snapshot(pips, options):
    """Reinstall the pip packages recorded in a snapshot.

    pips: mapping of package name -> url, where '' means a plain PyPI
        package, 'file:...' a local url, and anything else a remote url.
    options: subset of {'--pip-non-url', '--pip-local-url',
        '--pip-non-local-url'} selecting which categories to restore.

    Packages that still fail after retries are reported on stdout.
    """
    non_url = []
    local_url = []
    non_local_url = []
    for k, v in pips.items():
        if v == "":
            non_url.append(k)
        elif v.startswith('file:'):
            local_url.append(v)
        else:
            non_local_url.append(v)

    failed = []

    if '--pip-non-url' in options:
        # try all at once; on failure retry one-by-one as a fallback
        if _pip_install(non_url) != 0:
            for x in non_url:
                if _pip_install([x]) != 0:
                    failed.append(x)

    if '--pip-non-local-url' in options:
        for x in non_local_url:
            if _pip_install([x]) != 0:
                failed.append(x)

    if '--pip-local-url' in options:
        for x in local_url:
            if _pip_install([x]) != 0:
                failed.append(x)

    print(f"Installation failed for pip packages: {failed}")
|
||||
|
||||
|
||||
def setup_environment():
    """Point GitPython at an explicitly configured git executable.

    Honors the GIT_EXE_PATH environment variable (read at module load) so
    the script does not depend on whichever `git` is first on PATH.
    """
    if git_exe_path is not None:
        git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=git_exe_path)
|
||||
|
||||
|
||||
setup_environment()


# Command-line dispatch: this script is spawned by ComfyUI-Manager as a
# subprocess and communicates its result through stdout and the exit code
# (0 = handled, -1 = error).
try:
    if sys.argv[1] == "--clone":
        # --clone <custom_nodes_path> <url> [repo_path]
        repo_path = None
        if len(sys.argv) > 4:
            repo_path = sys.argv[4]

        gitclone(sys.argv[2], sys.argv[3], repo_path=repo_path)
    elif sys.argv[1] == "--check":
        # --check <path>: report update availability without fetching
        gitcheck(sys.argv[2], False)
    elif sys.argv[1] == "--fetch":
        # --fetch <path>: fetch the remote, then report update availability
        gitcheck(sys.argv[2], True)
    elif sys.argv[1] == "--pull":
        gitpull(sys.argv[2])
    elif sys.argv[1] == "--apply-snapshot":
        # --apply-snapshot <snapshot-file> [pip restore flags...]
        options = set()
        for x in sys.argv:
            if x in ['--pip-non-url', '--pip-local-url', '--pip-non-local-url']:
                options.add(x)

        pips = apply_snapshot(sys.argv[2])

        if pips and len(options) > 0:
            restore_pip_snapshot(pips, options)
    sys.exit(0)
except Exception as e:
    print(e)
    sys.exit(-1)
|
||||
|
||||
|
||||
85
comfyui_manager/glob/git_utils.py
Normal file
85
comfyui_manager/glob/git_utils.py
Normal file
@@ -0,0 +1,85 @@
|
||||
import os
|
||||
import configparser
|
||||
|
||||
|
||||
GITHUB_ENDPOINT = os.getenv('GITHUB_ENDPOINT')
|
||||
|
||||
|
||||
def is_git_repo(path: str) -> bool:
    """Check if the path is a git repository.

    Deliberately checks for a `.git` entry instead of instantiating
    `git.Repo`, which keeps file handles open and causes locking problems
    on Windows.
    """
    git_marker = os.path.join(path, '.git')
    return os.path.exists(git_marker)
|
||||
|
||||
|
||||
def get_commit_hash(fullpath):
    """Resolve the current commit hash by reading `.git` files directly.

    Avoids GitPython (file locking on Windows). Follows a symbolic
    `ref: ...` HEAD to its branch file; a detached HEAD holds the hash
    itself. Returns "unknown" when HEAD or the ref cannot be resolved.
    NOTE(review): packed-refs are not consulted — refs only present in
    `.git/packed-refs` resolve to "unknown"; confirm that is acceptable.
    """
    git_head = os.path.join(fullpath, '.git', 'HEAD')
    if os.path.exists(git_head):
        with open(git_head) as f:
            line = f.readline()

        if line.startswith("ref: "):
            # Symbolic ref: the named file contains the commit hash.
            ref = os.path.join(fullpath, '.git', line[5:].strip())
            if os.path.exists(ref):
                with open(ref) as f2:
                    return f2.readline().strip()
            else:
                return "unknown"
        else:
            # Detached HEAD: the file holds the hash directly.
            # BUGFIX: strip the trailing newline that was previously
            # returned as part of the hash.
            return line.strip()

    return "unknown"
|
||||
|
||||
|
||||
def git_url(fullpath):
    """
    resolve version of unclassified custom node based on remote url in .git/config

    Returns the first remote's url from `.git/config`, or None when the
    config is missing or records no remote url.
    """
    git_config_path = os.path.join(fullpath, '.git', 'config')

    if not os.path.exists(git_config_path):
        return None

    # Set `strict=False` to allow duplicate `vscode-merge-base` sections, addressing <https://github.com/ltdrdata/ComfyUI-Manager/issues/1529>
    config = configparser.ConfigParser(strict=False)
    config.read(git_config_path)

    for section, entries in config.items():
        if section.startswith('remote ') and 'url' in entries:
            return entries['url']

    return None
|
||||
|
||||
|
||||
def normalize_url(url) -> str:
    """Canonicalize GitHub-ish URLs to `https://github.com/<author>/<repo>`.

    URLs that are not recognized as GitHub pass through unchanged.
    """
    github_id = normalize_to_github_id(url)
    if github_id is None:
        return url
    return f"https://github.com/{github_id}"


def normalize_to_github_id(url) -> str:
    """Extract '<author>/<repo>' from a GitHub (or GITHUB_ENDPOINT) URL.

    Handles https, trailing '.git', and ssh-style 'git@github.com:' URLs.
    Returns None for non-GitHub URLs.
    """
    is_github = 'github' in url or (GITHUB_ENDPOINT is not None and GITHUB_ENDPOINT in url)
    if not is_github:
        return None

    author = os.path.basename(os.path.dirname(url))
    # ssh-style 'git@github.com:author/repo' keeps the host in the dirname
    if author.startswith('git@github.com:'):
        author = author.split(':')[1]

    repo_name = os.path.basename(url)
    if repo_name.endswith('.git'):
        repo_name = repo_name[:-4]

    return f"{author}/{repo_name}"
|
||||
|
||||
|
||||
def get_url_for_clone(url):
    """Return the URL to actually clone from, honoring a GITHUB_ENDPOINT mirror.

    The URL is normalized first; when a mirror endpoint is configured,
    the `https://github.com` prefix is swapped for it.
    """
    url = normalize_url(url)

    if GITHUB_ENDPOINT is not None and url.startswith('https://github.com/'):
        url = GITHUB_ENDPOINT + url[18:]  # url[18:] -> remove `https://github.com`

    return url
|
||||
|
||||
3355
comfyui_manager/glob/manager_core.py
Normal file
3355
comfyui_manager/glob/manager_core.py
Normal file
File diff suppressed because it is too large
Load Diff
159
comfyui_manager/glob/manager_downloader.py
Normal file
159
comfyui_manager/glob/manager_downloader.py
Normal file
@@ -0,0 +1,159 @@
|
||||
import os
|
||||
from urllib.parse import urlparse
|
||||
import urllib
|
||||
import sys
|
||||
import logging
|
||||
import requests
|
||||
from huggingface_hub import HfApi
|
||||
from tqdm.auto import tqdm
|
||||
|
||||
|
||||
aria2 = os.getenv('COMFYUI_MANAGER_ARIA2_SERVER')
|
||||
HF_ENDPOINT = os.getenv('HF_ENDPOINT')
|
||||
|
||||
|
||||
if aria2 is not None:
|
||||
secret = os.getenv('COMFYUI_MANAGER_ARIA2_SECRET')
|
||||
url = urlparse(aria2)
|
||||
port = url.port
|
||||
host = url.scheme + '://' + url.hostname
|
||||
import aria2p
|
||||
|
||||
aria2 = aria2p.API(aria2p.Client(host=host, port=port, secret=secret))
|
||||
|
||||
|
||||
def basic_download_url(url, dest_folder: str, filename: str):
    '''
    Download file from url to dest_folder with filename
    using requests library.
    '''
    import requests

    # Ensure the destination folder exists
    if not os.path.exists(dest_folder):
        os.makedirs(dest_folder)

    dest_path = os.path.join(dest_folder, filename)

    response = requests.get(url, stream=True)
    if response.status_code != 200:
        raise Exception(f"Failed to download file from {url}")

    # Stream in 1 KiB chunks to keep memory flat; skip keep-alive chunks.
    with open(dest_path, 'wb') as target:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                target.write(chunk)
|
||||
|
||||
|
||||
def download_url(model_url: str, model_dir: str, filename: str):
    """Download `model_url` into `model_dir` as `filename`.

    Rewrites huggingface.co URLs to the HF_ENDPOINT mirror when one is
    configured, then dispatches to the aria2 RPC downloader when
    available, otherwise to torchvision's simple downloader.
    """
    if HF_ENDPOINT:
        model_url = model_url.replace('https://huggingface.co', HF_ENDPOINT)
        logging.info(f"model_url replaced by HF_ENDPOINT, new = {model_url}")

    if aria2:
        return aria2_download_url(model_url, model_dir, filename)

    # Fallback path: torchvision bundles a basic URL downloader.
    from torchvision.datasets.utils import download_url as torchvision_download_url
    return torchvision_download_url(model_url, model_dir, filename)
||||
def aria2_find_task(dir: str, filename: str):
    """Return the aria2 download task whose output path is dir/filename,
    or None when no such task is queued."""
    target = os.path.join(dir, filename)

    for task in aria2.get_downloads():
        # Skip .torrent/.metalink metadata entries; only match the
        # actual payload file paths.
        if any(str(f.path) == target for f in task.files if not f.is_metadata):
            return task

    return None
||||
def aria2_download_url(model_url: str, model_dir: str, filename: str):
    """Queue `model_url` on the aria2 RPC server and block until the
    transfer completes, showing a byte-accurate tqdm progress bar.

    `model_dir` is translated into the path layout the aria2 daemon sees.
    """
    import manager_core as core
    import tqdm
    import time

    # Strip the local ComfyUI prefix so the path is relative to the
    # daemon's filesystem view.
    if model_dir.startswith(core.comfy_path):
        model_dir = model_dir[len(core.comfy_path) :]

    # NOTE(review): non-absolute dirs are assumed to be mounted under
    # '/models' on the aria2 host -- confirm against the deployment layout.
    download_dir = model_dir if model_dir.startswith('/') else os.path.join('/models', model_dir)

    # Reuse an already-queued task for the same target file, if any.
    download = aria2_find_task(download_dir, filename)
    if download is None:
        options = {'dir': download_dir, 'out': filename}
        download = aria2.add(model_url, options)[0]

    if download.is_active:
        with tqdm.tqdm(
            total=download.total_length,
            bar_format='{l_bar}{bar}{r_bar}',
            desc=filename,
            unit='B',
            unit_scale=True,
        ) as progress_bar:
            while download.is_active:
                # total_length is 0 until aria2 learns the file size.
                if progress_bar.total == 0 and download.total_length != 0:
                    progress_bar.reset(download.total_length)
                progress_bar.update(download.completed_length - progress_bar.n)
                time.sleep(1)
                download.update()  # refresh task status from the RPC server
||||
def download_url_with_agent(url, save_path):
    """Download `url` to `save_path` using a browser User-Agent header
    (some hosts reject urllib's default agent).

    Returns True on success, False on any failure (errors are printed to
    stderr, never raised).
    """
    # A bare `import urllib` does not guarantee the `request` submodule is
    # loaded; import it explicitly.
    import urllib.request

    try:
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}

        req = urllib.request.Request(url, headers=headers)
        # Close the connection deterministically.
        with urllib.request.urlopen(req) as response:
            data = response.read()

        # Guard the empty-dirname case (bare filename) -- makedirs('')
        # would raise.
        parent = os.path.dirname(save_path)
        if parent:
            os.makedirs(parent, exist_ok=True)

        with open(save_path, 'wb') as f:
            f.write(data)

    except Exception as e:
        print(f"Download error: {url} / {e}", file=sys.stderr)
        return False

    print("Installation was successful.")
    return True
||||
# NOTE: snapshot_download doesn't provide file size tqdm.
def download_repo_in_bytes(repo_id, local_dir):
    """Download every file of HuggingFace repo `repo_id` into `local_dir`,
    with a single byte-accurate progress bar over the whole repository.

    Files whose size is missing from the repo metadata are skipped.
    """
    api = HfApi()
    repo_info = api.repo_info(repo_id=repo_id, files_metadata=True)

    os.makedirs(local_dir, exist_ok=True)

    total_size = sum(f.size for f in repo_info.siblings if f.size is not None)

    # Honor the HF mirror endpoint, consistent with download_url().
    base = HF_ENDPOINT if HF_ENDPOINT else 'https://huggingface.co'

    # `with` guarantees the progress bar is closed even on error.
    with tqdm(total=total_size, unit="B", unit_scale=True, desc="Downloading") as pbar:
        for file_info in repo_info.siblings:
            if file_info.size is None:
                continue

            out_path = os.path.join(local_dir, file_info.rfilename)
            # Top-level repo files have no parent component; makedirs('')
            # would raise FileNotFoundError.
            parent = os.path.dirname(out_path)
            if parent:
                os.makedirs(parent, exist_ok=True)

            download_url = f"{base}/{repo_id}/resolve/main/{file_info.rfilename}"

            with requests.get(download_url, stream=True) as r, open(out_path, "wb") as f:
                r.raise_for_status()
                for chunk in r.iter_content(chunk_size=65536):
                    if chunk:
                        f.write(chunk)
                        pbar.update(len(chunk))
||||
1699
comfyui_manager/glob/manager_server.py
Normal file
1699
comfyui_manager/glob/manager_server.py
Normal file
File diff suppressed because it is too large
Load Diff
533
comfyui_manager/glob/manager_util.py
Normal file
533
comfyui_manager/glob/manager_util.py
Normal file
@@ -0,0 +1,533 @@
|
||||
"""
|
||||
description:
|
||||
`manager_util` is the lightest module shared across the prestartup_script, main code, and cm-cli of ComfyUI-Manager.
|
||||
"""
|
||||
import traceback
|
||||
|
||||
import aiohttp
|
||||
import json
|
||||
import threading
|
||||
import os
|
||||
from datetime import datetime
|
||||
import subprocess
|
||||
import sys
|
||||
import re
|
||||
import logging
|
||||
import platform
|
||||
import shlex
|
||||
|
||||
|
||||
# Serializes reads/writes of the on-disk JSON cache across threads.
cache_lock = threading.Lock()
# NOTE(review): this lock's users are outside this chunk -- presumably it
# guards shared HTTP-session setup; confirm against the rest of the module.
session_lock = threading.Lock()

comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
cache_dir = os.path.join(comfyui_manager_path, '.cache')  # This path is also updated together in **manager_core.update_user_directory**.

# When True, pip operations are routed through `uv pip` instead of pip
# (see make_pip_cmd).
use_uv = False
||||
def add_python_path_to_env():
    """Prepend the directory of the running Python interpreter to PATH so
    child processes resolve the same `python` binary."""
    # os.pathsep already encodes the platform PATH separator
    # (':' on POSIX, ';' on Windows) -- no manual platform check needed.
    os.environ['PATH'] = os.path.dirname(sys.executable) + os.pathsep + os.environ['PATH']
def make_pip_cmd(cmd):
    """Build the argv prefix for running pip (or `uv pip`) as a subprocess.

    Portable/embedded builds ('python_embeded' in the interpreter path)
    need '-s' so site-packages outside the bundle are ignored.
    """
    prefix = [sys.executable]
    if 'python_embeded' in sys.executable:
        prefix.append('-s')
    # FIXED: https://github.com/ltdrdata/ComfyUI-Manager/issues/1667
    if use_uv:
        prefix += ['-m', 'uv', 'pip']
    else:
        prefix += ['-m', 'pip']
    return prefix + cmd
# DON'T USE StrictVersion - cannot handle pre_release version
|
||||
# try:
|
||||
# from distutils.version import StrictVersion
|
||||
# except:
|
||||
# print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
|
||||
class StrictVersion:
    """Minimal version value: 'major.minor.patch[.pre_release]'.

    Replacement for distutils' StrictVersion, which cannot represent
    pre-release tags. A version WITHOUT a pre-release tag orders after
    the same numeric version WITH one (final > pre-release).
    """

    def __init__(self, version_string):
        self.version_string = version_string
        self.major = 0
        self.minor = 0
        self.patch = 0
        self.pre_release = None
        self.parse_version_string()

    def parse_version_string(self):
        """Split the stored string into numeric fields plus an optional tag."""
        fields = self.version_string.split('.')
        if not fields:
            raise ValueError("Version string must not be empty")

        numeric = [int(f) for f in fields[:3]]
        while len(numeric) < 3:  # pad missing minor/patch with zeros
            numeric.append(0)
        self.major, self.minor, self.patch = numeric

        # A fourth dotted component is treated as a pre-release tag.
        if len(fields) > 3:
            self.pre_release = fields[3]

    def __str__(self):
        base = f"{self.major}.{self.minor}.{self.patch}"
        return f"{base}-{self.pre_release}" if self.pre_release else base

    def __eq__(self, other):
        mine = (self.major, self.minor, self.patch, self.pre_release)
        theirs = (other.major, other.minor, other.patch, other.pre_release)
        return mine == theirs

    def __lt__(self, other):
        mine = (self.major, self.minor, self.patch)
        theirs = (other.major, other.minor, other.patch)
        if mine != theirs:
            return mine < theirs
        # Numeric parts tie: fall back to pre-release ordering.
        return self.pre_release_compare(self.pre_release, other.pre_release) < 0

    @staticmethod
    def pre_release_compare(pre1, pre2):
        """Order pre-release tags; None (a final release) sorts last."""
        if pre1 == pre2:
            return 0
        if pre1 is None:
            return 1
        if pre2 is None:
            return -1
        return -1 if pre1 < pre2 else 1

    def __le__(self, other):
        return self < other or self == other

    def __gt__(self, other):
        return not (self <= other)

    def __ge__(self, other):
        return not (self < other)

    def __ne__(self, other):
        return not (self == other)
||||
def simple_hash(input_string):
    """Deterministic 32-bit polynomial (base-31) string hash.

    Unlike the builtin hash(), the result is stable across interpreter
    runs, so it is safe to use for deriving cache file names.
    """
    acc = 0
    for ch in input_string:
        acc = (acc * 31 + ord(ch)) % (2 ** 32)
    return acc
|
||||
def is_file_created_within_one_day(file_path):
    """True when `file_path` exists and its ctime is at most 24h in the past.

    NOTE: getctime is inode-change time on POSIX and creation time on
    Windows; for cache-freshness purposes either is acceptable.
    """
    if not os.path.exists(file_path):
        return False

    age_seconds = datetime.now().timestamp() - os.path.getctime(file_path)
    return age_seconds <= 86400
||||
async def get_data(uri, silent=False):
    """Fetch and parse JSON from an http(s) URL or a local file path.

    Returns {} (after logging the error) when the payload is not valid JSON.
    """
    if not silent:
        print(f"FETCH DATA from: {uri}", end="")

    if uri.startswith("http"):
        no_cache_headers = {
            'Cache-Control': 'no-cache',
            'Pragma': 'no-cache',
            'Expires': '0'
        }
        # NOTE(review): TLS verification is disabled here -- inherited behavior.
        connector = aiohttp.TCPConnector(verify_ssl=False)
        async with aiohttp.ClientSession(trust_env=True, connector=connector) as session:
            async with session.get(uri, headers=no_cache_headers) as resp:
                raw_text = await resp.text()
    else:
        # Local file: read under the cache lock to avoid racing cache writers.
        with cache_lock, open(uri, "r", encoding="utf-8") as f:
            raw_text = f.read()

    try:
        parsed = json.loads(raw_text)
    except Exception as e:
        logging.error(f"[ComfyUI-Manager] An error occurred while fetching '{uri}': {e}")
        return {}

    if not silent:
        print(" [DONE]")

    return parsed
||||
def get_cache_path(uri):
    """Map a URI to its cache file path under `cache_dir`.

    The name combines a stable hash (uniqueness) with a sanitized
    basename (readability); URL query characters become '_'.
    """
    safe_name = os.path.basename(uri)
    for ch in ('&', '?', '='):
        safe_name = safe_name.replace(ch, "_")
    return os.path.join(cache_dir, f"{simple_hash(uri)}_{safe_name}.json")
||||
def get_cache_state(uri):
    """Classify the cache entry for `uri`.

    Returns 'not-cached' (no file), 'cached' (fresh, under 24h old),
    or 'expired'.
    """
    cache_file = get_cache_path(uri)

    if not os.path.exists(cache_file):
        return "not-cached"
    if is_file_created_within_one_day(cache_file):
        return "cached"
    return "expired"
|
||||
def save_to_cache(uri, json_obj, silent=False):
    """Write `json_obj` to the cache slot for `uri` (pretty-printed,
    key-sorted), serialized against concurrent cache access."""
    cache_file = get_cache_path(uri)

    with cache_lock:
        with open(cache_file, "w", encoding='utf-8') as fp:
            json.dump(json_obj, fp, indent=4, sort_keys=True)

    if not silent:
        logging.info(f"[ComfyUI-Manager] default cache updated: {uri}")
|
||||
async def get_data_with_cache(uri, silent=False, cache_mode=True, dont_wait=False, dont_cache=False):
    """Fetch JSON for `uri` through the on-disk cache.

    cache_mode: allow serving from / writing to the cache at all.
    dont_wait:  never hit the network; serve whatever cache exists (even
                expired) or {} when there is none.
    dont_cache: fetch fresh data but skip writing it back to the cache.
    """
    cache_uri = get_cache_path(uri)

    if cache_mode and dont_wait:
        # NOTE: return the cache if possible, even if it is expired, so do not cache
        if not os.path.exists(cache_uri):
            logging.error(f"[ComfyUI-Manager] The network connection is unstable, so it is operating in fallback mode: {uri}")
            return {}
        else:
            if not is_file_created_within_one_day(cache_uri):
                logging.error(f"[ComfyUI-Manager] The network connection is unstable, so it is operating in outdated cache mode: {uri}")
            return await get_data(cache_uri, silent=silent)

    if cache_mode and is_file_created_within_one_day(cache_uri):
        # Fresh cache hit: read the cached copy instead of the network.
        json_obj = await get_data(cache_uri, silent=silent)
    else:
        json_obj = await get_data(uri, silent=silent)
        if not dont_cache:
            # Persist the fresh result for subsequent calls.
            with cache_lock:
                with open(cache_uri, "w", encoding='utf-8') as file:
                    json.dump(json_obj, file, indent=4, sort_keys=True)
            if not silent:
                logging.info(f"[ComfyUI-Manager] default cache updated: {uri}")

    return json_obj
|
||||
def sanitize_tag(x):
    """Escape '<' and '>' as HTML entities so `x` is rendered as plain
    text rather than interpreted as markup."""
    return x.replace('<', '&lt;').replace('>', '&gt;')
|
||||
def extract_package_as_zip(file_path, extract_path):
    """Extract the zip archive at `file_path` into `extract_path`.

    Returns the archive's member names, or None when the file is not a
    valid zip archive.
    """
    import zipfile

    try:
        with zipfile.ZipFile(file_path, "r") as archive:
            archive.extractall(extract_path)
            members = archive.namelist()
    except zipfile.BadZipFile:
        logging.error(f"File '{file_path}' is not a zip or is corrupted.")
        return None

    logging.info(f"Extracted zip file to {extract_path}")
    return members
|
||||
# Cached mapping of normalized package name -> installed version string.
# Populated lazily by get_installed_packages(); reset via clear_pip_cache().
pip_map = None


def get_installed_packages(renew=False):
    """Return {normalized_package_name: version} for the current environment.

    Names are normalized to lowercase with '-' replaced by '_' so lookups
    work regardless of pip/uv spelling differences. The result is cached
    in the module-level `pip_map`; pass renew=True to refresh.

    On failure an empty dict is returned and the cache is left unset so a
    later call retries.
    """
    global pip_map

    if renew or pip_map is None:
        try:
            result = subprocess.check_output(make_pip_cmd(['list']), universal_newlines=True)

            pip_map = {}
            for line in result.split('\n'):
                x = line.strip()
                if x:
                    y = line.split()
                    # Skip the 'Package Version' header and its '----' underline.
                    if y[0] == 'Package' or y[0].startswith('-'):
                        continue

                    normalized_name = y[0].lower().replace('-', '_')
                    pip_map[normalized_name] = y[1]
        except subprocess.CalledProcessError:
            logging.error("[ComfyUI-Manager] Failed to retrieve the information of installed pip packages.")
            # BUGFIX: previously returned set() -- callers index the result
            # like a dict (`x['torch']`, `.get(...)`), so return an empty
            # dict instead.
            return {}

    return pip_map
|
||||
def clear_pip_cache():
    """Invalidate the cached pip package map so the next
    get_installed_packages() call re-queries pip."""
    global pip_map
    pip_map = None
|
||||
def parse_requirement_line(line):
    """Parse one requirements.txt-style line.

    Recognizes 'name', 'name<op>version' and a trailing '--index-url URL'
    option. Returns a dict with 'package' plus optional 'operator',
    'version' (a StrictVersion) and 'index_url' keys, or None when the
    line is empty or the package spec does not match.
    """
    tokens = shlex.split(line)
    if not tokens:
        return None

    spec_re = re.compile(
        r'^(?P<package>[A-Za-z0-9_.+-]+)'
        r'(?P<operator>==|>=|<=|!=|~=|>|<)?'
        r'(?P<version>[A-Za-z0-9_.+-]*)$'
    )
    match = spec_re.match(tokens[0])
    if match is None:
        return None

    result = {'package': match.group('package')}

    if match.group('operator'):
        result['operator'] = match.group('operator')
    if match.group('version'):
        result['version'] = StrictVersion(match.group('version'))

    # Optional '--index-url <url>' anywhere after the package spec.
    if '--index-url' in tokens:
        pos = tokens.index('--index-url')
        if pos + 1 < len(tokens):
            result['index_url'] = tokens[pos + 1]

    return result
|
||||
# For each torch release, the (torchvision, torchaudio) versions released
# alongside it. Used by PIPFixer.torch_rollback to reinstall a mutually
# compatible trio.
# NOTE(review): entries mirror the upstream table as-is (e.g. torchaudio
# '2.5.0' for torch 2.5.1) -- verify against pytorch.org before extending.
torch_torchvision_torchaudio_version_map = {
    '2.6.0': ('0.21.0', '2.6.0'),
    '2.5.1': ('0.20.0', '2.5.0'),
    '2.5.0': ('0.20.0', '2.5.0'),
    '2.4.1': ('0.19.1', '2.4.1'),
    '2.4.0': ('0.19.0', '2.4.0'),
    '2.3.1': ('0.18.1', '2.3.1'),
    '2.3.0': ('0.18.0', '2.3.0'),
    '2.2.2': ('0.17.2', '2.2.2'),
    '2.2.1': ('0.17.1', '2.2.1'),
    '2.2.0': ('0.17.0', '2.2.0'),
    '2.1.2': ('0.16.2', '2.1.2'),
    '2.1.1': ('0.16.1', '2.1.1'),
    '2.1.0': ('0.16.0', '2.1.0'),
    '2.0.1': ('0.15.2', '2.0.1'),
    '2.0.0': ('0.15.1', '2.0.0'),
}
||||
class PIPFixer:
    """Repairs the Python environment after custom-node installs.

    Compares the pip state captured before an installation
    (`prev_pip_versions`) with the state afterwards and undoes common
    collateral damage: changed torch builds, mismatched opencv variants,
    numpy 2.x, a missing comfyui-frontend-package, plus user-specified
    pins from `pip_auto_fix.list`.
    """

    def __init__(self, prev_pip_versions, comfyui_path, manager_files_path):
        # Defensive copy of {normalized_name: version} taken before install.
        self.prev_pip_versions = { **prev_pip_versions }
        self.comfyui_path = comfyui_path
        self.manager_files_path = manager_files_path

    def torch_rollback(self):
        """Reinstall the torch/torchvision/torchaudio trio recorded in
        `prev_pip_versions`, preserving the '+platform' tag (e.g. cu121)."""
        spec = self.prev_pip_versions['torch'].split('+')
        # BUGFIX: was `len(spec) > 0`, which is always true after split()
        # -- it raised IndexError for versions without a '+platform'
        # suffix and made the fallback branch unreachable.
        if len(spec) > 1:
            platform = spec[1]
        else:
            # No platform tag recorded: force-reinstall the default wheels.
            cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
            subprocess.check_output(cmd, universal_newlines=True)
            logging.error(cmd)
            return

        torch_ver = StrictVersion(spec[0])
        torch_ver = f"{torch_ver.major}.{torch_ver.minor}.{torch_ver.patch}"
        torch_torchvision_torchaudio_ver = torch_torchvision_torchaudio_version_map.get(torch_ver)

        if torch_torchvision_torchaudio_ver is None:
            # Unknown release: assume it was a nightly build.
            cmd = make_pip_cmd(['install', '--pre', 'torch', 'torchvision', 'torchaudio',
                                '--index-url', f"https://download.pytorch.org/whl/nightly/{platform}"])
            logging.info("[ComfyUI-Manager] restore PyTorch to nightly version")
        else:
            torchvision_ver, torchaudio_ver = torch_torchvision_torchaudio_ver
            cmd = make_pip_cmd(['install', f'torch=={torch_ver}', f'torchvision=={torchvision_ver}', f"torchaudio=={torchaudio_ver}",
                                '--index-url', f"https://download.pytorch.org/whl/{platform}"])
            logging.info(f"[ComfyUI-Manager] restore PyTorch to {torch_ver}+{platform}")

        subprocess.check_output(cmd, universal_newlines=True)

    def fix_broken(self):
        """Run all repair passes against the current pip state.

        Every pass is individually wrapped in try/except so one failing
        repair never blocks the others.
        """
        new_pip_versions = get_installed_packages(True)

        # remove `comfy` python package
        try:
            if 'comfy' in new_pip_versions:
                cmd = make_pip_cmd(['uninstall', 'comfy'])
                subprocess.check_output(cmd, universal_newlines=True)

                logging.warning("[ComfyUI-Manager] 'comfy' python package is uninstalled.\nWARN: The 'comfy' package is completely unrelated to ComfyUI and should never be installed as it causes conflicts with ComfyUI.")
        except Exception as e:
            logging.error("[ComfyUI-Manager] Failed to uninstall `comfy` python package")
            logging.error(e)

        # fix torch - reinstall torch packages if version is changed
        try:
            if 'torch' not in self.prev_pip_versions or 'torchvision' not in self.prev_pip_versions or 'torchaudio' not in self.prev_pip_versions:
                logging.error("[ComfyUI-Manager] PyTorch is not installed")
            elif self.prev_pip_versions['torch'] != new_pip_versions['torch'] \
                    or self.prev_pip_versions['torchvision'] != new_pip_versions['torchvision'] \
                    or self.prev_pip_versions['torchaudio'] != new_pip_versions['torchaudio']:
                self.torch_rollback()
        except Exception as e:
            logging.error("[ComfyUI-Manager] Failed to restore PyTorch")
            logging.error(e)

        # fix opencv - all installed opencv variants must share one version
        try:
            # BUGFIX: keys in new_pip_versions are normalized ('-' -> '_');
            # the previous hyphenated lookups could never match, so the
            # opencv repair silently never fired.
            ocp = new_pip_versions.get('opencv_contrib_python')
            ocph = new_pip_versions.get('opencv_contrib_python_headless')
            op = new_pip_versions.get('opencv_python')
            oph = new_pip_versions.get('opencv_python_headless')

            versions = [ocp, ocph, op, oph]
            versions = [StrictVersion(x) for x in versions if x is not None]
            versions.sort(reverse=True)

            if len(versions) > 0:
                # upgrade every variant to the maximum installed version
                targets = []
                cur = versions[0]
                if ocp is not None and StrictVersion(ocp) != cur:
                    targets.append('opencv-contrib-python')
                if ocph is not None and StrictVersion(ocph) != cur:
                    targets.append('opencv-contrib-python-headless')
                if op is not None and StrictVersion(op) != cur:
                    targets.append('opencv-python')
                if oph is not None and StrictVersion(oph) != cur:
                    targets.append('opencv-python-headless')

                if len(targets) > 0:
                    for x in targets:
                        cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}", "numpy<2"])
                        subprocess.check_output(cmd, universal_newlines=True)

                    logging.info(f"[ComfyUI-Manager] 'opencv' dependencies were fixed: {targets}")
        except Exception as e:
            logging.error("[ComfyUI-Manager] Failed to restore opencv")
            logging.error(e)

        # fix numpy - downgrade numpy 2.x, which breaks this stack
        try:
            np = new_pip_versions.get('numpy')
            if np is not None:
                if StrictVersion(np) >= StrictVersion('2'):
                    cmd = make_pip_cmd(['install', "numpy<2"])
                    subprocess.check_output(cmd, universal_newlines=True)

                    logging.info("[ComfyUI-Manager] 'numpy' dependency were fixed")
        except Exception as e:
            logging.error("[ComfyUI-Manager] Failed to restore numpy")
            logging.error(e)

        # fix missing frontend
        try:
            # NOTE: package name in requirements is 'comfyui-frontend-package'
            # but, package name from `pip freeze` is 'comfyui_frontend_package'
            # but, package name from `uv pip freeze` is 'comfyui-frontend-package'
            #
            # get_installed_packages returns normalized name (i.e. comfyui_frontend_package)
            if 'comfyui_frontend_package' not in new_pip_versions:
                requirements_path = os.path.join(self.comfyui_path, 'requirements.txt')

                with open(requirements_path, 'r') as file:
                    lines = file.readlines()

                front_line = next((line.strip() for line in lines if line.startswith('comfyui-frontend-package')), None)
                if front_line is None:
                    logging.info("[ComfyUI-Manager] Skipped fixing the 'comfyui-frontend-package' dependency because the ComfyUI is outdated.")
                else:
                    cmd = make_pip_cmd(['install', front_line])
                    subprocess.check_output(cmd, universal_newlines=True)
                    logging.info("[ComfyUI-Manager] 'comfyui-frontend-package' dependency were fixed")
        except Exception as e:
            logging.error("[ComfyUI-Manager] Failed to restore comfyui-frontend-package")
            logging.error(e)

        # restore based on custom list
        pip_auto_fix_path = os.path.join(self.manager_files_path, "pip_auto_fix.list")
        if os.path.exists(pip_auto_fix_path):
            with open(pip_auto_fix_path, 'r', encoding="UTF-8", errors="ignore") as f:
                fixed_list = []

                for x in f.readlines():
                    try:
                        parsed = parse_requirement_line(x)
                        if parsed is None:
                            continue  # blank or unparsable line

                        need_to_reinstall = True

                        normalized_name = parsed['package'].lower().replace('-', '_')
                        if normalized_name in new_pip_versions:
                            if 'version' in parsed and 'operator' in parsed:
                                # BUGFIX: look up by the normalized key; the
                                # raw package name may contain '-' and then
                                # never matched, forcing a needless reinstall.
                                cur = StrictVersion(new_pip_versions[normalized_name])
                                dest = parsed['version']
                                op = parsed['operator']
                                if cur == dest:
                                    if op in ['==', '>=', '<=']:
                                        need_to_reinstall = False
                                elif cur < dest:
                                    if op in ['<=', '<', '~=', '!=']:
                                        need_to_reinstall = False
                                elif cur > dest:
                                    if op in ['>=', '>', '~=', '!=']:
                                        need_to_reinstall = False

                        if need_to_reinstall:
                            cmd_args = ['install']
                            if 'version' in parsed and 'operator' in parsed:
                                cmd_args.append(parsed['package'] + parsed['operator'] + parsed['version'].version_string)
                            else:
                                # BUGFIX: a bare package line previously
                                # produced `pip install` with no package arg.
                                cmd_args.append(parsed['package'])

                            if 'index_url' in parsed:
                                cmd_args.append('--index-url')
                                cmd_args.append(parsed['index_url'])

                            cmd = make_pip_cmd(cmd_args)
                            subprocess.check_output(cmd, universal_newlines=True)

                            fixed_list.append(parsed['package'])
                    except Exception as e:
                        traceback.print_exc()
                        logging.error(f"[ComfyUI-Manager] Failed to restore '{x}'")
                        logging.error(e)

                if len(fixed_list) > 0:
                    # BUGFIX: message referred to 'pip_auto_fix.json'; the
                    # actual file is 'pip_auto_fix.list'.
                    logging.info(f"[ComfyUI-Manager] dependencies in pip_auto_fix.list were fixed: {fixed_list}")
||||
def sanitize(data):
    """Escape '<' and '>' as HTML entities so `data` is rendered as plain
    text rather than interpreted as markup."""
    return data.replace("<", "&lt;").replace(">", "&gt;")
|
||||
def sanitize_filename(input_string):
    """Replace every character outside [a-zA-Z0-9_] with '_' so the
    result is safe to use as a file name."""
    return re.sub(r'[^a-zA-Z0-9_]', '_', input_string)
|
||||
def robust_readlines(fullpath):
    """Read all lines from `fullpath`, falling back to chardet-based
    encoding detection when decoding with the default encoding fails.

    Returns [] when no usable encoding can be determined.
    """
    try:
        with open(fullpath, "r") as f:
            return f.readlines()
    except UnicodeDecodeError:
        # Narrowed from a bare `except:` so genuine I/O errors
        # (missing file, permissions) propagate instead of being retried.
        # chardet is imported lazily: it is only needed on this path.
        import chardet

        with open(fullpath, "rb") as f:
            raw_data = f.read()
        encoding = chardet.detect(raw_data)['encoding']

        if encoding is not None:
            with open(fullpath, "r", encoding=encoding) as f:
                return f.readlines()

        print(f"[ComfyUI-Manager] Failed to recognize encoding for: {fullpath}")
        return []
||||
72
comfyui_manager/glob/node_package.py
Normal file
72
comfyui_manager/glob/node_package.py
Normal file
@@ -0,0 +1,72 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import os
|
||||
|
||||
from .git_utils import get_commit_hash
|
||||
|
||||
|
||||
@dataclass
class InstalledNodePackage:
    """Information about an installed node package."""

    id: str        # package identifier (directory name, or CNR id when resolved)
    fullpath: str  # absolute path of the package directory
    disabled: bool
    version: str   # 'unknown', 'nightly', or a concrete CNR version string

    @property
    def is_unknown(self) -> bool:
        return self.version == "unknown"

    @property
    def is_nightly(self) -> bool:
        return self.version == "nightly"

    @property
    def is_from_cnr(self) -> bool:
        # Any concrete version string implies a CNR-managed install.
        return not (self.is_unknown or self.is_nightly)

    @property
    def is_enabled(self) -> bool:
        return not self.disabled

    @property
    def is_disabled(self) -> bool:
        return self.disabled

    def get_commit_hash(self) -> str:
        return get_commit_hash(self.fullpath)

    def isValid(self) -> bool:
        """CNR installs must carry a '.tracking' marker file; every other
        kind of install is considered valid as-is."""
        if self.is_from_cnr:
            return os.path.exists(os.path.join(self.fullpath, '.tracking'))
        return True

    @staticmethod
    def from_fullpath(fullpath: str, resolve_from_path) -> InstalledNodePackage:
        """Build a package record from an on-disk location.

        A package is disabled when its directory name ends in '.disabled'
        or it lives under a '.disabled' parent folder. `resolve_from_path`
        may return {'id': ..., 'ver': ...} to override the guessed id.
        """
        parent_name = os.path.basename(os.path.dirname(fullpath))
        dir_name = os.path.basename(fullpath)

        suffix = ".disabled"
        if dir_name.endswith(suffix):
            node_id, disabled = dir_name[:-len(suffix)], True
        elif parent_name == ".disabled":
            # Nodes under custom_nodes/.disabled/* are disabled
            node_id, disabled = dir_name, True
        else:
            node_id, disabled = dir_name, False

        info = resolve_from_path(fullpath)
        if info is not None:
            node_id = info['id']  # robust module guessing
            version = info['ver']
        else:
            version = 'unknown'

        return InstalledNodePackage(
            id=node_id, fullpath=fullpath, disabled=disabled, version=version
        )
||||
117
comfyui_manager/glob/security_check.py
Normal file
117
comfyui_manager/glob/security_check.py
Normal file
@@ -0,0 +1,117 @@
|
||||
import sys
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
|
||||
def security_check():
    """Scan for known-malicious custom nodes, pip packages and dropped
    files; print per-threat remediation guides and terminate the process
    when anything is detected."""
    print("[START] Security scan")

    # NOTE(review): assumes this module lives two levels below
    # custom_nodes/ (custom_nodes/<pkg>/glob/) -- confirm this still
    # holds for pip-installed layouts.
    custom_nodes_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
    comfyui_path = os.path.abspath(os.path.join(custom_nodes_path, '..'))

    # Remediation instructions shown for each detected threat key.
    # NOTE(review): the "ultralytics==8.3.42" guide has no matching entry
    # in pip_blacklist below, so it currently cannot be selected -- verify.
    guide = {
        "ComfyUI_LLMVISION": """
0.Remove ComfyUI\\custom_nodes\\ComfyUI_LLMVISION.
1.Remove pip packages: openai-1.16.3.dist-info, anthropic-0.21.4.dist-info, openai-1.30.2.dist-info, anthropic-0.21.5.dist-info, anthropic-0.26.1.dist-info, %LocalAppData%\\rundll64.exe
(For portable versions, it is recommended to reinstall. If you are using a venv, it is advised to recreate the venv.)
2.Remove these files in your system: lib/browser/admin.py, Cadmino.py, Fadmino.py, VISION-D.exe, BeamNG.UI.exe
3.Check your Windows registry for the key listed above and remove it.
(HKEY_CURRENT_USER\\Software\\OpenAICLI)
4.Run a malware scanner.
5.Change all of your passwords, everywhere.

(Reinstall OS is recommended.)
\n
Detailed information: https://old.reddit.com/r/comfyui/comments/1dbls5n/psa_if_youve_used_the_comfyui_llmvision_node_from/
""",
        "lolMiner": """
1. Remove pip packages: lolMiner*
2. Remove files: lolMiner*, 4G_Ethash_Linux_Readme.txt, mine* in ComfyUI dir.

(Reinstall ComfyUI is recommended.)
""",
        "ultralytics==8.3.41": f"""
Execute following commands:
{sys.executable} -m pip uninstall ultralytics
{sys.executable} -m pip install ultralytics==8.3.40

And kill and remove /tmp/ultralytics_runner


The version 8.3.41 to 8.3.42 of the Ultralytics package you installed is compromised. Please uninstall that version and reinstall the latest version.
https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situation/
""",
        "ultralytics==8.3.42": f"""
Execute following commands:
{sys.executable} -m pip uninstall ultralytics
{sys.executable} -m pip install ultralytics==8.3.40

And kill and remove /tmp/ultralytics_runner


The version 8.3.41 to 8.3.42 of the Ultralytics package you installed is compromised. Please uninstall that version and reinstall the latest version.
https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situation/
"""
    }

    # Custom-node directory names that indicate a compromised install.
    node_blacklist = {"ComfyUI_LLMVISION": "ComfyUI_LLMVISION"}

    # Substrings of `pip freeze` output -> threat key in `guide`.
    pip_blacklist = {
        "AppleBotzz": "ComfyUI_LLMVISION",
        "ultralytics==8.3.41": "ultralytics==8.3.41"
    }

    # Threat key -> files whose presence indicates that threat.
    file_blacklist = {
        "ComfyUI_LLMVISION": ["%LocalAppData%\\rundll64.exe"],
        "lolMiner": [os.path.join(comfyui_path, 'lolMiner')]
    }

    installed_pips = subprocess.check_output([sys.executable, '-m', "pip", "freeze"], text=True)

    detected = set()
    # The compromised LLMVISION build ships trojaned anthropic wheels that
    # depend on pycrypto; inspect the installed anthropic metadata for it.
    try:
        anthropic_info = subprocess.check_output([sys.executable, '-m', "pip", "show", "anthropic"], text=True, stderr=subprocess.DEVNULL)
        anthropic_reqs = [x for x in anthropic_info.split('\n') if x.startswith("Requires")][0].split(': ')[1]
        if "pycrypto" in anthropic_reqs:
            location = [x for x in anthropic_info.split('\n') if x.startswith("Location")][0].split(': ')[1]
            for fi in os.listdir(location):
                if fi.startswith("anthropic"):
                    # Prepend the concrete paths to remove to the guide text.
                    guide["ComfyUI_LLMVISION"] = f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"]
                    detected.add("ComfyUI_LLMVISION")
    except subprocess.CalledProcessError:
        # `pip show anthropic` fails when the package is not installed.
        pass

    for k, v in node_blacklist.items():
        if os.path.exists(os.path.join(custom_nodes_path, k)):
            print(f"[SECURITY ALERT] custom node '{k}' is dangerous.")
            detected.add(v)

    for k, v in pip_blacklist.items():
        if k in installed_pips:
            detected.add(v)
            # NOTE(review): this break stops at the first pip match, so
            # additional blacklisted packages are not added to `detected`
            # -- looks unintentional; confirm before removing.
            break

    for k, v in file_blacklist.items():
        for x in v:
            if os.path.exists(os.path.expandvars(x)):
                detected.add(k)
                break  # one file hit is enough for this threat

    if len(detected) > 0:
        # Echo the exact offending lines from `pip freeze`.
        for line in installed_pips.split('\n'):
            for k, v in pip_blacklist.items():
                if k in line:
                    print(f"[SECURITY ALERT] '{line}' is dangerous.")

        print("\n########################################################################")
        print(" Malware has been detected, forcibly terminating ComfyUI execution.")
        print("########################################################################\n")

        for x in detected:
            print(f"\n======== TARGET: {x} =========")
            print("\nTODO:")
            print(guide.get(x))

        exit(-1)

    print("[DONE] Security scan")
||||
385
comfyui_manager/glob/share_3rdparty.py
Normal file
385
comfyui_manager/glob/share_3rdparty.py
Normal file
@@ -0,0 +1,385 @@
|
||||
import mimetypes
|
||||
from . import manager_core as core
|
||||
|
||||
import os
|
||||
from aiohttp import web
|
||||
import aiohttp
|
||||
import json
|
||||
import hashlib
|
||||
|
||||
import folder_paths
|
||||
from server import PromptServer
|
||||
|
||||
|
||||
def extract_model_file_names(json_data):
    """Extract unique model file names from the input JSON data.

    Walks the structure recursively and keeps basenames whose extension
    looks like a model checkpoint file.
    """
    model_filename_extensions = {'.safetensors', '.ckpt', '.pt', '.pth', '.bin'}
    found = set()

    # Recursively search for file names in the JSON data
    def walk(node):
        if isinstance(node, dict):
            for child in node.values():
                walk(child)
        elif isinstance(node, list):
            for child in node:
                walk(child)
        elif isinstance(node, str) and '.' in node:
            found.add(os.path.basename(node))

    walk(json_data)
    return [name for name in found if os.path.splitext(name)[1] in model_filename_extensions]
|
||||
def find_file_paths(base_dir, file_names):
    """Find the paths of the files in the base directory.

    Returns {file_name: full_path}; when a name occurs more than once the
    last one visited wins. '.git' directories are never descended into.
    """
    wanted = set(file_names)
    matches = {}

    for root, dirs, files in os.walk(base_dir):
        # Prune in place so os.walk skips these subtrees entirely.
        dirs[:] = [d for d in dirs if d not in ['.git']]

        for name in files:
            if name in wanted:
                matches[name] = os.path.join(root, name)

    return matches
|
||||
def compute_sha256_checksum(filepath):
    """Compute the SHA256 checksum of a file, reading in 4 KiB chunks so
    arbitrarily large files never need to fit in memory."""
    digest = hashlib.sha256()
    with open(filepath, 'rb') as stream:
        while True:
            chunk = stream.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
|
||||
@PromptServer.instance.routes.get("/v2/manager/share_option")
async def share_option(request):
    """Get or set the persisted ``share_option`` config value.

    With a ``value`` query parameter the option is updated and written back
    to the config; without it the current value is returned as plain text.
    """
    query = request.rel_url.query
    if "value" not in query:
        return web.Response(text=core.get_config()['share_option'], status=200)

    core.get_config()['share_option'] = query['value']
    core.write_config()
    return web.Response(status=200)
|
||||
|
||||
|
||||
def get_openart_auth():
    """Return the stored OpenArt API key, or ``None`` if absent or empty.

    The key is persisted in ``.openart_key`` under the manager files
    directory by the corresponding set endpoint.
    """
    key_path = os.path.join(core.manager_files_path, ".openart_key")
    if not os.path.exists(key_path):
        return None
    try:
        with open(key_path, "r") as f:
            openart_key = f.read().strip()
        return openart_key if openart_key else None
    except OSError:
        # An unreadable key file is treated the same as no key at all.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt etc.)
        return None
|
||||
|
||||
|
||||
def get_matrix_auth():
    """Return stored Matrix credentials as a dict, or ``None`` when missing.

    The ``matrix_auth`` file holds homeserver, username and password on
    three separate lines; a missing or blank field invalidates the whole set.
    """
    auth_path = os.path.join(core.manager_files_path, "matrix_auth")
    if not os.path.exists(auth_path):
        return None
    try:
        with open(auth_path, "r") as f:
            matrix_auth = f.read()
        # ValueError covers a malformed file that does not split into
        # exactly three lines.
        homeserver, username, password = matrix_auth.strip().split("\n")
        if not homeserver or not username or not password:
            return None
        return {
            "homeserver": homeserver,
            "username": username,
            "password": password,
        }
    except (OSError, ValueError):
        # Unreadable/malformed credentials are treated as none stored.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt etc.)
        return None
|
||||
|
||||
|
||||
def get_comfyworkflows_auth():
    """Return the stored comfyworkflows.com share key, or ``None``.

    Note: the key is returned exactly as read (not stripped); an
    all-whitespace key counts as absent.
    """
    key_path = os.path.join(core.manager_files_path, "comfyworkflows_sharekey")
    if not os.path.exists(key_path):
        return None
    try:
        with open(key_path, "r") as f:
            share_key = f.read()
        if not share_key.strip():
            return None
        return share_key
    except OSError:
        # An unreadable key file is treated the same as no key at all.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt etc.)
        return None
|
||||
|
||||
|
||||
def get_youml_settings():
    """Return the stored YouML settings string, or ``None`` if absent/empty.

    The settings are persisted in the ``.youml`` file under the manager
    files directory by :func:`set_youml_settings`.
    """
    settings_path = os.path.join(core.manager_files_path, ".youml")
    if not os.path.exists(settings_path):
        return None
    try:
        with open(settings_path, "r") as f:
            youml_settings = f.read().strip()
        return youml_settings if youml_settings else None
    except OSError:
        # An unreadable settings file is treated the same as none stored.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt etc.)
        return None
|
||||
|
||||
|
||||
def set_youml_settings(settings):
    """Persist the YouML settings string to the ``.youml`` file.

    Written as UTF-8 explicitly, matching the other JSON payloads persisted
    by this module (the esheep handlers) — the platform default encoding
    would otherwise vary by OS.
    """
    with open(os.path.join(core.manager_files_path, ".youml"), "w", encoding='utf-8') as f:
        f.write(settings)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
async def api_get_openart_auth(request):
    """Return the stored OpenArt key as JSON, or 404 when none is stored."""
    key = get_openart_auth()
    if key:
        return web.json_response({"openart_key": key})
    return web.Response(status=404)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
async def api_set_openart_auth(request):
    """Persist the OpenArt key posted as JSON (``openart_key`` field)."""
    payload = await request.json()
    key_path = os.path.join(core.manager_files_path, ".openart_key")
    with open(key_path, "w") as f:
        f.write(payload['openart_key'])
    return web.Response(status=200)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
    """Return stored Matrix credentials as JSON, or 404 when absent."""
    creds = get_matrix_auth()
    if creds:
        return web.json_response(creds)
    return web.Response(status=404)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.get("/v2/manager/youml/settings")
async def api_get_youml_settings(request):
    """Return the stored YouML settings (parsed as JSON), or 404 when absent."""
    raw = get_youml_settings()
    if raw:
        return web.json_response(json.loads(raw))
    return web.Response(status=404)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.post("/v2/manager/youml/settings")
async def api_set_youml_settings(request):
    """Persist the posted JSON body as the YouML settings."""
    payload = await request.json()
    set_youml_settings(json.dumps(payload))
    return web.Response(status=200)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
    """Return the stored comfyworkflows.com share key as JSON, or 404.

    The key is read from the ``comfyworkflows_sharekey`` file under the
    manager files directory.
    """
    share_key = get_comfyworkflows_auth()
    if share_key:
        return web.json_response({"comfyworkflows_sharekey": share_key})
    return web.Response(status=404)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
    """Persist the posted esheep share payload to a JSON file on disk."""
    payload = await request.json()
    target = os.path.join(core.manager_files_path, "esheep_share_message.json")
    with open(target, "w", encoding='utf-8') as file:
        json.dump(payload, file, indent=4)
    return web.Response(status=200)
|
||||
|
||||
|
||||
@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
    """Return the previously stored esheep share payload as JSON text."""
    source = os.path.join(core.manager_files_path, "esheep_share_message.json")
    with open(source, 'r', encoding='utf-8') as file:
        payload = json.load(file)
    return web.Response(status=200, text=json.dumps(payload))
|
||||
|
||||
|
||||
def set_matrix_auth(json_data):
    """Persist Matrix credentials (homeserver/username/password) to disk.

    Stored as three newline-separated lines in the ``matrix_auth`` file —
    the format :func:`get_matrix_auth` reads back.  Raises ``KeyError`` if
    any of the three fields is missing from *json_data*.
    """
    lines = [json_data['homeserver'], json_data['username'], json_data['password']]
    with open(os.path.join(core.manager_files_path, "matrix_auth"), "w") as f:
        f.write("\n".join(lines))
|
||||
|
||||
|
||||
def set_comfyworkflows_auth(comfyworkflows_sharekey):
    """Persist the comfyworkflows.com share key to disk (overwrites)."""
    key_path = os.path.join(core.manager_files_path, "comfyworkflows_sharekey")
    with open(key_path, "w") as f:
        f.write(comfyworkflows_sharekey)
|
||||
|
||||
|
||||
def has_provided_matrix_auth(matrix_auth):
    """Truthy iff homeserver, username and password are all non-blank.

    Follows Python's ``and`` chaining: the return value is the last operand
    evaluated (a stripped string or a falsy value), not a bool.
    """
    homeserver = matrix_auth['homeserver'].strip()
    username = matrix_auth['username'].strip()
    password = matrix_auth['password'].strip()
    return homeserver and username and password
|
||||
|
||||
|
||||
def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
    """Truthy iff the share key contains any non-whitespace.

    Returns the stripped key itself (possibly the empty, falsy string).
    """
    stripped_key = comfyworkflows_sharekey.strip()
    return stripped_key
|
||||
|
||||
|
||||
@PromptServer.instance.routes.post("/v2/manager/share")
async def share_art(request):
    """Share a generated output (plus its workflow) to the selected destinations.

    The POST body supplies credentials, metadata (title/description/credits/
    nsfw flag), the workflow/prompt JSON, the candidate outputs and the index
    of the one to share.  Supported destinations (``share_destinations``):
    ``comfyworkflows`` (presigned-URL upload flow) and ``matrix`` (room post
    via the matrix_client SDK).  Returns a JSON body with the resulting
    comfyworkflows URL and/or a Matrix success flag.
    """
    # get json data
    json_data = await request.json()

    matrix_auth = json_data['matrix_auth']
    comfyworkflows_sharekey = json_data['cw_auth']['cw_sharekey']

    # Persist both credential sets immediately so they survive for next time,
    # regardless of which destinations were actually selected.
    set_matrix_auth(matrix_auth)
    set_comfyworkflows_auth(comfyworkflows_sharekey)

    share_destinations = json_data['share_destinations']
    credits = json_data['credits']
    title = json_data['title']
    description = json_data['description']
    is_nsfw = json_data['is_nsfw']
    prompt = json_data['prompt']
    potential_outputs = json_data['potential_outputs']
    selected_output_index = json_data['selected_output_index']

    # NOTE(review): bare except — a bad/absent index silently falls back to
    # the first output (and IndexErrors on an empty list would still raise
    # from the fallback line).
    try:
        output_to_share = potential_outputs[int(selected_output_index)]
    except:
        # for now, pick the first output
        output_to_share = potential_outputs[0]

    assert output_to_share['type'] in ('image', 'output')
    output_dir = folder_paths.get_output_directory()

    # Resolve the asset's filename/subfolder; 'temp'-type images live in the
    # temp directory instead of the output directory.
    if output_to_share['type'] == 'image':
        asset_filename = output_to_share['image']['filename']
        asset_subfolder = output_to_share['image']['subfolder']

        if output_to_share['image']['type'] == 'temp':
            output_dir = folder_paths.get_temp_directory()
    else:
        asset_filename = output_to_share['output']['filename']
        asset_subfolder = output_to_share['output']['subfolder']

    if asset_subfolder:
        asset_filepath = os.path.join(output_dir, asset_subfolder, asset_filename)
    else:
        asset_filepath = os.path.join(output_dir, asset_filename)

    # get the mime type of the asset
    assetFileType = mimetypes.guess_type(asset_filepath)[0]

    share_website_host = "UNKNOWN"
    if "comfyworkflows" in share_destinations:
        share_website_host = "https://comfyworkflows.com"
        share_endpoint = f"{share_website_host}/api"

        # get presigned urls
        # NOTE(review): verify_ssl=False disables certificate validation for
        # all of these uploads — presumably deliberate, but worth confirming.
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.post(
                f"{share_endpoint}/get_presigned_urls",
                json={
                    "assetFileName": asset_filename,
                    "assetFileType": assetFileType,
                    "workflowJsonFileName": 'workflow.json',
                    "workflowJsonFileType": 'application/json',
                },
            ) as resp:
                assert resp.status == 200
                presigned_urls_json = await resp.json()
                assetFilePresignedUrl = presigned_urls_json["assetFilePresignedUrl"]
                assetFileKey = presigned_urls_json["assetFileKey"]
                workflowJsonFilePresignedUrl = presigned_urls_json["workflowJsonFilePresignedUrl"]
                workflowJsonFileKey = presigned_urls_json["workflowJsonFileKey"]

        # upload asset
        # NOTE(review): the file handle passed to session.put is never
        # explicitly closed.
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(assetFilePresignedUrl, data=open(asset_filepath, "rb")) as resp:
                assert resp.status == 200

        # upload workflow json
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(workflowJsonFilePresignedUrl, data=json.dumps(prompt['workflow']).encode('utf-8')) as resp:
                assert resp.status == 200

        # Describe every model file referenced by the workflow (name, sha256,
        # path relative to the ComfyUI base) so the server can list them.
        model_filenames = extract_model_file_names(prompt['workflow'])
        model_file_paths = find_file_paths(folder_paths.base_path, model_filenames)

        models_info = {}
        for filename, filepath in model_file_paths.items():
            models_info[filename] = {
                "filename": filename,
                "sha256_checksum": compute_sha256_checksum(filepath),
                "relative_path": os.path.relpath(filepath, folder_paths.base_path),
            }

        # make a POST request to /api/upload_workflow with form data key values
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            form = aiohttp.FormData()
            if comfyworkflows_sharekey:
                form.add_field("shareKey", comfyworkflows_sharekey)
            form.add_field("source", "comfyui_manager")
            form.add_field("assetFileKey", assetFileKey)
            form.add_field("assetFileType", assetFileType)
            form.add_field("workflowJsonFileKey", workflowJsonFileKey)
            form.add_field("sharedWorkflowWorkflowJsonString", json.dumps(prompt['workflow']))
            form.add_field("sharedWorkflowPromptJsonString", json.dumps(prompt['output']))
            form.add_field("shareWorkflowCredits", credits)
            form.add_field("shareWorkflowTitle", title)
            form.add_field("shareWorkflowDescription", description)
            form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
            form.add_field("currentSnapshot", json.dumps(await core.get_current_snapshot()))
            form.add_field("modelsInfo", json.dumps(models_info))

            async with session.post(
                f"{share_endpoint}/upload_workflow",
                data=form,
            ) as resp:
                assert resp.status == 200
                upload_workflow_json = await resp.json()
                # workflowId is only bound on this branch; the final response
                # only reads it when "comfyworkflows" is in share_destinations.
                workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            # Imported lazily so the optional matrix_client dependency is only
            # required when actually sharing to Matrix.
            from matrix_client.api import MatrixHttpApi
            from matrix_client.client import MatrixClient

            # Normalize the homeserver to an https:// URL.
            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
            homeserver = homeserver.replace("http://", "https://")
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = MatrixClient(homeserver)
            try:
                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
                if not token:
                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
            except:
                # NOTE(review): bare except — any login failure maps to 400.
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            # Upload the image and the workflow JSON as Matrix media, then
            # post the text message followed by both attachments to the room.
            matrix = MatrixHttpApi(homeserver, token=token)
            with open(asset_filepath, 'rb') as f:
                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']

            # NOTE(review): prompt['workflow'] is passed as-is (not serialized
            # with json.dumps like the presigned upload above) — confirm
            # media_upload accepts this content.
            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']

            text_content = ""
            if title:
                text_content += f"{title}\n"
            if description:
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            matrix.send_message(comfyui_share_room_id, text_content)
            matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
            matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
        except:
            # NOTE(review): bare except — any Matrix failure (including the
            # import) is logged and mapped to a 500 response.
            import traceback
            traceback.print_exc()
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)

    # Summarize per-destination results; fields for unselected destinations
    # are None.
    return web.json_response({
        "comfyworkflows": {
            "url": None if "comfyworkflows" not in share_destinations else f"{share_website_host}/workflows/{workflowId}",
        },
        "matrix": {
            "success": None if "matrix" not in share_destinations else True
        }
    }, content_type='application/json', status=200)
|
||||
Reference in New Issue
Block a user