Merge branch 'feat/cacheless-v2' into draft-v4

Dr.Lt.Data
2025-04-12 20:11:33 +09:00
11 changed files with 162 additions and 58 deletions

View File

@@ -0,0 +1,17 @@
import enum

class NetworkMode(enum.Enum):
    PUBLIC = "public"
    PRIVATE = "private"
    OFFLINE = "offline"

class SecurityLevel(enum.Enum):
    STRONG = "strong"
    NORMAL = "normal"
    NORMAL_MINUS = "normal-minus"
    WEAK = "weak"

class DBMode(enum.Enum):
    LOCAL = "local"
    CACHE = "cache"
    REMOTE = "remote"
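For context, a minimal sketch of how a raw config string could be mapped onto these enums; the helper name and the PUBLIC fallback are assumptions for illustration, not part of this commit.

from .enums import NetworkMode

def parse_network_mode(raw: str) -> NetworkMode:
    # Hypothetical helper: normalize a config string and fall back to PUBLIC
    # when the value does not match any enum member.
    try:
        return NetworkMode(raw.strip().lower())
    except ValueError:
        return NetworkMode.PUBLIC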

View File

@@ -38,7 +38,7 @@ from . import manager_util
from . import git_utils
from . import manager_downloader
from .node_package import InstalledNodePackage
from .enums import NetworkMode, SecurityLevel, DBMode
version_code = [4, 0]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
@@ -237,7 +237,7 @@ def update_user_directory(user_dir):
if not os.path.exists(manager_util.cache_dir):
os.makedirs(manager_util.cache_dir)
if not os.path.exists(manager_batch_history_path):
os.makedirs(manager_batch_history_path)
@@ -558,7 +558,7 @@ class UnifiedManager:
ver = str(manager_util.StrictVersion(info['version']))
return {'id': cnr['id'], 'cnr': cnr, 'ver': ver}
else:
return None
return {'id': info['id'], 'ver': info['version']}
else:
return None
@@ -734,7 +734,7 @@ class UnifiedManager:
return latest
async def reload(self, cache_mode, dont_wait=True):
async def reload(self, cache_mode, dont_wait=True, update_cnr_map=True):
self.custom_node_map_cache = {}
self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
self.nightly_inactive_nodes = {} # node_id -> fullpath
@@ -742,17 +742,18 @@ class UnifiedManager:
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
self.active_nodes = {} # node_id -> node_version * fullpath
if get_config()['network_mode'] != 'public':
if get_config()['network_mode'] != 'public' or manager_util.is_manager_pip_package():
dont_wait = True
# reload 'cnr_map' and 'repo_cnr_map'
cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode=='cache', dont_wait=dont_wait)
if update_cnr_map:
# reload 'cnr_map' and 'repo_cnr_map'
cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode=='cache', dont_wait=dont_wait)
for x in cnrs:
self.cnr_map[x['id']] = x
if 'repository' in x:
normalized_url = git_utils.normalize_url(x['repository'])
self.repo_cnr_map[normalized_url] = x
for x in cnrs:
self.cnr_map[x['id']] = x
if 'repository' in x:
normalized_url = git_utils.normalize_url(x['repository'])
self.repo_cnr_map[normalized_url] = x
# reload node status info from custom_nodes/*
for custom_nodes_path in folder_paths.get_folder_paths('custom_nodes'):
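Illustrative call sites for the new update_cnr_map flag; the calls below are a sketch, not lines from this commit.

# Full reload: re-fetch registry data and rebuild cnr_map/repo_cnr_map.
await core.unified_manager.reload('cache', dont_wait=True)

# Lightweight reload: only rescan local node state, keeping the existing maps.
await core.unified_manager.reload('cache', dont_wait=True, update_cnr_map=False)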
@@ -1677,9 +1678,9 @@ def read_config():
'model_download_by_agent': get_bool('model_download_by_agent', False),
'downgrade_blacklist': default_conf.get('downgrade_blacklist', '').lower(),
'always_lazy_install': get_bool('always_lazy_install', False),
'network_mode': default_conf.get('network_mode', 'public').lower(),
'security_level': default_conf.get('security_level', 'normal').lower(),
'db_mode': default_conf.get('db_mode', 'cache').lower(),
'network_mode': default_conf.get('network_mode', NetworkMode.PUBLIC.value).lower(),
'security_level': default_conf.get('security_level', SecurityLevel.NORMAL.value).lower(),
'db_mode': default_conf.get('db_mode', DBMode.CACHE.value).lower(),
}
except Exception:
@@ -1700,9 +1701,9 @@ def read_config():
'model_download_by_agent': False,
'downgrade_blacklist': '',
'always_lazy_install': False,
'network_mode': 'public', # public | private | offline
'security_level': 'normal', # strong | normal | normal- | weak
'db_mode': 'cache', # local | cache | remote
'network_mode': NetworkMode.OFFLINE.value,
'security_level': SecurityLevel.NORMAL.value,
'db_mode': DBMode.CACHE.value,
}
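The enum members carry the same string values that were previously hard-coded, as a quick sanity check shows:

assert NetworkMode.PUBLIC.value == 'public'
assert SecurityLevel.NORMAL.value == 'normal'
assert DBMode.CACHE.value == 'cache'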
@@ -2199,7 +2200,7 @@ async def get_data_by_mode(mode, filename, channel_url=None):
cache_uri = str(manager_util.simple_hash(uri))+'_'+filename
cache_uri = os.path.join(manager_util.cache_dir, cache_uri)
if get_config()['network_mode'] == 'offline':
if get_config()['network_mode'] == 'offline' or manager_util.is_manager_pip_package():
# offline network mode
if os.path.exists(cache_uri):
json_obj = await manager_util.get_data(cache_uri)
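A condensed sketch of the branch above: a pip-installed manager is treated exactly like network_mode == 'offline' and is served from the on-disk cache when a cached copy exists. The offline_like name is only a paraphrase.

# Restatement of the guard applied above (no new logic).
offline_like = get_config()['network_mode'] == 'offline' or manager_util.is_manager_pip_package()
if offline_like and os.path.exists(cache_uri):
    json_obj = await manager_util.get_data(cache_uri)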

View File

@@ -25,7 +25,12 @@ from . import manager_downloader
logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
logging.info("[ComfyUI-Manager] network_mode: " + core.get_config()['network_mode'])
if manager_util.is_manager_pip_package():
    network_mode_description = "offline"
else:
    network_mode_description = core.get_config()['network_mode']
logging.info("[ComfyUI-Manager] network_mode: " + network_mode_description)
comfy_ui_hash = "-"
comfyui_tag = None
@@ -411,7 +416,7 @@ class TaskBatch:
item = self.tasks[self.current_index]
self.current_index += 1
return item
def done_count(self):
return len(self.nodepack_result) + len(self.model_result)
@@ -487,7 +492,7 @@ async def task_worker():
ui_id, cnr_id = item
core.unified_manager.unified_enable(cnr_id)
return 'success'
async def do_update(item):
ui_id, node_name, node_ver = item
@@ -664,9 +669,9 @@ async def task_worker():
logging.info(f"\n[ComfyUI-Manager] A tasks batch(batch_id={cur_batch.batch_id}) is completed.\nstat={cur_batch.stats}")
res = {'status': 'batch-done',
'nodepack_result': cur_batch.nodepack_result,
'nodepack_result': cur_batch.nodepack_result,
'model_result': cur_batch.model_result,
'total_count': cur_batch.total_count(),
'total_count': cur_batch.total_count(),
'done_count': cur_batch.done_count(),
'batch_id': cur_batch.batch_id,
'remaining_batch_count': len(task_batch_queue) }
@@ -773,10 +778,10 @@ async def queue_batch(request):
res = await _update_custom_node(x)
if res.status != 200:
failed.add(x[0])
elif k == 'update_comfyui':
await update_comfyui(None)
elif k == 'disable':
for x in v:
await _disable_node(x)
@@ -988,7 +993,7 @@ def populate_markdown(x):
# freeze imported version
startup_time_installed_node_packs = core.get_installed_node_packs()
@routes.get("/customnode/installed")
@routes.get("/v2/customnode/installed")
async def installed_list(request):
mode = request.query.get('mode', 'default')
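A hedged client-side sketch of hitting the relocated endpoint; the base URL and the aiohttp usage are assumptions for illustration only.

import aiohttp

async def fetch_installed(base_url='http://127.0.0.1:8188', mode='default'):
    # Query the renamed v2 route; 'mode' matches the query parameter read above.
    async with aiohttp.ClientSession() as session:
        async with session.get(f'{base_url}/v2/customnode/installed', params={'mode': mode}) as resp:
            return await resp.json()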
@@ -1294,7 +1299,7 @@ async def abort_queue(request):
if len(task_batch_queue) > 0:
task_batch_queue[0].abort()
task_batch_queue.popleft()
return web.Response(status=200)
@@ -1396,10 +1401,10 @@ async def queue_start(request):
with task_worker_lock:
finalize_temp_queue_batch()
return _queue_start()
def _queue_start():
global task_worker_thread
if task_worker_thread is not None and task_worker_thread.is_alive():
return web.Response(status=201) # already in-progress
@@ -1588,7 +1593,7 @@ async def check_whitelist_for_model(item):
async def install_model(request):
json_data = await request.json()
return await _install_model(json_data)
async def _install_model(json_data):
if not is_allowed_security_level('middle'):
@@ -1895,7 +1900,7 @@ async def default_cache_update():
logging.error(f"[ComfyUI-Manager] Failed to perform initial fetching '{filename}': {e}")
traceback.print_exc()
if core.get_config()['network_mode'] != 'offline':
if core.get_config()['network_mode'] != 'offline' and not manager_util.is_manager_pip_package():
a = get_cache("custom-node-list.json")
b = get_cache("extension-node-map.json")
c = get_cache("model-list.json")
@@ -1910,6 +1915,8 @@ async def default_cache_update():
# load at least once
await core.unified_manager.reload('remote', dont_wait=False)
await core.unified_manager.get_custom_nodes(channel_url, 'remote')
else:
await core.unified_manager.reload('remote', dont_wait=False, update_cnr_map=False)
logging.info("[ComfyUI-Manager] All startup tasks have been completed.")

View File

@@ -25,6 +25,8 @@ cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also up
use_uv = False
def is_manager_pip_package():
    # Heuristic: the manager is considered pip-installed when no 'custom_nodes'
    # directory exists alongside comfyui_manager_path.
    return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))
def add_python_path_to_env():
if platform.system() != "Windows":