restructuring
The existing cache-based implementation will be retained as a fallback under legacy/..., while glob/... will be updated to a cacheless implementation.
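This commit routes the manager's path and environment globals through a shared `context` module in `comfyui_manager/common/` instead of `manager_core` (imported as `core`), presumably so that the legacy and cacheless implementations can share the same state. The contents of `context` are not shown in this diff; the sketch below is only an assumption, listing the attributes referenced by the call sites that follow (the defaults, types, and stubbed tag lookup are hypothetical).

```python
# comfyui_manager/common/context.py -- minimal sketch, not the actual module.
# Attribute names are taken from the call sites in the diff below; the
# defaults, types, and stubbed tag lookup are assumptions.
from typing import Optional

comfy_path: Optional[str] = None                   # ComfyUI install dir, set at startup
manager_config_path: Optional[str] = None          # manager config file
manager_snapshot_path: Optional[str] = None        # saved snapshot .json files
manager_startup_script_path: Optional[str] = None  # e.g. restore-snapshot.json lives here
manager_components_path: Optional[str] = None      # saved .json / .pack components
manager_batch_history_path: Optional[str] = None   # per-batch task result history


def get_comfyui_tag() -> Optional[str]:
    """Return the checked-out ComfyUI tag, or None when not on a tag (stubbed here)."""
    return None
```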
@@ -19,9 +19,10 @@ import asyncio
 from collections import deque
 
 from . import manager_core as core
-from . import manager_util
-from . import cm_global
-from . import manager_downloader
+from ..common import manager_util
+from ..common import cm_global
+from ..common import manager_downloader
+from ..common import context
 
 
 logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
@@ -160,10 +161,10 @@ class ManagerFuncsInComfyUI(core.ManagerFuncs):
 
 core.manager_funcs = ManagerFuncsInComfyUI()
 
-from .manager_downloader import download_url, download_url_with_agent
+from comfyui_manager.common.manager_downloader import download_url, download_url_with_agent
 
-core.comfy_path = os.path.dirname(folder_paths.__file__)
-core.js_path = os.path.join(core.comfy_path, "web", "extensions")
+context.comfy_path = os.path.dirname(folder_paths.__file__)
+core.js_path = os.path.join(context.comfy_path, "web", "extensions")
 
 local_db_model = os.path.join(manager_util.comfyui_manager_path, "model-list.json")
 local_db_alter = os.path.join(manager_util.comfyui_manager_path, "alter-list.json")
@@ -214,7 +215,7 @@ def print_comfyui_version():
         is_detached = repo.head.is_detached
         current_branch = repo.active_branch.name
 
-        comfyui_tag = core.get_comfyui_tag()
+        comfyui_tag = context.get_comfyui_tag()
 
         try:
             if not os.environ.get('__COMFYUI_DESKTOP_VERSION__') and core.comfy_ui_commit_datetime.date() < core.comfy_ui_required_commit_datetime.date():
@@ -428,7 +429,7 @@ class TaskBatch:
 
     def finalize(self):
        if self.batch_id is not None:
-            batch_path = os.path.join(core.manager_batch_history_path, self.batch_id+".json")
+            batch_path = os.path.join(context.manager_batch_history_path, self.batch_id+".json")
            json_obj = {
                "batch": self.batch_json,
                "nodepack_result": self.nodepack_result,
@@ -807,7 +808,7 @@ async def queue_batch(request):
 
 @routes.get("/v2/manager/queue/history_list")
 async def get_history_list(request):
-    history_path = core.manager_batch_history_path
+    history_path = context.manager_batch_history_path
 
     try:
         files = [os.path.join(history_path, f) for f in os.listdir(history_path) if os.path.isfile(os.path.join(history_path, f))]
@@ -824,7 +825,7 @@ async def get_history_list(request):
 async def get_history(request):
     try:
         json_name = request.rel_url.query["id"]+'.json'
-        batch_path = os.path.join(core.manager_batch_history_path, json_name)
+        batch_path = os.path.join(context.manager_batch_history_path, json_name)
 
         with open(batch_path, 'r', encoding='utf-8') as file:
             json_str = file.read()
@@ -1133,7 +1134,7 @@ async def fetch_externalmodel_list(request):
 
 @PromptServer.instance.routes.get("/v2/snapshot/getlist")
 async def get_snapshot_list(request):
-    items = [f[:-5] for f in os.listdir(core.manager_snapshot_path) if f.endswith('.json')]
+    items = [f[:-5] for f in os.listdir(context.manager_snapshot_path) if f.endswith('.json')]
     items.sort(reverse=True)
     return web.json_response({'items': items}, content_type='application/json')
 
@@ -1147,7 +1148,7 @@ async def remove_snapshot(request):
     try:
         target = request.rel_url.query["target"]
 
-        path = os.path.join(core.manager_snapshot_path, f"{target}.json")
+        path = os.path.join(context.manager_snapshot_path, f"{target}.json")
         if os.path.exists(path):
             os.remove(path)
 
@@ -1165,12 +1166,12 @@ async def restore_snapshot(request):
     try:
         target = request.rel_url.query["target"]
 
-        path = os.path.join(core.manager_snapshot_path, f"{target}.json")
+        path = os.path.join(context.manager_snapshot_path, f"{target}.json")
         if os.path.exists(path):
-            if not os.path.exists(core.manager_startup_script_path):
-                os.makedirs(core.manager_startup_script_path)
+            if not os.path.exists(context.manager_startup_script_path):
+                os.makedirs(context.manager_startup_script_path)
 
-            target_path = os.path.join(core.manager_startup_script_path, "restore-snapshot.json")
+            target_path = os.path.join(context.manager_startup_script_path, "restore-snapshot.json")
             shutil.copy(path, target_path)
 
             logging.info(f"Snapshot restore scheduled: `{target}`")
@@ -1726,7 +1727,7 @@ async def get_notice(request):
             if version_tag is not None:
                 markdown_content += f"<HR>ComfyUI: {version_tag} [Desktop]"
         else:
-            version_tag = core.get_comfyui_tag()
+            version_tag = context.get_comfyui_tag()
             if version_tag is None:
                 markdown_content += f"<HR>ComfyUI: {core.comfy_ui_revision}[{comfy_ui_hash[:6]}]({core.comfy_ui_commit_datetime.date()})"
             else:
@@ -1803,15 +1804,15 @@ async def save_component(request):
     name = data['name']
     workflow = data['workflow']
 
-    if not os.path.exists(core.manager_components_path):
-        os.mkdir(core.manager_components_path)
+    if not os.path.exists(context.manager_components_path):
+        os.mkdir(context.manager_components_path)
 
     if 'packname' in workflow and workflow['packname'] != '':
         sanitized_name = manager_util.sanitize_filename(workflow['packname']) + '.pack'
     else:
         sanitized_name = manager_util.sanitize_filename(name) + '.json'
 
-    filepath = os.path.join(core.manager_components_path, sanitized_name)
+    filepath = os.path.join(context.manager_components_path, sanitized_name)
     components = {}
     if os.path.exists(filepath):
         with open(filepath) as f:
@@ -1828,14 +1829,14 @@ async def save_component(request):
 
 @routes.post("/v2/manager/component/loads")
 async def load_components(request):
-    if os.path.exists(core.manager_components_path):
+    if os.path.exists(context.manager_components_path):
         try:
-            json_files = [f for f in os.listdir(core.manager_components_path) if f.endswith('.json')]
-            pack_files = [f for f in os.listdir(core.manager_components_path) if f.endswith('.pack')]
+            json_files = [f for f in os.listdir(context.manager_components_path) if f.endswith('.json')]
+            pack_files = [f for f in os.listdir(context.manager_components_path) if f.endswith('.pack')]
 
             components = {}
             for json_file in json_files + pack_files:
-                file_path = os.path.join(core.manager_components_path, json_file)
+                file_path = os.path.join(context.manager_components_path, json_file)
                 with open(file_path, 'r') as file:
                     try:
                         # When there is a conflict between the .pack and the .json, the pack takes precedence and overrides.
@@ -1923,7 +1924,7 @@ async def default_cache_update():
 
 threading.Thread(target=lambda: asyncio.run(default_cache_update())).start()
 
-if not os.path.exists(core.manager_config_path):
+if not os.path.exists(context.manager_config_path):
     core.get_config()
     core.write_config()
 