Compare commits
276 Commits
feat/add-t
...
draft-v4-s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
24ca0ab538 | ||
|
|
62da330182 | ||
|
|
5316ec1b4d | ||
|
|
e730dca1ad | ||
|
|
8da30640bb | ||
|
|
6f4eb88e07 | ||
|
|
d9592b9dab | ||
|
|
b87ada72aa | ||
|
|
83363ba1f0 | ||
|
|
a2a7349ce4 | ||
|
|
23ebe7f718 | ||
|
|
e04264cfa3 | ||
|
|
8d29e5037f | ||
|
|
6926ed45b0 | ||
|
|
736b85b8bb | ||
|
|
9e3361bc31 | ||
|
|
6e10381020 | ||
|
|
a1d37d379c | ||
|
|
07d87db7a2 | ||
|
|
4e556673d2 | ||
|
|
f421304fc1 | ||
|
|
6867616973 | ||
|
|
c9271b1686 | ||
|
|
12eb6863da | ||
|
|
4834874091 | ||
|
|
8759ebf200 | ||
|
|
d4715aebef | ||
|
|
0fe2ade7bb | ||
|
|
0c71565535 | ||
|
|
cf8029ecd4 | ||
|
|
6a637091a2 | ||
|
|
31eba60012 | ||
|
|
51e58e9078 | ||
|
|
4a1e76730a | ||
|
|
5599bb028b | ||
|
|
552c6da0cc | ||
|
|
cc6817a891 | ||
|
|
fb48d1b485 | ||
|
|
1c336dad6b | ||
|
|
a4940d46cd | ||
|
|
499b2f44c1 | ||
|
|
2b200c9281 | ||
|
|
36a900c98f | ||
|
|
5236b03f66 | ||
|
|
8be35e3621 | ||
|
|
509f00fe89 | ||
|
|
a98b87f148 | ||
|
|
ae9b2b3b72 | ||
|
|
02e1ec0ae3 | ||
|
|
daefb0f120 | ||
|
|
ff0604e3b6 | ||
|
|
20e41e22fa | ||
|
|
59264c1fd9 | ||
|
|
a0e3bdd594 | ||
|
|
6580aaf3ad | ||
|
|
0b46701b60 | ||
|
|
0bb4effede | ||
|
|
b07082a52d | ||
|
|
04f267f5a7 | ||
|
|
03ccce2804 | ||
|
|
e894bd9f24 | ||
|
|
10e6988273 | ||
|
|
905b61e5d8 | ||
|
|
ee69d393ae | ||
|
|
cab39973ae | ||
|
|
d93f5d07bb | ||
|
|
ba00ffe1ae | ||
|
|
6afaf5eaf5 | ||
|
|
d30459cc34 | ||
|
|
e92fbb7b1b | ||
|
|
42d464b532 | ||
|
|
c2e9e5c63a | ||
|
|
bc36726925 | ||
|
|
22725b0188 | ||
|
|
7abbff8c31 | ||
|
|
6236f4bcf4 | ||
|
|
3c3e80f77f | ||
|
|
4aae2fb289 | ||
|
|
66ff07752f | ||
|
|
5cf92f2742 | ||
|
|
6d3fddc474 | ||
|
|
66d4ad6174 | ||
|
|
2a366a1607 | ||
|
|
d87a0995b4 | ||
|
|
9a73a41e04 | ||
|
|
ba041b36bc | ||
|
|
f5f9de69b4 | ||
|
|
71e56c62e8 | ||
|
|
a0b0c2b963 | ||
|
|
0f496619fd | ||
|
|
5fdd6a441a | ||
|
|
00f287bb63 | ||
|
|
785268efa6 | ||
|
|
2c976d9394 | ||
|
|
1e32582642 | ||
|
|
6f8f6d07f5 | ||
|
|
3958111e76 | ||
|
|
86fcc4af74 | ||
|
|
2fd26756df | ||
|
|
478f4b74d8 | ||
|
|
73d0d2a1bb | ||
|
|
546db08ec4 | ||
|
|
0dd41a8670 | ||
|
|
82c0c89f46 | ||
|
|
f4ce0fd5f1 | ||
|
|
c3798bf4c2 | ||
|
|
ff80b6ccb0 | ||
|
|
e729217116 | ||
|
|
94c695daca | ||
|
|
9f189f0420 | ||
|
|
ad09e53f60 | ||
|
|
092a7a5f3f | ||
|
|
f45649bd25 | ||
|
|
2595cc5ed7 | ||
|
|
2f62190c6f | ||
|
|
577314984c | ||
|
|
f0346b955b | ||
|
|
70139ded4a | ||
|
|
bf379900e1 | ||
|
|
9bafc90f5e | ||
|
|
fce0d9e88e | ||
|
|
2b3b154989 | ||
|
|
948d2440a1 | ||
|
|
5adbe1ce7a | ||
|
|
8157d34ffa | ||
|
|
3ec8cb2204 | ||
|
|
0daa826543 | ||
|
|
a66028da58 | ||
|
|
807c9e6872 | ||
|
|
e71f3774ba | ||
|
|
dd7314bf10 | ||
|
|
f33bc127dc | ||
|
|
db92b87782 | ||
|
|
eba41c8693 | ||
|
|
c855308162 | ||
|
|
73d971bed8 | ||
|
|
bcfe0c2874 | ||
|
|
931ff666ae | ||
|
|
18b6d86cc4 | ||
|
|
086040f858 | ||
|
|
adbeb527d6 | ||
|
|
043176168d | ||
|
|
3c5efa0662 | ||
|
|
9b739bcbbf | ||
|
|
db89076e48 | ||
|
|
19b341ef18 | ||
|
|
be3713b1a3 | ||
|
|
99c4415cfb | ||
|
|
7b311f2ccf | ||
|
|
4aeabfe0a7 | ||
|
|
431ed02194 | ||
|
|
07f587ed83 | ||
|
|
0408341d82 | ||
|
|
5b3c9432f3 | ||
|
|
4a197e63f9 | ||
|
|
ad79a2ef45 | ||
|
|
0876a12fe9 | ||
|
|
c43c7ecc03 | ||
|
|
4a6dee3044 | ||
|
|
019acdd840 | ||
|
|
1c98512720 | ||
|
|
43041cebed | ||
|
|
23a09ad546 | ||
|
|
0836e8fe7c | ||
|
|
90196af8f8 | ||
|
|
002e549a86 | ||
|
|
1de6f859bf | ||
|
|
566fe05772 | ||
|
|
18772c6292 | ||
|
|
6278bddc9b | ||
|
|
f74bf71735 | ||
|
|
efe9ed68b2 | ||
|
|
7c1e75865d | ||
|
|
89530fc4e7 | ||
|
|
a0aee41f1a | ||
|
|
2049dd75f4 | ||
|
|
0864c35ba9 | ||
|
|
92c9f66671 | ||
|
|
223d6dad51 | ||
|
|
815784e809 | ||
|
|
2795d00d1e | ||
|
|
86dd0b4963 | ||
|
|
77a4f4819f | ||
|
|
b63d603482 | ||
|
|
e569b4e613 | ||
|
|
8a70997546 | ||
|
|
80d0a0f882 | ||
|
|
70b3997874 | ||
|
|
e8e4311068 | ||
|
|
cb0fa5829d | ||
|
|
a66f86d4af | ||
|
|
35d98dcea8 | ||
|
|
38fefde06d | ||
|
|
75ecb31f8c | ||
|
|
77133375ad | ||
|
|
c58b93ff51 | ||
|
|
7d8ebfe91b | ||
|
|
810381eab2 | ||
|
|
61dc6cf2de | ||
|
|
0205ebad2a | ||
|
|
09a94133ac | ||
|
|
1eb3c3b219 | ||
|
|
457845bb51 | ||
|
|
0c11b46585 | ||
|
|
c35100d9e9 | ||
|
|
847031cb04 | ||
|
|
d1ca6288a3 | ||
|
|
624ad4cfe6 | ||
|
|
f8d87bb452 | ||
|
|
f60b3505e0 | ||
|
|
addefbc511 | ||
|
|
c4314b25a3 | ||
|
|
921bb86127 | ||
|
|
d912fb0f8b | ||
|
|
e8fc053a32 | ||
|
|
ce3b2bab39 | ||
|
|
15e3699535 | ||
|
|
a4bf6bddbf | ||
|
|
f1b3c6b735 | ||
|
|
e923434d08 | ||
|
|
ddc9cd0fd5 | ||
|
|
d081db0c30 | ||
|
|
14298b0859 | ||
|
|
03ecda3cfe | ||
|
|
350cb767c3 | ||
|
|
f450dcbb57 | ||
|
|
32e003965a | ||
|
|
65f0764338 | ||
|
|
1bdb026079 | ||
|
|
b3a7fb9c3e | ||
|
|
c143c81a7e | ||
|
|
dd389ba0f8 | ||
|
|
46b1649ab8 | ||
|
|
89710412e4 | ||
|
|
931973b632 | ||
|
|
60aaa838e3 | ||
|
|
7e51286313 | ||
|
|
1246538bbb | ||
|
|
80518abf9d | ||
|
|
fc1ae2a18e | ||
|
|
3fd8d2049c | ||
|
|
35a6bcf20c | ||
|
|
0d75fc331e | ||
|
|
0a23e793e3 | ||
|
|
2c1c03e063 | ||
|
|
64059d2949 | ||
|
|
648aa7c4d3 | ||
|
|
274bb81a08 | ||
|
|
e2c90b4681 | ||
|
|
fa0a98ac6e | ||
|
|
e6e7b42415 | ||
|
|
0b7ef2e1d4 | ||
|
|
2fac67a9f9 | ||
|
|
8b9892de2e | ||
|
|
b3290dc909 | ||
|
|
3e3176eddb | ||
|
|
b1ef84894a | ||
|
|
c6cffc92c4 | ||
|
|
efb9fd2712 | ||
|
|
94b294ff93 | ||
|
|
99a9e33648 | ||
|
|
055d94a919 | ||
|
|
0978005240 | ||
|
|
1f796581ec | ||
|
|
f3a1716dad | ||
|
|
a1c3a0db1f | ||
|
|
9f80cc8a6b | ||
|
|
133786846e | ||
|
|
bdf297a5c6 | ||
|
|
6767254eb0 | ||
|
|
691cebd479 | ||
|
|
f3932cbf29 | ||
|
|
3f73a97037 | ||
|
|
226f1f5be4 | ||
|
|
7e45c07660 | ||
|
|
0c815036b9 |
49
README.md
49
README.md
@@ -215,13 +215,14 @@ The following settings are applied based on the section marked as `is_default`.
|
|||||||
downgrade_blacklist = <Set a list of packages to prevent downgrades. List them separated by commas.>
|
downgrade_blacklist = <Set a list of packages to prevent downgrades. List them separated by commas.>
|
||||||
security_level = <Set the security level => strong|normal|normal-|weak>
|
security_level = <Set the security level => strong|normal|normal-|weak>
|
||||||
always_lazy_install = <Whether to perform dependency installation on restart even in environments other than Windows.>
|
always_lazy_install = <Whether to perform dependency installation on restart even in environments other than Windows.>
|
||||||
network_mode = <Set the network mode => public|private|offline>
|
network_mode = <Set the network mode => public|private|offline|personal_cloud>
|
||||||
```
|
```
|
||||||
|
|
||||||
* network_mode:
|
* network_mode:
|
||||||
- public: An environment that uses a typical public network.
|
- public: An environment that uses a typical public network.
|
||||||
- private: An environment that uses a closed network, where a private node DB is configured via `channel_url`. (Uses cache if available)
|
- private: An environment that uses a closed network, where a private node DB is configured via `channel_url`. (Uses cache if available)
|
||||||
- offline: An environment that does not use any external connections when using an offline network. (Uses cache if available)
|
- offline: An environment that does not use any external connections when using an offline network. (Uses cache if available)
|
||||||
|
- personal_cloud: Applies relaxed security features in cloud environments such as Google Colab or Runpod, where strong security is not required.
|
||||||
|
|
||||||
|
|
||||||
## Additional Feature
|
## Additional Feature
|
||||||
@@ -312,31 +313,33 @@ When you run the `scan.sh` script:
|
|||||||
|
|
||||||
|
|
||||||
## Security policy
|
## Security policy
|
||||||
* Edit `config.ini` file: add `security_level = <LEVEL>`
|
|
||||||
* `strong`
|
|
||||||
* doesn't allow `high` and `middle` level risky feature
|
|
||||||
* `normal`
|
|
||||||
* doesn't allow `high` level risky feature
|
|
||||||
* `middle` level risky feature is available
|
|
||||||
* `normal-`
|
|
||||||
* doesn't allow `high` level risky feature if `--listen` is specified and not starts with `127.`
|
|
||||||
* `middle` level risky feature is available
|
|
||||||
* `weak`
|
|
||||||
* all feature is available
|
|
||||||
|
|
||||||
* `high` level risky features
|
The security settings are applied based on whether the ComfyUI server's listener is non-local and whether the network mode is set to `personal_cloud`.
|
||||||
* `Install via git url`, `pip install`
|
|
||||||
* Installation of custom nodes registered not in the `default channel`.
|
|
||||||
* Fix custom nodes
|
|
||||||
|
|
||||||
* `middle` level risky features
|
* **non-local**: When the server is launched with `--listen` and is bound to a network range other than the local `127.` range, allowing remote IP access.
|
||||||
* Uninstall/Update
|
* **personal\_cloud**: When the `network_mode` is set to `personal_cloud`.
|
||||||
* Installation of custom nodes registered in the `default channel`.
|
|
||||||
* Restore/Remove Snapshot
|
|
||||||
* Restart
|
### Risky Level Table
|
||||||
|
|
||||||
|
| Risky Level | features |
|
||||||
|
|-------------|---------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
|
| high+ | * `Install via git url`, `pip install`<BR>* Installation of nodepack registered not in the `default channel`. |
|
||||||
|
| high | * Fix nodepack |
|
||||||
|
| middle+ | * Uninstall/Update<BR>* Installation of nodepack registered in the `default channel`.<BR>* Restore/Remove Snapshot<BR>* Install model |
|
||||||
|
| middle | * Restart |
|
||||||
|
| low | * Update ComfyUI |
|
||||||
|
|
||||||
|
|
||||||
|
### Security Level Table
|
||||||
|
|
||||||
|
| Security Level | local | non-local (personal_cloud) | non-local (not personal_cloud) |
|
||||||
|
|----------------|--------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------|--------------------------------|
|
||||||
|
| strong | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed |
|
||||||
|
| normal | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available
|
||||||
|
| normal- | * All features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available
|
||||||
|
| weak | * All features are available | * All features are available | * `high+` and `middle+` level risky features are not allowed<BR>* `high`, `middle` and `low` level risky features are available
|
||||||
|
|
||||||
* `low` level risky features
|
|
||||||
* Update ComfyUI
|
|
||||||
|
|
||||||
|
|
||||||
# Disclaimer
|
# Disclaimer
|
||||||
|
|||||||
@@ -1,5 +1,10 @@
|
|||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
|
from aiohttp import web
|
||||||
|
from .common.manager_security import HANDLER_POLICY
|
||||||
|
from .common import manager_security
|
||||||
|
from comfy.cli_args import args
|
||||||
|
|
||||||
|
|
||||||
def prestartup():
|
def prestartup():
|
||||||
from . import prestartup_script # noqa: F401
|
from . import prestartup_script # noqa: F401
|
||||||
@@ -7,8 +12,6 @@ def prestartup():
|
|||||||
|
|
||||||
|
|
||||||
def start():
|
def start():
|
||||||
from comfy.cli_args import args
|
|
||||||
|
|
||||||
logging.info('[START] ComfyUI-Manager')
|
logging.info('[START] ComfyUI-Manager')
|
||||||
from .common import cm_global # noqa: F401
|
from .common import cm_global # noqa: F401
|
||||||
|
|
||||||
@@ -17,15 +20,21 @@ def start():
|
|||||||
try:
|
try:
|
||||||
from .legacy import manager_server # noqa: F401
|
from .legacy import manager_server # noqa: F401
|
||||||
from .legacy import share_3rdparty # noqa: F401
|
from .legacy import share_3rdparty # noqa: F401
|
||||||
|
from .legacy import manager_core as core
|
||||||
import nodes
|
import nodes
|
||||||
|
|
||||||
logging.info("[ComfyUI-Manager] Legacy UI is enabled.")
|
logging.info("[ComfyUI-Manager] Legacy UI is enabled.")
|
||||||
nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
|
nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("Error enabling legacy ComfyUI Manager frontend:", e)
|
print("Error enabling legacy ComfyUI Manager frontend:", e)
|
||||||
|
core = None
|
||||||
else:
|
else:
|
||||||
from .glob import manager_server # noqa: F401
|
from .glob import manager_server # noqa: F401
|
||||||
from .glob import share_3rdparty # noqa: F401
|
from .glob import share_3rdparty # noqa: F401
|
||||||
|
from .glob import manager_core as core
|
||||||
|
|
||||||
|
if core is not None:
|
||||||
|
manager_security.is_personal_cloud_mode = core.get_config()['network_mode'].lower() == 'personal_cloud'
|
||||||
|
|
||||||
|
|
||||||
def should_be_disabled(fullpath:str) -> bool:
|
def should_be_disabled(fullpath:str) -> bool:
|
||||||
@@ -33,8 +42,6 @@ def should_be_disabled(fullpath:str) -> bool:
|
|||||||
1. Disables the legacy ComfyUI-Manager.
|
1. Disables the legacy ComfyUI-Manager.
|
||||||
2. The blocklist can be expanded later based on policies.
|
2. The blocklist can be expanded later based on policies.
|
||||||
"""
|
"""
|
||||||
from comfy.cli_args import args
|
|
||||||
|
|
||||||
if not args.disable_manager:
|
if not args.disable_manager:
|
||||||
# In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
|
# In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
|
||||||
# It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
|
# It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
|
||||||
@@ -43,3 +50,55 @@ def should_be_disabled(fullpath:str) -> bool:
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_client_ip(request):
|
||||||
|
peername = request.transport.get_extra_info("peername")
|
||||||
|
if peername is not None:
|
||||||
|
host, port = peername
|
||||||
|
return host
|
||||||
|
|
||||||
|
return "unknown"
|
||||||
|
|
||||||
|
|
||||||
|
def create_middleware():
|
||||||
|
connected_clients = set()
|
||||||
|
is_local_mode = manager_security.is_loopback(args.listen)
|
||||||
|
|
||||||
|
@web.middleware
|
||||||
|
async def manager_middleware(request: web.Request, handler):
|
||||||
|
nonlocal connected_clients
|
||||||
|
|
||||||
|
# security policy for remote environments
|
||||||
|
prev_client_count = len(connected_clients)
|
||||||
|
client_ip = get_client_ip(request)
|
||||||
|
connected_clients.add(client_ip)
|
||||||
|
next_client_count = len(connected_clients)
|
||||||
|
|
||||||
|
if prev_client_count == 1 and next_client_count > 1:
|
||||||
|
manager_security.multiple_remote_alert()
|
||||||
|
|
||||||
|
policy = manager_security.get_handler_policy(handler)
|
||||||
|
is_banned = False
|
||||||
|
|
||||||
|
# policy check
|
||||||
|
if len(connected_clients) > 1:
|
||||||
|
if is_local_mode:
|
||||||
|
if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NON_LOCAL in policy:
|
||||||
|
is_banned = True
|
||||||
|
if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD in policy:
|
||||||
|
is_banned = not manager_security.is_personal_cloud_mode
|
||||||
|
|
||||||
|
if HANDLER_POLICY.BANNED in policy:
|
||||||
|
is_banned = True
|
||||||
|
|
||||||
|
if is_banned:
|
||||||
|
logging.warning(f"[Manager] Banning request from {client_ip}: {request.path}")
|
||||||
|
response = web.Response(text="[Manager] This request is banned.", status=403)
|
||||||
|
else:
|
||||||
|
response: web.Response = await handler(request)
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
return manager_middleware
|
||||||
|
|
||||||
@@ -46,9 +46,6 @@ comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
|
|||||||
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
|
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
|
||||||
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
|
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
|
||||||
|
|
||||||
if sys.version_info < (3, 13):
|
|
||||||
cm_global.pip_overrides = {'numpy': 'numpy<2'}
|
|
||||||
else:
|
|
||||||
cm_global.pip_overrides = {}
|
cm_global.pip_overrides = {}
|
||||||
|
|
||||||
if os.path.exists(os.path.join(manager_util.comfyui_manager_path, "pip_overrides.json")):
|
if os.path.exists(os.path.join(manager_util.comfyui_manager_path, "pip_overrides.json")):
|
||||||
@@ -152,9 +149,6 @@ class Ctx:
|
|||||||
with open(context.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
with open(context.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
||||||
cm_global.pip_overrides = json.load(json_file)
|
cm_global.pip_overrides = json.load(json_file)
|
||||||
|
|
||||||
if sys.version_info < (3, 13):
|
|
||||||
cm_global.pip_overrides = {'numpy': 'numpy<2'}
|
|
||||||
|
|
||||||
if os.path.exists(context.manager_pip_blacklist_path):
|
if os.path.exists(context.manager_pip_blacklist_path):
|
||||||
with open(context.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
|
with open(context.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
|
||||||
for x in f.readlines():
|
for x in f.readlines():
|
||||||
|
|||||||
@@ -180,7 +180,7 @@ def install_node(node_id, version=None):
|
|||||||
else:
|
else:
|
||||||
url = f"{base_url}/nodes/{node_id}/install?version={version}"
|
url = f"{base_url}/nodes/{node_id}/install?version={version}"
|
||||||
|
|
||||||
response = requests.get(url)
|
response = requests.get(url, verify=not manager_util.bypass_ssl)
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
# Convert the API response to a NodeVersion object
|
# Convert the API response to a NodeVersion object
|
||||||
return map_node_version(response.json())
|
return map_node_version(response.json())
|
||||||
@@ -191,7 +191,7 @@ def install_node(node_id, version=None):
|
|||||||
def all_versions_of_node(node_id):
|
def all_versions_of_node(node_id):
|
||||||
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
|
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
|
||||||
|
|
||||||
response = requests.get(url)
|
response = requests.get(url, verify=not manager_util.bypass_ssl)
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
return response.json()
|
return response.json()
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -106,4 +106,3 @@ def get_comfyui_tag():
|
|||||||
except Exception:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ class NetworkMode(enum.Enum):
|
|||||||
PUBLIC = "public"
|
PUBLIC = "public"
|
||||||
PRIVATE = "private"
|
PRIVATE = "private"
|
||||||
OFFLINE = "offline"
|
OFFLINE = "offline"
|
||||||
|
PERSONAL_CLOUD = "personal_cloud"
|
||||||
|
|
||||||
class SecurityLevel(enum.Enum):
|
class SecurityLevel(enum.Enum):
|
||||||
STRONG = "strong"
|
STRONG = "strong"
|
||||||
|
|||||||
@@ -55,7 +55,11 @@ def download_url(model_url: str, model_dir: str, filename: str):
|
|||||||
return aria2_download_url(model_url, model_dir, filename)
|
return aria2_download_url(model_url, model_dir, filename)
|
||||||
else:
|
else:
|
||||||
from torchvision.datasets.utils import download_url as torchvision_download_url
|
from torchvision.datasets.utils import download_url as torchvision_download_url
|
||||||
|
try:
|
||||||
return torchvision_download_url(model_url, model_dir, filename)
|
return torchvision_download_url(model_url, model_dir, filename)
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"[ComfyUI-Manager] Failed to download: {model_url} / {repr(e)}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
def aria2_find_task(dir: str, filename: str):
|
def aria2_find_task(dir: str, filename: str):
|
||||||
|
|||||||
36
comfyui_manager/common/manager_security.py
Normal file
36
comfyui_manager/common/manager_security.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
is_personal_cloud_mode = False
|
||||||
|
handler_policy = {}
|
||||||
|
|
||||||
|
class HANDLER_POLICY(Enum):
|
||||||
|
MULTIPLE_REMOTE_BAN_NON_LOCAL = 1
|
||||||
|
MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD = 2
|
||||||
|
BANNED = 3
|
||||||
|
|
||||||
|
|
||||||
|
def is_loopback(address):
|
||||||
|
import ipaddress
|
||||||
|
try:
|
||||||
|
return ipaddress.ip_address(address).is_loopback
|
||||||
|
except ValueError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def do_nothing():
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def get_handler_policy(x):
|
||||||
|
return handler_policy.get(x) or set()
|
||||||
|
|
||||||
|
def add_handler_policy(x, policy):
|
||||||
|
s = handler_policy.get(x)
|
||||||
|
if s is None:
|
||||||
|
s = set()
|
||||||
|
handler_policy[x] = s
|
||||||
|
|
||||||
|
s.add(policy)
|
||||||
|
|
||||||
|
|
||||||
|
multiple_remote_alert = do_nothing
|
||||||
@@ -15,8 +15,7 @@ import re
|
|||||||
import logging
|
import logging
|
||||||
import platform
|
import platform
|
||||||
import shlex
|
import shlex
|
||||||
from . import cm_global
|
from packaging import version
|
||||||
|
|
||||||
|
|
||||||
cache_lock = threading.Lock()
|
cache_lock = threading.Lock()
|
||||||
session_lock = threading.Lock()
|
session_lock = threading.Lock()
|
||||||
@@ -25,6 +24,7 @@ comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '
|
|||||||
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.
|
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.
|
||||||
|
|
||||||
use_uv = False
|
use_uv = False
|
||||||
|
bypass_ssl = False
|
||||||
|
|
||||||
def is_manager_pip_package():
|
def is_manager_pip_package():
|
||||||
return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))
|
return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))
|
||||||
@@ -58,62 +58,32 @@ def make_pip_cmd(cmd):
|
|||||||
# print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
|
# print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
|
||||||
class StrictVersion:
|
class StrictVersion:
|
||||||
def __init__(self, version_string):
|
def __init__(self, version_string):
|
||||||
|
self.obj = version.parse(version_string)
|
||||||
self.version_string = version_string
|
self.version_string = version_string
|
||||||
self.major = 0
|
self.major = self.obj.major
|
||||||
self.minor = 0
|
self.minor = self.obj.minor
|
||||||
self.patch = 0
|
self.patch = self.obj.micro
|
||||||
self.pre_release = None
|
|
||||||
self.parse_version_string()
|
|
||||||
|
|
||||||
def parse_version_string(self):
|
|
||||||
parts = self.version_string.split('.')
|
|
||||||
if not parts:
|
|
||||||
raise ValueError("Version string must not be empty")
|
|
||||||
|
|
||||||
self.major = int(parts[0])
|
|
||||||
self.minor = int(parts[1]) if len(parts) > 1 else 0
|
|
||||||
self.patch = int(parts[2]) if len(parts) > 2 else 0
|
|
||||||
|
|
||||||
# Handling pre-release versions if present
|
|
||||||
if len(parts) > 3:
|
|
||||||
self.pre_release = parts[3]
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
version = f"{self.major}.{self.minor}.{self.patch}"
|
return self.version_string
|
||||||
if self.pre_release:
|
|
||||||
version += f"-{self.pre_release}"
|
|
||||||
return version
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
return (self.major, self.minor, self.patch, self.pre_release) == \
|
return self.obj == other.obj
|
||||||
(other.major, other.minor, other.patch, other.pre_release)
|
|
||||||
|
|
||||||
def __lt__(self, other):
|
def __lt__(self, other):
|
||||||
if (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch):
|
return self.obj < other.obj
|
||||||
return self.pre_release_compare(self.pre_release, other.pre_release) < 0
|
|
||||||
return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def pre_release_compare(pre1, pre2):
|
|
||||||
if pre1 == pre2:
|
|
||||||
return 0
|
|
||||||
if pre1 is None:
|
|
||||||
return 1
|
|
||||||
if pre2 is None:
|
|
||||||
return -1
|
|
||||||
return -1 if pre1 < pre2 else 1
|
|
||||||
|
|
||||||
def __le__(self, other):
|
def __le__(self, other):
|
||||||
return self == other or self < other
|
return self.obj == other.obj or self.obj < other.obj
|
||||||
|
|
||||||
def __gt__(self, other):
|
def __gt__(self, other):
|
||||||
return not self <= other
|
return not self.obj <= other.obj
|
||||||
|
|
||||||
def __ge__(self, other):
|
def __ge__(self, other):
|
||||||
return not self < other
|
return not self.obj < other.obj
|
||||||
|
|
||||||
def __ne__(self, other):
|
def __ne__(self, other):
|
||||||
return not self == other
|
return not self.obj == other.obj
|
||||||
|
|
||||||
|
|
||||||
def simple_hash(input_string):
|
def simple_hash(input_string):
|
||||||
@@ -140,7 +110,7 @@ async def get_data(uri, silent=False):
|
|||||||
print(f"FETCH DATA from: {uri}", end="")
|
print(f"FETCH DATA from: {uri}", end="")
|
||||||
|
|
||||||
if uri.startswith("http"):
|
if uri.startswith("http"):
|
||||||
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
|
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=not bypass_ssl)) as session:
|
||||||
headers = {
|
headers = {
|
||||||
'Cache-Control': 'no-cache',
|
'Cache-Control': 'no-cache',
|
||||||
'Pragma': 'no-cache',
|
'Pragma': 'no-cache',
|
||||||
@@ -330,16 +300,9 @@ torch_torchvision_torchaudio_version_map = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def torch_rollback(prev):
|
||||||
class PIPFixer:
|
spec = prev.split('+')
|
||||||
def __init__(self, prev_pip_versions, comfyui_path, manager_files_path):
|
if len(spec) > 1:
|
||||||
self.prev_pip_versions = { **prev_pip_versions }
|
|
||||||
self.comfyui_path = comfyui_path
|
|
||||||
self.manager_files_path = manager_files_path
|
|
||||||
|
|
||||||
def torch_rollback(self):
|
|
||||||
spec = self.prev_pip_versions['torch'].split('+')
|
|
||||||
if len(spec) > 0:
|
|
||||||
platform = spec[1]
|
platform = spec[1]
|
||||||
else:
|
else:
|
||||||
cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
|
cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
|
||||||
@@ -363,6 +326,13 @@ class PIPFixer:
|
|||||||
|
|
||||||
subprocess.check_output(cmd, universal_newlines=True)
|
subprocess.check_output(cmd, universal_newlines=True)
|
||||||
|
|
||||||
|
|
||||||
|
class PIPFixer:
|
||||||
|
def __init__(self, prev_pip_versions, comfyui_path, manager_files_path):
|
||||||
|
self.prev_pip_versions = { **prev_pip_versions }
|
||||||
|
self.comfyui_path = comfyui_path
|
||||||
|
self.manager_files_path = manager_files_path
|
||||||
|
|
||||||
def fix_broken(self):
|
def fix_broken(self):
|
||||||
new_pip_versions = get_installed_packages(True)
|
new_pip_versions = get_installed_packages(True)
|
||||||
|
|
||||||
@@ -384,7 +354,7 @@ class PIPFixer:
|
|||||||
elif self.prev_pip_versions['torch'] != new_pip_versions['torch'] \
|
elif self.prev_pip_versions['torch'] != new_pip_versions['torch'] \
|
||||||
or self.prev_pip_versions['torchvision'] != new_pip_versions['torchvision'] \
|
or self.prev_pip_versions['torchvision'] != new_pip_versions['torchvision'] \
|
||||||
or self.prev_pip_versions['torchaudio'] != new_pip_versions['torchaudio']:
|
or self.prev_pip_versions['torchaudio'] != new_pip_versions['torchaudio']:
|
||||||
self.torch_rollback()
|
torch_rollback(self.prev_pip_versions['torch'])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error("[ComfyUI-Manager] Failed to restore PyTorch")
|
logging.error("[ComfyUI-Manager] Failed to restore PyTorch")
|
||||||
logging.error(e)
|
logging.error(e)
|
||||||
@@ -415,8 +385,7 @@ class PIPFixer:
|
|||||||
|
|
||||||
if len(targets) > 0:
|
if len(targets) > 0:
|
||||||
for x in targets:
|
for x in targets:
|
||||||
if sys.version_info < (3, 13):
|
cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}"])
|
||||||
cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}", "numpy<2"])
|
|
||||||
subprocess.check_output(cmd, universal_newlines=True)
|
subprocess.check_output(cmd, universal_newlines=True)
|
||||||
|
|
||||||
logging.info(f"[ComfyUI-Manager] 'opencv' dependencies were fixed: {targets}")
|
logging.info(f"[ComfyUI-Manager] 'opencv' dependencies were fixed: {targets}")
|
||||||
@@ -424,23 +393,6 @@ class PIPFixer:
|
|||||||
logging.error("[ComfyUI-Manager] Failed to restore opencv")
|
logging.error("[ComfyUI-Manager] Failed to restore opencv")
|
||||||
logging.error(e)
|
logging.error(e)
|
||||||
|
|
||||||
# fix numpy
|
|
||||||
if sys.version_info >= (3, 13):
|
|
||||||
logging.info("[ComfyUI-Manager] In Python 3.13 and above, PIP Fixer does not downgrade `numpy` below version 2.0. If you need to force a downgrade of `numpy`, please use `pip_auto_fix.list`.")
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
np = new_pip_versions.get('numpy')
|
|
||||||
if cm_global.pip_overrides.get('numpy') == 'numpy<2':
|
|
||||||
if np is not None:
|
|
||||||
if StrictVersion(np) >= StrictVersion('2'):
|
|
||||||
cmd = make_pip_cmd(['install', "numpy<2"])
|
|
||||||
subprocess.check_output(cmd , universal_newlines=True)
|
|
||||||
|
|
||||||
logging.info("[ComfyUI-Manager] 'numpy' dependency were fixed")
|
|
||||||
except Exception as e:
|
|
||||||
logging.error("[ComfyUI-Manager] Failed to restore numpy")
|
|
||||||
logging.error(e)
|
|
||||||
|
|
||||||
# fix missing frontend
|
# fix missing frontend
|
||||||
try:
|
try:
|
||||||
# NOTE: package name in requirements is 'comfyui-frontend-package'
|
# NOTE: package name in requirements is 'comfyui-frontend-package'
|
||||||
@@ -540,3 +492,69 @@ def robust_readlines(fullpath):
|
|||||||
|
|
||||||
print(f"[ComfyUI-Manager] Failed to recognize encoding for: {fullpath}")
|
print(f"[ComfyUI-Manager] Failed to recognize encoding for: {fullpath}")
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def restore_pip_snapshot(pips, options):
    """Reinstall pip packages recorded in a snapshot.

    `pips` maps a requirement spec (e.g. ``pkg==1.0``) to its install URL:
    ``''`` for a plain PyPI requirement, a ``file:`` URL for a local wheel,
    or any other URL for a remote source.  Which groups are restored is
    controlled by flags in `options` (``--pip-non-url``,
    ``--pip-non-local-url``, ``--pip-local-url``).  Failures are collected
    and printed at the end; nothing is raised.
    """
    non_url = []
    local_url = []
    non_local_url = []

    for spec, url in pips.items():
        # NOTE: skip torch related packages — reinstalling them could
        # replace the user's existing (e.g. CUDA-specific) builds.
        if spec.startswith(("torch==", "torchvision==", "torchaudio==", "nvidia-")):
            continue

        if url == "":
            non_url.append(spec)
        elif url.startswith('file:'):
            local_url.append(url)
        else:
            non_local_url.append(url)

    failed = []

    def _pip_install(args):
        """Run ``pip <args>``; return True on success, False on failure.

        BUGFIX: the original compared subprocess.check_output()'s return
        value (the captured stdout bytes) against 0; bytes never equal 0,
        so even successful installs were treated as failures.  Success is
        signalled by check_output() not raising CalledProcessError.
        """
        try:
            subprocess.check_output(make_pip_cmd(args))
            return True
        except Exception:
            return False

    # restore other pips
    if '--pip-non-url' in options:
        # Try all plain requirements in a single pip invocation first.
        if not _pip_install(['install'] + non_url):
            # Fallback: install one-by-one so a single bad package does
            # not block the rest.
            for spec in non_url:
                if not _pip_install(['install', '--no-deps', spec]):
                    failed.append(spec)

    if '--pip-non-local-url' in options:
        for url in non_local_url:
            if not _pip_install(['install', '--no-deps', url]):
                failed.append(url)

    if '--pip-local-url' in options:
        for url in local_url:
            if not _pip_install(['install', '--no-deps', url]):
                failed.append(url)

    print(f"Installation failed for pip packages: {failed}")
|
||||||
@@ -2,6 +2,8 @@ import sys
|
|||||||
import subprocess
|
import subprocess
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
from . import manager_util
|
||||||
|
|
||||||
|
|
||||||
def security_check():
|
def security_check():
|
||||||
print("[START] Security scan")
|
print("[START] Security scan")
|
||||||
@@ -66,18 +68,23 @@ https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situati
|
|||||||
"lolMiner": [os.path.join(comfyui_path, 'lolMiner')]
|
"lolMiner": [os.path.join(comfyui_path, 'lolMiner')]
|
||||||
}
|
}
|
||||||
|
|
||||||
installed_pips = subprocess.check_output([sys.executable, '-m', "pip", "freeze"], text=True)
|
installed_pips = subprocess.check_output(manager_util.make_pip_cmd(["freeze"]), text=True)
|
||||||
|
|
||||||
detected = set()
|
detected = set()
|
||||||
try:
|
try:
|
||||||
anthropic_info = subprocess.check_output([sys.executable, '-m', "pip", "show", "anthropic"], text=True, stderr=subprocess.DEVNULL)
|
anthropic_info = subprocess.check_output(manager_util.make_pip_cmd(["show", "anthropic"]), text=True, stderr=subprocess.DEVNULL)
|
||||||
anthropic_reqs = [x for x in anthropic_info.split('\n') if x.startswith("Requires")][0].split(': ')[1]
|
requires_lines = [x for x in anthropic_info.split('\n') if x.startswith("Requires")]
|
||||||
|
if requires_lines:
|
||||||
|
anthropic_reqs = requires_lines[0].split(": ", 1)[1]
|
||||||
if "pycrypto" in anthropic_reqs:
|
if "pycrypto" in anthropic_reqs:
|
||||||
location = [x for x in anthropic_info.split('\n') if x.startswith("Location")][0].split(': ')[1]
|
location_lines = [x for x in anthropic_info.split('\n') if x.startswith("Location")]
|
||||||
|
if location_lines:
|
||||||
|
location = location_lines[0].split(": ", 1)[1]
|
||||||
for fi in os.listdir(location):
|
for fi in os.listdir(location):
|
||||||
if fi.startswith("anthropic"):
|
if fi.startswith("anthropic"):
|
||||||
guide["ComfyUI_LLMVISION"] = f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"]
|
guide["ComfyUI_LLMVISION"] = (f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"])
|
||||||
detected.add("ComfyUI_LLMVISION")
|
detected.add("ComfyUI_LLMVISION")
|
||||||
|
|
||||||
except subprocess.CalledProcessError:
|
except subprocess.CalledProcessError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
136
comfyui_manager/common/snapshot_util.py
Normal file
136
comfyui_manager/common/snapshot_util.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
from . import manager_util
|
||||||
|
from . import git_utils
|
||||||
|
import json
|
||||||
|
import yaml
|
||||||
|
import logging
|
||||||
|
|
||||||
|
def read_snapshot(snapshot_path):
    """Load a snapshot file and return its custom-node info.

    Supports ``.json`` files (the whole document is returned) and
    ``.yaml`` files (only the ``custom_nodes`` section is returned).
    Returns None — after logging a warning — on any failure, including
    an unsupported file extension.
    """
    try:
        with open(snapshot_path, 'r', encoding="UTF-8") as snapshot_file:
            if snapshot_path.endswith('.json'):
                return json.load(snapshot_file)
            elif snapshot_path.endswith('.yaml'):
                # SafeLoader: never instantiate arbitrary objects from
                # a snapshot file.
                info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
                return info['custom_nodes']
            else:
                # BUGFIX: previously this path fell through with `info`
                # unbound and only "worked" by catching its own
                # NameError; report the problem explicitly instead.
                raise ValueError(f"Unsupported snapshot format: {snapshot_path}")
    except Exception as e:
        logging.warning(f"Failed to read snapshot file: {snapshot_path}\nError: {e}")
        return None
||||||
|
|
||||||
|
|
||||||
|
def diff_snapshot(a, b):
    """Compute the difference between two snapshots.

    `a` is the older snapshot and `b` the newer one; both are dicts as
    produced by ``read_snapshot()``.  Returns
    ``{'nodepack_diff': ..., 'pip_diff': ...}`` or None when either
    snapshot is missing.  The input dicts are not modified.
    """
    if not a or not b:
        return None

    nodepack_diff = {
        'added': {},
        'removed': [],
        'upgraded': {},
        'downgraded': {},
        'changed': []
    }

    pip_diff = {
        'added': {},
        'upgraded': {},
        'downgraded': {}
    }

    # check: comfyui
    if a.get('comfyui') != b.get('comfyui'):
        nodepack_diff['changed'].append('comfyui')

    # check: cnr nodes
    # BUGFIX: work on copies — the original `del a_cnrs[...]` mutated
    # the caller's snapshot dicts in place.
    a_cnrs = dict(a.get('cnr_custom_nodes', {}))
    b_cnrs = dict(b.get('cnr_custom_nodes', {}))
    a_cnrs.pop('comfyui-manager', None)
    b_cnrs.pop('comfyui-manager', None)

    for k, a_v in a_cnrs.items():
        if k not in b_cnrs:
            nodepack_diff['removed'].append(k)
        elif a_v != b_cnrs[k]:
            a_ver = manager_util.StrictVersion(a_v)
            b_ver = manager_util.StrictVersion(b_cnrs[k])
            if a_ver < b_ver:
                nodepack_diff['upgraded'][k] = {'from': a_v, 'to': b_cnrs[k]}
            elif a_ver > b_ver:
                nodepack_diff['downgraded'][k] = {'from': a_v, 'to': b_cnrs[k]}

    for k in set(b_cnrs) - set(a_cnrs):
        nodepack_diff['added'][k] = b_cnrs[k]

    # check: git custom nodes (keyed by normalized repository URL)
    a_gits = {git_utils.normalize_url(k): v for k, v in a.get('git_custom_nodes', {}).items() if k.lower() != 'comfyui-manager'}
    b_gits = {git_utils.normalize_url(k): v for k, v in b.get('git_custom_nodes', {}).items() if k.lower() != 'comfyui-manager'}

    for k, v in a_gits.items():
        if k not in b_gits:
            nodepack_diff['removed'].append(k)
        elif not v['disabled'] and b_gits[k]['disabled']:
            # enabled -> disabled counts as a removal
            nodepack_diff['removed'].append(k)
        elif v['disabled'] and not b_gits[k]['disabled']:
            # disabled -> enabled counts as an addition.
            # BUGFIX: 'added' is a dict — the original called .append()
            # on it, which raised AttributeError at runtime.
            nodepack_diff['added'][k] = b_gits[k].get('hash')
        elif v['hash'] != b_gits[k]['hash']:
            a_date = v.get('commit_timestamp')
            b_date = b_gits[k].get('commit_timestamp')
            if a_date is not None and b_date is not None:
                # BUGFIX: 'upgraded'/'downgraded' are dicts — the
                # original called .append() on them as well; record a
                # from/to entry like the cnr branch does.
                if a_date < b_date:
                    nodepack_diff['upgraded'][k] = {'from': v['hash'], 'to': b_gits[k]['hash']}
                elif a_date > b_date:
                    nodepack_diff['downgraded'][k] = {'from': v['hash'], 'to': b_gits[k]['hash']}
            else:
                # without commit timestamps the two hashes cannot be ordered
                nodepack_diff['changed'].append(k)

    # check: pip packages — index b's packages by lowercase name instead
    # of the original O(n^2) pairwise scan.
    def _split_spec(spec):
        """Split 'name==ver' into (name, ver); ver is None when absent."""
        name, sep, ver = spec.partition('==')
        return name, (ver if sep else None)

    a_pip = a.get('pips', {})
    b_pip = b.get('pips', {})

    b_by_name = {}
    for spec in b_pip:
        name, ver = _split_spec(spec)
        b_by_name[name.lower()] = (name, ver)

    a_pip_names = set()
    for spec in a_pip:
        name, ver = _split_spec(spec)
        a_pip_names.add(name.lower())
        match = b_by_name.get(name.lower())
        if match is None:
            continue
        _, b_ver_str = match
        if ver != b_ver_str:
            a_ver = manager_util.StrictVersion(ver) if ver else None
            b_ver = manager_util.StrictVersion(b_ver_str) if b_ver_str else None
            if a_ver and b_ver:
                if a_ver < b_ver:
                    pip_diff['upgraded'][name] = {'from': ver, 'to': b_ver_str}
                elif a_ver > b_ver:
                    pip_diff['downgraded'][name] = {'from': ver, 'to': b_ver_str}
            elif not a_ver and b_ver:
                # previously unversioned, now pinned — treat as added
                pip_diff['added'][name] = b_ver_str

    for name, ver in b_by_name.values():
        if name.lower() not in a_pip_names and ver:
            pip_diff['added'][name] = ver

    return {'nodepack_diff': nodepack_diff, 'pip_diff': pip_diff}
|
||||||
@@ -29,6 +29,7 @@ datamodel-codegen \
|
|||||||
--use-subclass-enum \
|
--use-subclass-enum \
|
||||||
--field-constraints \
|
--field-constraints \
|
||||||
--strict-types bytes \
|
--strict-types bytes \
|
||||||
|
--use-double-quotes \
|
||||||
--input openapi.yaml \
|
--input openapi.yaml \
|
||||||
--output comfyui_manager/data_models/generated_models.py \
|
--output comfyui_manager/data_models/generated_models.py \
|
||||||
--output-model-type pydantic_v2.BaseModel
|
--output-model-type pydantic_v2.BaseModel
|
||||||
|
|||||||
@@ -31,8 +31,8 @@ from .generated_models import (
|
|||||||
ComfyUIVersionInfo,
|
ComfyUIVersionInfo,
|
||||||
|
|
||||||
# Other models
|
# Other models
|
||||||
Kind,
|
OperationType,
|
||||||
StatusStr,
|
OperationResult,
|
||||||
ManagerPackInfo,
|
ManagerPackInfo,
|
||||||
ManagerPackInstalled,
|
ManagerPackInstalled,
|
||||||
SelectedVersion,
|
SelectedVersion,
|
||||||
@@ -49,6 +49,9 @@ from .generated_models import (
|
|||||||
UninstallPackParams,
|
UninstallPackParams,
|
||||||
DisablePackParams,
|
DisablePackParams,
|
||||||
EnablePackParams,
|
EnablePackParams,
|
||||||
|
UpdateAllQueryParams,
|
||||||
|
UpdateComfyUIQueryParams,
|
||||||
|
ComfyUISwitchVersionQueryParams,
|
||||||
QueueStatus,
|
QueueStatus,
|
||||||
ManagerMappings,
|
ManagerMappings,
|
||||||
ModelMetadata,
|
ModelMetadata,
|
||||||
@@ -59,8 +62,8 @@ from .generated_models import (
|
|||||||
HistoryResponse,
|
HistoryResponse,
|
||||||
HistoryListResponse,
|
HistoryListResponse,
|
||||||
InstallType,
|
InstallType,
|
||||||
OperationType,
|
SecurityLevel,
|
||||||
Result,
|
RiskLevel,
|
||||||
)
|
)
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
@@ -86,8 +89,8 @@ __all__ = [
|
|||||||
"ComfyUIVersionInfo",
|
"ComfyUIVersionInfo",
|
||||||
|
|
||||||
# Other models
|
# Other models
|
||||||
"Kind",
|
"OperationType",
|
||||||
"StatusStr",
|
"OperationResult",
|
||||||
"ManagerPackInfo",
|
"ManagerPackInfo",
|
||||||
"ManagerPackInstalled",
|
"ManagerPackInstalled",
|
||||||
"SelectedVersion",
|
"SelectedVersion",
|
||||||
@@ -104,6 +107,9 @@ __all__ = [
|
|||||||
"UninstallPackParams",
|
"UninstallPackParams",
|
||||||
"DisablePackParams",
|
"DisablePackParams",
|
||||||
"EnablePackParams",
|
"EnablePackParams",
|
||||||
|
"UpdateAllQueryParams",
|
||||||
|
"UpdateComfyUIQueryParams",
|
||||||
|
"ComfyUISwitchVersionQueryParams",
|
||||||
"QueueStatus",
|
"QueueStatus",
|
||||||
"ManagerMappings",
|
"ManagerMappings",
|
||||||
"ModelMetadata",
|
"ModelMetadata",
|
||||||
@@ -114,6 +120,6 @@ __all__ = [
|
|||||||
"HistoryResponse",
|
"HistoryResponse",
|
||||||
"HistoryListResponse",
|
"HistoryListResponse",
|
||||||
"InstallType",
|
"InstallType",
|
||||||
"OperationType",
|
"SecurityLevel",
|
||||||
"Result",
|
"RiskLevel",
|
||||||
]
|
]
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
# generated by datamodel-codegen:
|
# generated by datamodel-codegen:
|
||||||
# filename: openapi.yaml
|
# filename: openapi.yaml
|
||||||
# timestamp: 2025-06-14T01:44:21+00:00
|
# timestamp: 2025-06-27T04:01:45+00:00
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
@@ -11,252 +11,298 @@ from typing import Any, Dict, List, Optional, Union
|
|||||||
from pydantic import BaseModel, Field, RootModel
|
from pydantic import BaseModel, Field, RootModel
|
||||||
|
|
||||||
|
|
||||||
class Kind(str, Enum):
|
class OperationType(str, Enum):
|
||||||
install = 'install'
|
install = "install"
|
||||||
uninstall = 'uninstall'
|
uninstall = "uninstall"
|
||||||
update = 'update'
|
update = "update"
|
||||||
update_all = 'update-all'
|
update_comfyui = "update-comfyui"
|
||||||
update_comfyui = 'update-comfyui'
|
fix = "fix"
|
||||||
fix = 'fix'
|
disable = "disable"
|
||||||
disable = 'disable'
|
enable = "enable"
|
||||||
enable = 'enable'
|
install_model = "install-model"
|
||||||
install_model = 'install-model'
|
|
||||||
|
|
||||||
|
|
||||||
class StatusStr(str, Enum):
|
class OperationResult(str, Enum):
|
||||||
success = 'success'
|
success = "success"
|
||||||
error = 'error'
|
failed = "failed"
|
||||||
skip = 'skip'
|
skipped = "skipped"
|
||||||
|
error = "error"
|
||||||
|
skip = "skip"
|
||||||
|
|
||||||
|
|
||||||
class TaskExecutionStatus(BaseModel):
|
class TaskExecutionStatus(BaseModel):
|
||||||
status_str: StatusStr = Field(..., description='Overall task execution status')
|
status_str: OperationResult
|
||||||
completed: bool = Field(..., description='Whether the task completed')
|
completed: bool = Field(..., description="Whether the task completed")
|
||||||
messages: List[str] = Field(..., description='Additional status messages')
|
messages: List[str] = Field(..., description="Additional status messages")
|
||||||
|
|
||||||
|
|
||||||
class ManagerMessageName(str, Enum):
|
class ManagerMessageName(str, Enum):
|
||||||
cm_task_completed = 'cm-task-completed'
|
cm_task_completed = "cm-task-completed"
|
||||||
cm_task_started = 'cm-task-started'
|
cm_task_started = "cm-task-started"
|
||||||
cm_queue_status = 'cm-queue-status'
|
cm_queue_status = "cm-queue-status"
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackInfo(BaseModel):
|
class ManagerPackInfo(BaseModel):
|
||||||
id: str = Field(
|
id: str = Field(
|
||||||
...,
|
...,
|
||||||
description='Either github-author/github-repo or name of pack from the registry',
|
description="Either github-author/github-repo or name of pack from the registry",
|
||||||
)
|
)
|
||||||
version: str = Field(..., description='Semantic version or Git commit hash')
|
version: str = Field(..., description="Semantic version or Git commit hash")
|
||||||
ui_id: Optional[str] = Field(None, description='Task ID - generated internally')
|
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackInstalled(BaseModel):
|
class ManagerPackInstalled(BaseModel):
|
||||||
ver: str = Field(
|
ver: str = Field(
|
||||||
...,
|
...,
|
||||||
description='The version of the pack that is installed (Git commit hash or semantic version)',
|
description="The version of the pack that is installed (Git commit hash or semantic version)",
|
||||||
)
|
)
|
||||||
cnr_id: Optional[str] = Field(
|
cnr_id: Optional[str] = Field(
|
||||||
None, description='The name of the pack if installed from the registry'
|
None, description="The name of the pack if installed from the registry"
|
||||||
)
|
)
|
||||||
aux_id: Optional[str] = Field(
|
aux_id: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description='The name of the pack if installed from github (author/repo-name format)',
|
description="The name of the pack if installed from github (author/repo-name format)",
|
||||||
)
|
)
|
||||||
enabled: bool = Field(..., description='Whether the pack is enabled')
|
enabled: bool = Field(..., description="Whether the pack is enabled")
|
||||||
|
|
||||||
|
|
||||||
class SelectedVersion(str, Enum):
|
class SelectedVersion(str, Enum):
|
||||||
latest = 'latest'
|
latest = "latest"
|
||||||
nightly = 'nightly'
|
nightly = "nightly"
|
||||||
|
|
||||||
|
|
||||||
class ManagerChannel(str, Enum):
|
class ManagerChannel(str, Enum):
|
||||||
default = 'default'
|
default = "default"
|
||||||
recent = 'recent'
|
recent = "recent"
|
||||||
legacy = 'legacy'
|
legacy = "legacy"
|
||||||
forked = 'forked'
|
forked = "forked"
|
||||||
dev = 'dev'
|
dev = "dev"
|
||||||
tutorial = 'tutorial'
|
tutorial = "tutorial"
|
||||||
|
|
||||||
|
|
||||||
class ManagerDatabaseSource(str, Enum):
|
class ManagerDatabaseSource(str, Enum):
|
||||||
remote = 'remote'
|
remote = "remote"
|
||||||
local = 'local'
|
local = "local"
|
||||||
cache = 'cache'
|
cache = "cache"
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackState(str, Enum):
|
class ManagerPackState(str, Enum):
|
||||||
installed = 'installed'
|
installed = "installed"
|
||||||
disabled = 'disabled'
|
disabled = "disabled"
|
||||||
not_installed = 'not_installed'
|
not_installed = "not_installed"
|
||||||
import_failed = 'import_failed'
|
import_failed = "import_failed"
|
||||||
needs_update = 'needs_update'
|
needs_update = "needs_update"
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackInstallType(str, Enum):
|
class ManagerPackInstallType(str, Enum):
|
||||||
git_clone = 'git-clone'
|
git_clone = "git-clone"
|
||||||
copy = 'copy'
|
copy = "copy"
|
||||||
cnr = 'cnr'
|
cnr = "cnr"
|
||||||
|
|
||||||
|
|
||||||
|
class SecurityLevel(str, Enum):
|
||||||
|
strong = "strong"
|
||||||
|
normal = "normal"
|
||||||
|
normal_ = "normal-"
|
||||||
|
weak = "weak"
|
||||||
|
|
||||||
|
|
||||||
|
class RiskLevel(str, Enum):
|
||||||
|
block = "block"
|
||||||
|
high_ = "high+"
|
||||||
|
high = "high"
|
||||||
|
middle_ = "middle+"
|
||||||
|
middle = "middle"
|
||||||
|
|
||||||
|
|
||||||
class UpdateState(Enum):
|
class UpdateState(Enum):
|
||||||
false = 'false'
|
false = "false"
|
||||||
true = 'true'
|
true = "true"
|
||||||
|
|
||||||
|
|
||||||
class ManagerPack(ManagerPackInfo):
|
class ManagerPack(ManagerPackInfo):
|
||||||
author: Optional[str] = Field(
|
author: Optional[str] = Field(
|
||||||
None, description="Pack author name or 'Unclaimed' if added via GitHub crawl"
|
None, description="Pack author name or 'Unclaimed' if added via GitHub crawl"
|
||||||
)
|
)
|
||||||
files: Optional[List[str]] = Field(None, description='Files included in the pack')
|
files: Optional[List[str]] = Field(
|
||||||
reference: Optional[str] = Field(
|
None,
|
||||||
None, description='The type of installation reference'
|
description="Repository URLs for installation (typically contains one GitHub URL)",
|
||||||
)
|
)
|
||||||
title: Optional[str] = Field(None, description='The display name of the pack')
|
reference: Optional[str] = Field(
|
||||||
|
None, description="The type of installation reference"
|
||||||
|
)
|
||||||
|
title: Optional[str] = Field(None, description="The display name of the pack")
|
||||||
cnr_latest: Optional[SelectedVersion] = None
|
cnr_latest: Optional[SelectedVersion] = None
|
||||||
repository: Optional[str] = Field(None, description='GitHub repository URL')
|
repository: Optional[str] = Field(None, description="GitHub repository URL")
|
||||||
state: Optional[ManagerPackState] = None
|
state: Optional[ManagerPackState] = None
|
||||||
update_state: Optional[UpdateState] = Field(
|
update_state: Optional[UpdateState] = Field(
|
||||||
None, alias='update-state', description='Update availability status'
|
None, alias="update-state", description="Update availability status"
|
||||||
)
|
)
|
||||||
stars: Optional[int] = Field(None, description='GitHub stars count')
|
stars: Optional[int] = Field(None, description="GitHub stars count")
|
||||||
last_update: Optional[datetime] = Field(None, description='Last update timestamp')
|
last_update: Optional[datetime] = Field(None, description="Last update timestamp")
|
||||||
health: Optional[str] = Field(None, description='Health status of the pack')
|
health: Optional[str] = Field(None, description="Health status of the pack")
|
||||||
description: Optional[str] = Field(None, description='Pack description')
|
description: Optional[str] = Field(None, description="Pack description")
|
||||||
trust: Optional[bool] = Field(None, description='Whether the pack is trusted')
|
trust: Optional[bool] = Field(None, description="Whether the pack is trusted")
|
||||||
install_type: Optional[ManagerPackInstallType] = None
|
install_type: Optional[ManagerPackInstallType] = None
|
||||||
|
|
||||||
|
|
||||||
class InstallPackParams(ManagerPackInfo):
|
class InstallPackParams(ManagerPackInfo):
|
||||||
selected_version: Union[str, SelectedVersion] = Field(
|
selected_version: Union[str, SelectedVersion] = Field(
|
||||||
..., description='Semantic version, Git commit hash, latest, or nightly'
|
..., description="Semantic version, Git commit hash, latest, or nightly"
|
||||||
)
|
)
|
||||||
repository: Optional[str] = Field(
|
repository: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description='GitHub repository URL (required if selected_version is nightly)',
|
description="GitHub repository URL (required if selected_version is nightly)",
|
||||||
)
|
)
|
||||||
pip: Optional[List[str]] = Field(None, description='PyPi dependency names')
|
pip: Optional[List[str]] = Field(None, description="PyPi dependency names")
|
||||||
mode: ManagerDatabaseSource
|
mode: ManagerDatabaseSource
|
||||||
channel: ManagerChannel
|
channel: ManagerChannel
|
||||||
skip_post_install: Optional[bool] = Field(
|
skip_post_install: Optional[bool] = Field(
|
||||||
None, description='Whether to skip post-installation steps'
|
None, description="Whether to skip post-installation steps"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class UpdateAllPacksParams(BaseModel):
|
class UpdateAllPacksParams(BaseModel):
|
||||||
mode: Optional[ManagerDatabaseSource] = None
|
mode: Optional[ManagerDatabaseSource] = None
|
||||||
ui_id: Optional[str] = Field(None, description='Task ID - generated internally')
|
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
|
||||||
|
|
||||||
|
|
||||||
class UpdatePackParams(BaseModel):
|
class UpdatePackParams(BaseModel):
|
||||||
node_name: str = Field(..., description='Name of the node package to update')
|
node_name: str = Field(..., description="Name of the node package to update")
|
||||||
node_ver: Optional[str] = Field(
|
node_ver: Optional[str] = Field(
|
||||||
None, description='Current version of the node package'
|
None, description="Current version of the node package"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class UpdateComfyUIParams(BaseModel):
|
class UpdateComfyUIParams(BaseModel):
|
||||||
is_stable: Optional[bool] = Field(
|
is_stable: Optional[bool] = Field(
|
||||||
True,
|
True,
|
||||||
description='Whether to update to stable version (true) or nightly (false)',
|
description="Whether to update to stable version (true) or nightly (false)",
|
||||||
)
|
)
|
||||||
target_version: Optional[str] = Field(
|
target_version: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description='Specific version to switch to (for version switching operations)',
|
description="Specific version to switch to (for version switching operations)",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class FixPackParams(BaseModel):
|
class FixPackParams(BaseModel):
|
||||||
node_name: str = Field(..., description='Name of the node package to fix')
|
node_name: str = Field(..., description="Name of the node package to fix")
|
||||||
node_ver: str = Field(..., description='Version of the node package')
|
node_ver: str = Field(..., description="Version of the node package")
|
||||||
|
|
||||||
|
|
||||||
class UninstallPackParams(BaseModel):
|
class UninstallPackParams(BaseModel):
|
||||||
node_name: str = Field(..., description='Name of the node package to uninstall')
|
node_name: str = Field(..., description="Name of the node package to uninstall")
|
||||||
is_unknown: Optional[bool] = Field(
|
is_unknown: Optional[bool] = Field(
|
||||||
False, description='Whether this is an unknown/unregistered package'
|
False, description="Whether this is an unknown/unregistered package"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class DisablePackParams(BaseModel):
|
class DisablePackParams(BaseModel):
|
||||||
node_name: str = Field(..., description='Name of the node package to disable')
|
node_name: str = Field(..., description="Name of the node package to disable")
|
||||||
is_unknown: Optional[bool] = Field(
|
is_unknown: Optional[bool] = Field(
|
||||||
False, description='Whether this is an unknown/unregistered package'
|
False, description="Whether this is an unknown/unregistered package"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class EnablePackParams(BaseModel):
|
class EnablePackParams(BaseModel):
|
||||||
cnr_id: str = Field(
|
cnr_id: str = Field(
|
||||||
..., description='ComfyUI Node Registry ID of the package to enable'
|
..., description="ComfyUI Node Registry ID of the package to enable"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class UpdateAllQueryParams(BaseModel):
|
||||||
|
client_id: str = Field(
|
||||||
|
..., description="Client identifier that initiated the request"
|
||||||
|
)
|
||||||
|
ui_id: str = Field(..., description="Base UI identifier for task tracking")
|
||||||
|
mode: Optional[ManagerDatabaseSource] = None
|
||||||
|
|
||||||
|
|
||||||
|
class UpdateComfyUIQueryParams(BaseModel):
|
||||||
|
client_id: str = Field(
|
||||||
|
..., description="Client identifier that initiated the request"
|
||||||
|
)
|
||||||
|
ui_id: str = Field(..., description="UI identifier for task tracking")
|
||||||
|
stable: Optional[bool] = Field(
|
||||||
|
True,
|
||||||
|
description="Whether to update to stable version (true) or nightly (false)",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ComfyUISwitchVersionQueryParams(BaseModel):
|
||||||
|
ver: str = Field(..., description="Version to switch to")
|
||||||
|
client_id: str = Field(
|
||||||
|
..., description="Client identifier that initiated the request"
|
||||||
|
)
|
||||||
|
ui_id: str = Field(..., description="UI identifier for task tracking")
|
||||||
|
|
||||||
|
|
||||||
class QueueStatus(BaseModel):
|
class QueueStatus(BaseModel):
|
||||||
total_count: int = Field(
|
total_count: int = Field(
|
||||||
..., description='Total number of tasks (pending + running)'
|
..., description="Total number of tasks (pending + running)"
|
||||||
)
|
)
|
||||||
done_count: int = Field(..., description='Number of completed tasks')
|
done_count: int = Field(..., description="Number of completed tasks")
|
||||||
in_progress_count: int = Field(..., description='Number of tasks currently running')
|
in_progress_count: int = Field(..., description="Number of tasks currently running")
|
||||||
pending_count: Optional[int] = Field(
|
pending_count: Optional[int] = Field(
|
||||||
None, description='Number of tasks waiting to be executed'
|
None, description="Number of tasks waiting to be executed"
|
||||||
)
|
)
|
||||||
is_processing: bool = Field(..., description='Whether the task worker is active')
|
is_processing: bool = Field(..., description="Whether the task worker is active")
|
||||||
client_id: Optional[str] = Field(
|
client_id: Optional[str] = Field(
|
||||||
None, description='Client ID (when filtered by client)'
|
None, description="Client ID (when filtered by client)"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ManagerMappings1(BaseModel):
|
class ManagerMappings1(BaseModel):
|
||||||
title_aux: Optional[str] = Field(None, description='The display name of the pack')
|
title_aux: Optional[str] = Field(None, description="The display name of the pack")
|
||||||
|
|
||||||
|
|
||||||
class ManagerMappings(
|
class ManagerMappings(
|
||||||
RootModel[Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]]]
|
RootModel[Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]]]
|
||||||
):
|
):
|
||||||
root: Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]] = Field(
|
root: Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]] = Field(
|
||||||
None, description='Tuple of [node_names, metadata]'
|
None, description="Tuple of [node_names, metadata]"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ModelMetadata(BaseModel):
|
class ModelMetadata(BaseModel):
|
||||||
name: str = Field(..., description='Name of the model')
|
name: str = Field(..., description="Name of the model")
|
||||||
type: str = Field(..., description='Type of model')
|
type: str = Field(..., description="Type of model")
|
||||||
base: Optional[str] = Field(None, description='Base model type')
|
base: Optional[str] = Field(None, description="Base model type")
|
||||||
save_path: Optional[str] = Field(None, description='Path for saving the model')
|
save_path: Optional[str] = Field(None, description="Path for saving the model")
|
||||||
url: str = Field(..., description='Download URL')
|
url: str = Field(..., description="Download URL")
|
||||||
filename: str = Field(..., description='Target filename')
|
filename: str = Field(..., description="Target filename")
|
||||||
ui_id: Optional[str] = Field(None, description='ID for UI reference')
|
ui_id: Optional[str] = Field(None, description="ID for UI reference")
|
||||||
|
|
||||||
|
|
||||||
class InstallType(str, Enum):
|
class InstallType(str, Enum):
|
||||||
git = 'git'
|
git = "git"
|
||||||
copy = 'copy'
|
copy = "copy"
|
||||||
pip = 'pip'
|
pip = "pip"
|
||||||
|
|
||||||
|
|
||||||
class NodePackageMetadata(BaseModel):
|
class NodePackageMetadata(BaseModel):
|
||||||
title: Optional[str] = Field(None, description='Display name of the node package')
|
title: Optional[str] = Field(None, description="Display name of the node package")
|
||||||
name: Optional[str] = Field(None, description='Repository/package name')
|
name: Optional[str] = Field(None, description="Repository/package name")
|
||||||
files: Optional[List[str]] = Field(None, description='Source URLs for the package')
|
files: Optional[List[str]] = Field(None, description="Source URLs for the package")
|
||||||
description: Optional[str] = Field(
|
description: Optional[str] = Field(
|
||||||
None, description='Description of the node package functionality'
|
None, description="Description of the node package functionality"
|
||||||
)
|
)
|
||||||
install_type: Optional[InstallType] = Field(None, description='Installation method')
|
install_type: Optional[InstallType] = Field(None, description="Installation method")
|
||||||
version: Optional[str] = Field(None, description='Version identifier')
|
version: Optional[str] = Field(None, description="Version identifier")
|
||||||
id: Optional[str] = Field(
|
id: Optional[str] = Field(
|
||||||
None, description='Unique identifier for the node package'
|
None, description="Unique identifier for the node package"
|
||||||
)
|
)
|
||||||
ui_id: Optional[str] = Field(None, description='ID for UI reference')
|
ui_id: Optional[str] = Field(None, description="ID for UI reference")
|
||||||
channel: Optional[str] = Field(None, description='Source channel')
|
channel: Optional[str] = Field(None, description="Source channel")
|
||||||
mode: Optional[str] = Field(None, description='Source mode')
|
mode: Optional[str] = Field(None, description="Source mode")
|
||||||
|
|
||||||
|
|
||||||
class SnapshotItem(RootModel[str]):
|
class SnapshotItem(RootModel[str]):
|
||||||
root: str = Field(..., description='Name of the snapshot')
|
root: str = Field(..., description="Name of the snapshot")
|
||||||
|
|
||||||
|
|
||||||
class Error(BaseModel):
|
class Error(BaseModel):
|
||||||
error: str = Field(..., description='Error message')
|
error: str = Field(..., description="Error message")
|
||||||
|
|
||||||
|
|
||||||
class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]]):
|
class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]]):
|
||||||
@@ -265,142 +311,153 @@ class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]
|
|||||||
|
|
||||||
class HistoryListResponse(BaseModel):
|
class HistoryListResponse(BaseModel):
|
||||||
ids: Optional[List[str]] = Field(
|
ids: Optional[List[str]] = Field(
|
||||||
None, description='List of available batch history IDs'
|
None, description="List of available batch history IDs"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class InstalledNodeInfo(BaseModel):
|
class InstalledNodeInfo(BaseModel):
|
||||||
name: str = Field(..., description='Node package name')
|
name: str = Field(..., description="Node package name")
|
||||||
version: str = Field(..., description='Installed version')
|
version: str = Field(..., description="Installed version")
|
||||||
repository_url: Optional[str] = Field(None, description='Git repository URL')
|
repository_url: Optional[str] = Field(None, description="Git repository URL")
|
||||||
install_method: str = Field(
|
install_method: str = Field(
|
||||||
..., description='Installation method (cnr, git, pip, etc.)'
|
..., description="Installation method (cnr, git, pip, etc.)"
|
||||||
)
|
)
|
||||||
enabled: Optional[bool] = Field(
|
enabled: Optional[bool] = Field(
|
||||||
True, description='Whether the node is currently enabled'
|
True, description="Whether the node is currently enabled"
|
||||||
)
|
)
|
||||||
install_date: Optional[datetime] = Field(
|
install_date: Optional[datetime] = Field(
|
||||||
None, description='ISO timestamp of installation'
|
None, description="ISO timestamp of installation"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class InstalledModelInfo(BaseModel):
|
class InstalledModelInfo(BaseModel):
|
||||||
name: str = Field(..., description='Model filename')
|
name: str = Field(..., description="Model filename")
|
||||||
path: str = Field(..., description='Full path to model file')
|
path: str = Field(..., description="Full path to model file")
|
||||||
type: str = Field(..., description='Model type (checkpoint, lora, vae, etc.)')
|
type: str = Field(..., description="Model type (checkpoint, lora, vae, etc.)")
|
||||||
size_bytes: Optional[int] = Field(None, description='File size in bytes', ge=0)
|
size_bytes: Optional[int] = Field(None, description="File size in bytes", ge=0)
|
||||||
hash: Optional[str] = Field(None, description='Model file hash for verification')
|
hash: Optional[str] = Field(None, description="Model file hash for verification")
|
||||||
install_date: Optional[datetime] = Field(
|
install_date: Optional[datetime] = Field(
|
||||||
None, description='ISO timestamp when added'
|
None, description="ISO timestamp when added"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ComfyUIVersionInfo(BaseModel):
|
class ComfyUIVersionInfo(BaseModel):
|
||||||
version: str = Field(..., description='ComfyUI version string')
|
version: str = Field(..., description="ComfyUI version string")
|
||||||
commit_hash: Optional[str] = Field(None, description='Git commit hash')
|
commit_hash: Optional[str] = Field(None, description="Git commit hash")
|
||||||
branch: Optional[str] = Field(None, description='Git branch name')
|
branch: Optional[str] = Field(None, description="Git branch name")
|
||||||
is_stable: Optional[bool] = Field(
|
is_stable: Optional[bool] = Field(
|
||||||
False, description='Whether this is a stable release'
|
False, description="Whether this is a stable release"
|
||||||
)
|
)
|
||||||
last_updated: Optional[datetime] = Field(
|
last_updated: Optional[datetime] = Field(
|
||||||
None, description='ISO timestamp of last update'
|
None, description="ISO timestamp of last update"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class OperationType(str, Enum):
|
|
||||||
install = 'install'
|
|
||||||
update = 'update'
|
|
||||||
uninstall = 'uninstall'
|
|
||||||
fix = 'fix'
|
|
||||||
disable = 'disable'
|
|
||||||
enable = 'enable'
|
|
||||||
install_model = 'install-model'
|
|
||||||
|
|
||||||
|
|
||||||
class Result(str, Enum):
|
|
||||||
success = 'success'
|
|
||||||
failed = 'failed'
|
|
||||||
skipped = 'skipped'
|
|
||||||
|
|
||||||
|
|
||||||
class BatchOperation(BaseModel):
|
class BatchOperation(BaseModel):
|
||||||
operation_id: str = Field(..., description='Unique operation identifier')
|
operation_id: str = Field(..., description="Unique operation identifier")
|
||||||
operation_type: OperationType = Field(..., description='Type of operation')
|
operation_type: OperationType
|
||||||
target: str = Field(
|
target: str = Field(
|
||||||
..., description='Target of the operation (node name, model name, etc.)'
|
..., description="Target of the operation (node name, model name, etc.)"
|
||||||
)
|
)
|
||||||
target_version: Optional[str] = Field(
|
target_version: Optional[str] = Field(
|
||||||
None, description='Target version for the operation'
|
None, description="Target version for the operation"
|
||||||
)
|
)
|
||||||
result: Result = Field(..., description='Operation result')
|
result: OperationResult
|
||||||
error_message: Optional[str] = Field(
|
error_message: Optional[str] = Field(
|
||||||
None, description='Error message if operation failed'
|
None, description="Error message if operation failed"
|
||||||
)
|
)
|
||||||
start_time: datetime = Field(
|
start_time: datetime = Field(
|
||||||
..., description='ISO timestamp when operation started'
|
..., description="ISO timestamp when operation started"
|
||||||
)
|
)
|
||||||
end_time: Optional[datetime] = Field(
|
end_time: Optional[datetime] = Field(
|
||||||
None, description='ISO timestamp when operation completed'
|
None, description="ISO timestamp when operation completed"
|
||||||
)
|
)
|
||||||
client_id: Optional[str] = Field(
|
client_id: Optional[str] = Field(
|
||||||
None, description='Client that initiated the operation'
|
None, description="Client that initiated the operation"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ComfyUISystemState(BaseModel):
|
class ComfyUISystemState(BaseModel):
|
||||||
snapshot_time: datetime = Field(
|
snapshot_time: datetime = Field(
|
||||||
..., description='ISO timestamp when snapshot was taken'
|
..., description="ISO timestamp when snapshot was taken"
|
||||||
)
|
)
|
||||||
comfyui_version: ComfyUIVersionInfo
|
comfyui_version: ComfyUIVersionInfo
|
||||||
frontend_version: Optional[str] = Field(
|
frontend_version: Optional[str] = Field(
|
||||||
None, description='ComfyUI frontend version if available'
|
None, description="ComfyUI frontend version if available"
|
||||||
)
|
)
|
||||||
python_version: str = Field(..., description='Python interpreter version')
|
python_version: str = Field(..., description="Python interpreter version")
|
||||||
platform_info: str = Field(
|
platform_info: str = Field(
|
||||||
..., description='Operating system and platform information'
|
..., description="Operating system and platform information"
|
||||||
)
|
)
|
||||||
installed_nodes: Optional[Dict[str, InstalledNodeInfo]] = Field(
|
installed_nodes: Optional[Dict[str, InstalledNodeInfo]] = Field(
|
||||||
None, description='Map of installed node packages by name'
|
None, description="Map of installed node packages by name"
|
||||||
)
|
)
|
||||||
installed_models: Optional[Dict[str, InstalledModelInfo]] = Field(
|
installed_models: Optional[Dict[str, InstalledModelInfo]] = Field(
|
||||||
None, description='Map of installed models by name'
|
None, description="Map of installed models by name"
|
||||||
)
|
)
|
||||||
manager_config: Optional[Dict[str, Any]] = Field(
|
manager_config: Optional[Dict[str, Any]] = Field(
|
||||||
None, description='ComfyUI Manager configuration settings'
|
None, description="ComfyUI Manager configuration settings"
|
||||||
|
)
|
||||||
|
comfyui_root_path: Optional[str] = Field(
|
||||||
|
None, description="ComfyUI root installation directory"
|
||||||
|
)
|
||||||
|
model_paths: Optional[Dict[str, List[str]]] = Field(
|
||||||
|
None, description="Map of model types to their configured paths"
|
||||||
|
)
|
||||||
|
manager_version: Optional[str] = Field(None, description="ComfyUI Manager version")
|
||||||
|
security_level: Optional[SecurityLevel] = None
|
||||||
|
network_mode: Optional[str] = Field(
|
||||||
|
None, description="Network mode (online, offline, private)"
|
||||||
|
)
|
||||||
|
cli_args: Optional[Dict[str, Any]] = Field(
|
||||||
|
None, description="Selected ComfyUI CLI arguments"
|
||||||
|
)
|
||||||
|
custom_nodes_count: Optional[int] = Field(
|
||||||
|
None, description="Total number of custom node packages", ge=0
|
||||||
|
)
|
||||||
|
failed_imports: Optional[List[str]] = Field(
|
||||||
|
None, description="List of custom nodes that failed to import"
|
||||||
|
)
|
||||||
|
pip_packages: Optional[Dict[str, str]] = Field(
|
||||||
|
None, description="Map of installed pip packages to their versions"
|
||||||
|
)
|
||||||
|
embedded_python: Optional[bool] = Field(
|
||||||
|
None,
|
||||||
|
description="Whether ComfyUI is running from an embedded Python distribution",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class BatchExecutionRecord(BaseModel):
|
class BatchExecutionRecord(BaseModel):
|
||||||
batch_id: str = Field(..., description='Unique batch identifier')
|
batch_id: str = Field(..., description="Unique batch identifier")
|
||||||
start_time: datetime = Field(..., description='ISO timestamp when batch started')
|
start_time: datetime = Field(..., description="ISO timestamp when batch started")
|
||||||
end_time: Optional[datetime] = Field(
|
end_time: Optional[datetime] = Field(
|
||||||
None, description='ISO timestamp when batch completed'
|
None, description="ISO timestamp when batch completed"
|
||||||
)
|
)
|
||||||
state_before: ComfyUISystemState
|
state_before: ComfyUISystemState
|
||||||
state_after: Optional[ComfyUISystemState] = Field(
|
state_after: Optional[ComfyUISystemState] = Field(
|
||||||
None, description='System state after batch execution'
|
None, description="System state after batch execution"
|
||||||
)
|
)
|
||||||
operations: Optional[List[BatchOperation]] = Field(
|
operations: Optional[List[BatchOperation]] = Field(
|
||||||
None, description='List of operations performed in this batch'
|
None, description="List of operations performed in this batch"
|
||||||
)
|
)
|
||||||
total_operations: Optional[int] = Field(
|
total_operations: Optional[int] = Field(
|
||||||
0, description='Total number of operations in batch', ge=0
|
0, description="Total number of operations in batch", ge=0
|
||||||
)
|
)
|
||||||
successful_operations: Optional[int] = Field(
|
successful_operations: Optional[int] = Field(
|
||||||
0, description='Number of successful operations', ge=0
|
0, description="Number of successful operations", ge=0
|
||||||
)
|
)
|
||||||
failed_operations: Optional[int] = Field(
|
failed_operations: Optional[int] = Field(
|
||||||
0, description='Number of failed operations', ge=0
|
0, description="Number of failed operations", ge=0
|
||||||
)
|
)
|
||||||
skipped_operations: Optional[int] = Field(
|
skipped_operations: Optional[int] = Field(
|
||||||
0, description='Number of skipped operations', ge=0
|
0, description="Number of skipped operations", ge=0
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class QueueTaskItem(BaseModel):
|
class QueueTaskItem(BaseModel):
|
||||||
ui_id: str = Field(..., description='Unique identifier for the task')
|
ui_id: str = Field(..., description="Unique identifier for the task")
|
||||||
client_id: str = Field(..., description='Client identifier that initiated the task')
|
client_id: str = Field(..., description="Client identifier that initiated the task")
|
||||||
kind: Kind = Field(..., description='Type of task being performed')
|
kind: OperationType
|
||||||
params: Union[
|
params: Union[
|
||||||
InstallPackParams,
|
InstallPackParams,
|
||||||
UpdatePackParams,
|
UpdatePackParams,
|
||||||
@@ -415,50 +472,56 @@ class QueueTaskItem(BaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class TaskHistoryItem(BaseModel):
|
class TaskHistoryItem(BaseModel):
|
||||||
ui_id: str = Field(..., description='Unique identifier for the task')
|
ui_id: str = Field(..., description="Unique identifier for the task")
|
||||||
client_id: str = Field(..., description='Client identifier that initiated the task')
|
client_id: str = Field(..., description="Client identifier that initiated the task")
|
||||||
kind: str = Field(..., description='Type of task that was performed')
|
kind: str = Field(..., description="Type of task that was performed")
|
||||||
timestamp: datetime = Field(..., description='ISO timestamp when task completed')
|
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
|
||||||
result: str = Field(..., description='Task result message or details')
|
result: str = Field(..., description="Task result message or details")
|
||||||
status: Optional[TaskExecutionStatus] = None
|
status: Optional[TaskExecutionStatus] = None
|
||||||
|
batch_id: Optional[str] = Field(
|
||||||
|
None, description="ID of the batch this task belongs to"
|
||||||
|
)
|
||||||
|
end_time: Optional[datetime] = Field(
|
||||||
|
None, description="ISO timestamp when task execution ended"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TaskStateMessage(BaseModel):
|
class TaskStateMessage(BaseModel):
|
||||||
history: Dict[str, TaskHistoryItem] = Field(
|
history: Dict[str, TaskHistoryItem] = Field(
|
||||||
..., description='Map of task IDs to their history items'
|
..., description="Map of task IDs to their history items"
|
||||||
)
|
)
|
||||||
running_queue: List[QueueTaskItem] = Field(
|
running_queue: List[QueueTaskItem] = Field(
|
||||||
..., description='Currently executing tasks'
|
..., description="Currently executing tasks"
|
||||||
)
|
)
|
||||||
pending_queue: List[QueueTaskItem] = Field(
|
pending_queue: List[QueueTaskItem] = Field(
|
||||||
..., description='Tasks waiting to be executed'
|
..., description="Tasks waiting to be executed"
|
||||||
)
|
)
|
||||||
installed_packs: Dict[str, ManagerPackInstalled] = Field(
|
installed_packs: Dict[str, ManagerPackInstalled] = Field(
|
||||||
..., description='Map of currently installed node packages by name'
|
..., description="Map of currently installed node packages by name"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class MessageTaskDone(BaseModel):
|
class MessageTaskDone(BaseModel):
|
||||||
ui_id: str = Field(..., description='Task identifier')
|
ui_id: str = Field(..., description="Task identifier")
|
||||||
result: str = Field(..., description='Task result message')
|
result: str = Field(..., description="Task result message")
|
||||||
kind: str = Field(..., description='Type of task')
|
kind: str = Field(..., description="Type of task")
|
||||||
status: Optional[TaskExecutionStatus] = None
|
status: Optional[TaskExecutionStatus] = None
|
||||||
timestamp: datetime = Field(..., description='ISO timestamp when task completed')
|
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
|
||||||
state: TaskStateMessage
|
state: TaskStateMessage
|
||||||
|
|
||||||
|
|
||||||
class MessageTaskStarted(BaseModel):
|
class MessageTaskStarted(BaseModel):
|
||||||
ui_id: str = Field(..., description='Task identifier')
|
ui_id: str = Field(..., description="Task identifier")
|
||||||
kind: str = Field(..., description='Type of task')
|
kind: str = Field(..., description="Type of task")
|
||||||
timestamp: datetime = Field(..., description='ISO timestamp when task started')
|
timestamp: datetime = Field(..., description="ISO timestamp when task started")
|
||||||
state: TaskStateMessage
|
state: TaskStateMessage
|
||||||
|
|
||||||
|
|
||||||
class MessageTaskFailed(BaseModel):
|
class MessageTaskFailed(BaseModel):
|
||||||
ui_id: str = Field(..., description='Task identifier')
|
ui_id: str = Field(..., description="Task identifier")
|
||||||
error: str = Field(..., description='Error message')
|
error: str = Field(..., description="Error message")
|
||||||
kind: str = Field(..., description='Type of task')
|
kind: str = Field(..., description="Type of task")
|
||||||
timestamp: datetime = Field(..., description='ISO timestamp when task failed')
|
timestamp: datetime = Field(..., description="ISO timestamp when task failed")
|
||||||
state: TaskStateMessage
|
state: TaskStateMessage
|
||||||
|
|
||||||
|
|
||||||
@@ -466,11 +529,11 @@ class MessageUpdate(
|
|||||||
RootModel[Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed]]
|
RootModel[Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed]]
|
||||||
):
|
):
|
||||||
root: Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed] = Field(
|
root: Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed] = Field(
|
||||||
..., description='Union type for all possible WebSocket message updates'
|
..., description="Union type for all possible WebSocket message updates"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class HistoryResponse(BaseModel):
|
class HistoryResponse(BaseModel):
|
||||||
history: Optional[Dict[str, TaskHistoryItem]] = Field(
|
history: Optional[Dict[str, TaskHistoryItem]] = Field(
|
||||||
None, description='Map of task IDs to their history items'
|
None, description="Map of task IDs to their history items"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -8,3 +8,4 @@
|
|||||||
7. Adjust the `__init__.py` files in the `data_models` directory to match/export the new data model
|
7. Adjust the `__init__.py` files in the `data_models` directory to match/export the new data model
|
||||||
8. Only then, make the changes to the rest of the codebase
|
8. Only then, make the changes to the rest of the codebase
|
||||||
9. Run the CI tests to verify that the changes are working
|
9. Run the CI tests to verify that the changes are working
|
||||||
|
- The comfyui_manager is a python package that is used to manage the comfyui server. There are two sub-packages `glob` and `legacy`. These represent the current version (`glob`) and the previous version (`legacy`), not including common utilities and data models. When developing, we work in the `glob` package. You can ignore the `legacy` package entirely, unless you have a very good reason to research how things were done in the legacy or prior major versions of the package. But in those cases, you should just look for the sake of knowledge or reflection, not for changing code (unless explicitly asked to do so).
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
from comfy.cli_args import args
|
|
||||||
|
|
||||||
SECURITY_MESSAGE_MIDDLE_OR_BELOW = "ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
SECURITY_MESSAGE_MIDDLE = "ERROR: To use this action, a security_level of `normal or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
||||||
|
SECURITY_MESSAGE_MIDDLE_P = "ERROR: To use this action, security_level must be `normal or below`, and network_mode must be set to `personal_cloud`. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
||||||
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
||||||
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
||||||
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
|
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
|
||||||
@@ -15,9 +15,6 @@ def is_loopback(address):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
is_local_mode = is_loopback(args.listen)
|
|
||||||
|
|
||||||
|
|
||||||
model_dir_name_map = {
|
model_dir_name_map = {
|
||||||
"checkpoints": "checkpoints",
|
"checkpoints": "checkpoints",
|
||||||
"checkpoint": "checkpoints",
|
"checkpoint": "checkpoints",
|
||||||
@@ -37,3 +34,22 @@ model_dir_name_map = {
|
|||||||
"unet": "diffusion_models",
|
"unet": "diffusion_models",
|
||||||
"diffusion_model": "diffusion_models",
|
"diffusion_model": "diffusion_models",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# List of all model directory names used for checking installed models
|
||||||
|
MODEL_DIR_NAMES = [
|
||||||
|
"checkpoints",
|
||||||
|
"loras",
|
||||||
|
"vae",
|
||||||
|
"text_encoders",
|
||||||
|
"diffusion_models",
|
||||||
|
"clip_vision",
|
||||||
|
"embeddings",
|
||||||
|
"diffusers",
|
||||||
|
"vae_approx",
|
||||||
|
"controlnet",
|
||||||
|
"gligen",
|
||||||
|
"upscale_models",
|
||||||
|
"hypernetworks",
|
||||||
|
"photomaker",
|
||||||
|
"classifiers",
|
||||||
|
]
|
||||||
|
|||||||
@@ -304,16 +304,84 @@ class ManagedResult:
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class NormalizedKeyDict:
|
||||||
|
def __init__(self):
|
||||||
|
self._store = {}
|
||||||
|
self._key_map = {}
|
||||||
|
|
||||||
|
def _normalize_key(self, key):
|
||||||
|
if isinstance(key, str):
|
||||||
|
return key.strip().lower()
|
||||||
|
return key
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
self._key_map[norm_key] = key
|
||||||
|
self._store[key] = value
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
original_key = self._key_map[norm_key]
|
||||||
|
return self._store[original_key]
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
original_key = self._key_map.pop(norm_key)
|
||||||
|
del self._store[original_key]
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return self._normalize_key(key) in self._key_map
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return self[key] if key in self else default
|
||||||
|
|
||||||
|
def setdefault(self, key, default=None):
|
||||||
|
if key in self:
|
||||||
|
return self[key]
|
||||||
|
self[key] = default
|
||||||
|
return default
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
if key in self:
|
||||||
|
val = self[key]
|
||||||
|
del self[key]
|
||||||
|
return val
|
||||||
|
if default is not None:
|
||||||
|
return default
|
||||||
|
raise KeyError(key)
|
||||||
|
|
||||||
|
def keys(self):
|
||||||
|
return self._store.keys()
|
||||||
|
|
||||||
|
def values(self):
|
||||||
|
return self._store.values()
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
return self._store.items()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(self._store)
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._store)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return repr(self._store)
|
||||||
|
|
||||||
|
def to_dict(self):
|
||||||
|
return dict(self._store)
|
||||||
|
|
||||||
|
|
||||||
class UnifiedManager:
|
class UnifiedManager:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.installed_node_packages: dict[str, InstalledNodePackage] = {}
|
self.installed_node_packages: dict[str, InstalledNodePackage] = {}
|
||||||
|
|
||||||
self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
|
self.cnr_inactive_nodes = NormalizedKeyDict() # node_id -> node_version -> fullpath
|
||||||
self.nightly_inactive_nodes = {} # node_id -> fullpath
|
self.nightly_inactive_nodes = NormalizedKeyDict() # node_id -> fullpath
|
||||||
self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.active_nodes = {} # node_id -> node_version * fullpath
|
self.active_nodes = NormalizedKeyDict() # node_id -> node_version * fullpath
|
||||||
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.cnr_map = {} # node_id -> cnr info
|
self.cnr_map = NormalizedKeyDict() # node_id -> cnr info
|
||||||
self.repo_cnr_map = {} # repo_url -> cnr info
|
self.repo_cnr_map = {} # repo_url -> cnr info
|
||||||
self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
|
self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
|
||||||
self.processed_install = set()
|
self.processed_install = set()
|
||||||
@@ -721,7 +789,7 @@ class UnifiedManager:
|
|||||||
channel = normalize_channel(channel)
|
channel = normalize_channel(channel)
|
||||||
nodes = await self.load_nightly(channel, mode)
|
nodes = await self.load_nightly(channel, mode)
|
||||||
|
|
||||||
res = {}
|
res = NormalizedKeyDict()
|
||||||
added_cnr = set()
|
added_cnr = set()
|
||||||
for v in nodes.values():
|
for v in nodes.values():
|
||||||
v = v[0]
|
v = v[0]
|
||||||
@@ -1558,16 +1626,18 @@ def read_config():
|
|||||||
config = configparser.ConfigParser(strict=False)
|
config = configparser.ConfigParser(strict=False)
|
||||||
config.read(context.manager_config_path)
|
config.read(context.manager_config_path)
|
||||||
default_conf = config['default']
|
default_conf = config['default']
|
||||||
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
|
|
||||||
|
|
||||||
def get_bool(key, default_value):
|
def get_bool(key, default_value):
|
||||||
return default_conf[key].lower() == 'true' if key in default_conf else False
|
return default_conf[key].lower() == 'true' if key in default_conf else False
|
||||||
|
|
||||||
|
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
|
||||||
|
manager_util.bypass_ssl = get_bool('bypass_ssl', False)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'http_channel_enabled': get_bool('http_channel_enabled', False),
|
'http_channel_enabled': get_bool('http_channel_enabled', False),
|
||||||
'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
|
'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
|
||||||
'git_exe': default_conf.get('git_exe', ''),
|
'git_exe': default_conf.get('git_exe', ''),
|
||||||
'use_uv': get_bool('use_uv', False),
|
'use_uv': get_bool('use_uv', True),
|
||||||
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
|
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
|
||||||
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
|
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
|
||||||
'share_option': default_conf.get('share_option', 'all').lower(),
|
'share_option': default_conf.get('share_option', 'all').lower(),
|
||||||
@@ -1585,16 +1655,20 @@ def read_config():
|
|||||||
}
|
}
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
manager_util.use_uv = False
|
import importlib.util
|
||||||
|
# temporary disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
|
||||||
|
manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
|
||||||
|
manager_util.bypass_ssl = False
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'http_channel_enabled': False,
|
'http_channel_enabled': False,
|
||||||
'preview_method': manager_funcs.get_current_preview_method(),
|
'preview_method': manager_funcs.get_current_preview_method(),
|
||||||
'git_exe': '',
|
'git_exe': '',
|
||||||
'use_uv': False,
|
'use_uv': manager_util.use_uv,
|
||||||
'channel_url': DEFAULT_CHANNEL,
|
'channel_url': DEFAULT_CHANNEL,
|
||||||
'default_cache_as_channel_url': False,
|
'default_cache_as_channel_url': False,
|
||||||
'share_option': 'all',
|
'share_option': 'all',
|
||||||
'bypass_ssl': False,
|
'bypass_ssl': manager_util.bypass_ssl,
|
||||||
'file_logging': True,
|
'file_logging': True,
|
||||||
'component_policy': 'workflow',
|
'component_policy': 'workflow',
|
||||||
'update_policy': 'stable-comfyui',
|
'update_policy': 'stable-comfyui',
|
||||||
@@ -2572,8 +2646,8 @@ async def get_current_snapshot(custom_nodes_only = False):
|
|||||||
commit_hash = git_utils.get_commit_hash(fullpath)
|
commit_hash = git_utils.get_commit_hash(fullpath)
|
||||||
url = git_utils.git_url(fullpath)
|
url = git_utils.git_url(fullpath)
|
||||||
git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled)
|
git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled)
|
||||||
except Exception:
|
except Exception as e:
|
||||||
print(f"Failed to extract snapshots for the custom node '{path}'.")
|
print(f"Failed to extract snapshots for the custom node '{path}'. / {e}")
|
||||||
|
|
||||||
elif path.endswith('.py'):
|
elif path.endswith('.py'):
|
||||||
is_disabled = path.endswith(".py.disabled")
|
is_disabled = path.endswith(".py.disabled")
|
||||||
@@ -2791,7 +2865,7 @@ async def get_unified_total_nodes(channel, mode, regsitry_cache_mode='cache'):
|
|||||||
|
|
||||||
if cnr_id is not None:
|
if cnr_id is not None:
|
||||||
# cnr or nightly version
|
# cnr or nightly version
|
||||||
cnr_ids.remove(cnr_id)
|
cnr_ids.discard(cnr_id)
|
||||||
updatable = False
|
updatable = False
|
||||||
cnr = unified_manager.cnr_map[cnr_id]
|
cnr = unified_manager.cnr_map[cnr_id]
|
||||||
|
|
||||||
@@ -2955,6 +3029,11 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
|
|||||||
info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
|
info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
|
||||||
info = info['custom_nodes']
|
info = info['custom_nodes']
|
||||||
|
|
||||||
|
if 'pips' in info and info['pips']:
|
||||||
|
pips = info['pips']
|
||||||
|
else:
|
||||||
|
pips = {}
|
||||||
|
|
||||||
# for cnr restore
|
# for cnr restore
|
||||||
cnr_info = info.get('cnr_custom_nodes')
|
cnr_info = info.get('cnr_custom_nodes')
|
||||||
if cnr_info is not None:
|
if cnr_info is not None:
|
||||||
@@ -3161,6 +3240,8 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
|
|||||||
unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
|
unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
|
||||||
cloned_repos.append(repo_name)
|
cloned_repos.append(repo_name)
|
||||||
|
|
||||||
|
manager_util.restore_pip_snapshot(pips, git_helper_extras)
|
||||||
|
|
||||||
# print summary
|
# print summary
|
||||||
for x in cloned_repos:
|
for x in cloned_repos:
|
||||||
print(f"[ INSTALLED ] {x}")
|
print(f"[ INSTALLED ] {x}")
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,10 @@
|
|||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
|
import concurrent.futures
|
||||||
import folder_paths
|
import folder_paths
|
||||||
|
|
||||||
from comfyui_manager.glob import manager_core as core
|
from comfyui_manager.glob import manager_core as core
|
||||||
from comfyui_manager.glob.constants import model_dir_name_map
|
from comfyui_manager.glob.constants import model_dir_name_map, MODEL_DIR_NAMES
|
||||||
|
|
||||||
|
|
||||||
def get_model_dir(data, show_log=False):
|
def get_model_dir(data, show_log=False):
|
||||||
@@ -72,3 +73,89 @@ def get_model_path(data, show_log=False):
|
|||||||
return os.path.join(base_model, os.path.basename(data["url"]))
|
return os.path.join(base_model, os.path.basename(data["url"]))
|
||||||
else:
|
else:
|
||||||
return os.path.join(base_model, data["filename"])
|
return os.path.join(base_model, data["filename"])
|
||||||
|
|
||||||
|
|
||||||
|
def check_model_installed(json_obj):
|
||||||
|
def is_exists(model_dir_name, filename, url):
|
||||||
|
if filename == "<huggingface>":
|
||||||
|
filename = os.path.basename(url)
|
||||||
|
|
||||||
|
dirs = folder_paths.get_folder_paths(model_dir_name)
|
||||||
|
|
||||||
|
for x in dirs:
|
||||||
|
if os.path.exists(os.path.join(x, filename)):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
total_models_files = set()
|
||||||
|
for x in MODEL_DIR_NAMES:
|
||||||
|
for y in folder_paths.get_filename_list(x):
|
||||||
|
total_models_files.add(y)
|
||||||
|
|
||||||
|
def process_model_phase(item):
|
||||||
|
if (
|
||||||
|
"diffusion" not in item["filename"]
|
||||||
|
and "pytorch" not in item["filename"]
|
||||||
|
and "model" not in item["filename"]
|
||||||
|
):
|
||||||
|
# non-general name case
|
||||||
|
if item["filename"] in total_models_files:
|
||||||
|
item["installed"] = "True"
|
||||||
|
return
|
||||||
|
|
||||||
|
if item["save_path"] == "default":
|
||||||
|
model_dir_name = model_dir_name_map.get(item["type"].lower())
|
||||||
|
if model_dir_name is not None:
|
||||||
|
item["installed"] = str(
|
||||||
|
is_exists(model_dir_name, item["filename"], item["url"])
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
item["installed"] = "False"
|
||||||
|
else:
|
||||||
|
model_dir_name = item["save_path"].split("/")[0]
|
||||||
|
if model_dir_name in folder_paths.folder_names_and_paths:
|
||||||
|
if is_exists(model_dir_name, item["filename"], item["url"]):
|
||||||
|
item["installed"] = "True"
|
||||||
|
|
||||||
|
if "installed" not in item:
|
||||||
|
if item["filename"] == "<huggingface>":
|
||||||
|
filename = os.path.basename(item["url"])
|
||||||
|
else:
|
||||||
|
filename = item["filename"]
|
||||||
|
|
||||||
|
fullpath = os.path.join(
|
||||||
|
folder_paths.models_dir, item["save_path"], filename
|
||||||
|
)
|
||||||
|
|
||||||
|
item["installed"] = "True" if os.path.exists(fullpath) else "False"
|
||||||
|
|
||||||
|
with concurrent.futures.ThreadPoolExecutor(8) as executor:
|
||||||
|
for item in json_obj["models"]:
|
||||||
|
executor.submit(process_model_phase, item)
|
||||||
|
|
||||||
|
|
||||||
|
async def check_whitelist_for_model(item):
|
||||||
|
from comfyui_manager.data_models import ManagerDatabaseSource
|
||||||
|
|
||||||
|
json_obj = await core.get_data_by_mode(ManagerDatabaseSource.cache.value, "model-list.json")
|
||||||
|
|
||||||
|
for x in json_obj.get("models", []):
|
||||||
|
if (
|
||||||
|
x["save_path"] == item["save_path"]
|
||||||
|
and x["base"] == item["base"]
|
||||||
|
and x["filename"] == item["filename"]
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
json_obj = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "model-list.json")
|
||||||
|
|
||||||
|
for x in json_obj.get("models", []):
|
||||||
|
if (
|
||||||
|
x["save_path"] == item["save_path"]
|
||||||
|
and x["base"] == item["base"]
|
||||||
|
and x["filename"] == item["filename"]
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
from comfyui_manager.glob import manager_core as core
|
from comfyui_manager.glob import manager_core as core
|
||||||
from comfy.cli_args import args
|
from comfy.cli_args import args
|
||||||
|
from comfyui_manager.data_models import SecurityLevel, RiskLevel, ManagerDatabaseSource
|
||||||
|
|
||||||
|
|
||||||
def is_loopback(address):
|
def is_loopback(address):
|
||||||
@@ -12,24 +13,37 @@ def is_loopback(address):
|
|||||||
|
|
||||||
def is_allowed_security_level(level):
|
def is_allowed_security_level(level):
|
||||||
is_local_mode = is_loopback(args.listen)
|
is_local_mode = is_loopback(args.listen)
|
||||||
|
is_personal_cloud = core.get_config()['network_mode'].lower() == 'personal_cloud'
|
||||||
|
|
||||||
if level == "block":
|
if level == RiskLevel.block.value:
|
||||||
return False
|
return False
|
||||||
elif level == "high":
|
elif level == RiskLevel.high_.value:
|
||||||
if is_local_mode:
|
if is_local_mode:
|
||||||
return core.get_config()["security_level"] in ["weak", "normal-"]
|
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
|
||||||
|
elif is_personal_cloud:
|
||||||
|
return core.get_config()['security_level'] == SecurityLevel.weak.value
|
||||||
else:
|
else:
|
||||||
return core.get_config()["security_level"] == "weak"
|
return False
|
||||||
elif level == "middle":
|
elif level == RiskLevel.high.value:
|
||||||
return core.get_config()["security_level"] in ["weak", "normal", "normal-"]
|
if is_local_mode:
|
||||||
|
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
|
||||||
|
else:
|
||||||
|
return core.get_config()['security_level'] == SecurityLevel.weak.value
|
||||||
|
elif level == RiskLevel.middle_.value:
|
||||||
|
if is_local_mode or is_personal_cloud:
|
||||||
|
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
elif level == RiskLevel.middle.value:
|
||||||
|
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
async def get_risky_level(files, pip_packages):
|
async def get_risky_level(files, pip_packages):
|
||||||
json_data1 = await core.get_data_by_mode("local", "custom-node-list.json")
|
json_data1 = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "custom-node-list.json")
|
||||||
json_data2 = await core.get_data_by_mode(
|
json_data2 = await core.get_data_by_mode(
|
||||||
"cache",
|
ManagerDatabaseSource.cache.value,
|
||||||
"custom-node-list.json",
|
"custom-node-list.json",
|
||||||
channel_url="https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main",
|
channel_url="https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main",
|
||||||
)
|
)
|
||||||
@@ -40,7 +54,7 @@ async def get_risky_level(files, pip_packages):
|
|||||||
|
|
||||||
for x in files:
|
for x in files:
|
||||||
if x not in all_urls:
|
if x not in all_urls:
|
||||||
return "high"
|
return RiskLevel.high_.value
|
||||||
|
|
||||||
all_pip_packages = set()
|
all_pip_packages = set()
|
||||||
for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
|
for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
|
||||||
@@ -48,6 +62,6 @@ async def get_risky_level(files, pip_packages):
|
|||||||
|
|
||||||
for p in pip_packages:
|
for p in pip_packages:
|
||||||
if p not in all_pip_packages:
|
if p not in all_pip_packages:
|
||||||
return "block"
|
return RiskLevel.block.value
|
||||||
|
|
||||||
return "middle"
|
return RiskLevel.middle_.value
|
||||||
|
|||||||
@@ -222,9 +222,6 @@ function isBeforeFrontendVersion(compareVersion) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const is_legacy_front = () => isBeforeFrontendVersion('1.2.49');
|
|
||||||
const isNotNewManagerUI = () => isBeforeFrontendVersion('1.16.4');
|
|
||||||
|
|
||||||
document.head.appendChild(docStyle);
|
document.head.appendChild(docStyle);
|
||||||
|
|
||||||
var update_comfyui_button = null;
|
var update_comfyui_button = null;
|
||||||
@@ -1518,11 +1515,8 @@ app.registerExtension({
|
|||||||
}).element
|
}).element
|
||||||
);
|
);
|
||||||
|
|
||||||
const shouldShowLegacyMenuItems = isNotNewManagerUI();
|
|
||||||
if (shouldShowLegacyMenuItems) {
|
|
||||||
app.menu?.settingsGroup.element.before(cmGroup.element);
|
app.menu?.settingsGroup.element.before(cmGroup.element);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
catch(exception) {
|
catch(exception) {
|
||||||
console.log('ComfyUI is outdated. New style menu based features are disabled.');
|
console.log('ComfyUI is outdated. New style menu based features are disabled.');
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -342,15 +342,11 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
["0/70"]
|
["0/70"]
|
||||||
);
|
);
|
||||||
// Additional Inputs Section
|
// Additional Inputs Section
|
||||||
const additionalInputsSection = $el(
|
const additionalInputsSection = $el("div", { style: { ...sectionStyle } }, [
|
||||||
"div",
|
|
||||||
{ style: { ...sectionStyle, } },
|
|
||||||
[
|
|
||||||
$el("label", { style: labelStyle }, ["3️⃣ Title "]),
|
$el("label", { style: labelStyle }, ["3️⃣ Title "]),
|
||||||
this.TitleInput,
|
this.TitleInput,
|
||||||
titleNumDom,
|
titleNumDom,
|
||||||
]
|
]);
|
||||||
);
|
|
||||||
const SubtitleSection = $el("div", { style: sectionStyle }, [
|
const SubtitleSection = $el("div", { style: sectionStyle }, [
|
||||||
$el("label", { style: labelStyle }, ["4️⃣ Subtitle "]),
|
$el("label", { style: labelStyle }, ["4️⃣ Subtitle "]),
|
||||||
this.SubTitleInput,
|
this.SubTitleInput,
|
||||||
@@ -392,11 +388,31 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
},
|
},
|
||||||
[
|
[
|
||||||
this.radioButtonsCheck_lock,
|
this.radioButtonsCheck_lock,
|
||||||
$el("div", { style: { marginLeft: "5px" ,display:'flex',alignItems:'center'} }, [
|
$el(
|
||||||
|
"div",
|
||||||
|
{
|
||||||
|
style: {
|
||||||
|
marginLeft: "5px",
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
[
|
||||||
$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
|
$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
|
||||||
$el("span", { style: { marginLeft: "20px",marginRight:'10px' ,color:'#fff'} }, ["Price US$"]),
|
$el(
|
||||||
this.LockInput
|
"span",
|
||||||
]),
|
{
|
||||||
|
style: {
|
||||||
|
marginLeft: "20px",
|
||||||
|
marginRight: "10px",
|
||||||
|
color: "#fff",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
["Price US$"]
|
||||||
|
),
|
||||||
|
this.LockInput,
|
||||||
|
]
|
||||||
|
),
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
$el(
|
$el(
|
||||||
@@ -404,14 +420,28 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||||
[
|
[
|
||||||
this.radioButtonsCheckOff_lock,
|
this.radioButtonsCheckOff_lock,
|
||||||
|
$el(
|
||||||
|
"div",
|
||||||
|
{
|
||||||
|
style: {
|
||||||
|
marginLeft: "5px",
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
[
|
||||||
$el("span", { style: { marginLeft: "5px" } }, ["OFF"]),
|
$el("span", { style: { marginLeft: "5px" } }, ["OFF"]),
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
|
||||||
$el(
|
$el(
|
||||||
"p",
|
"p",
|
||||||
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
|
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
|
||||||
["Get paid from your workflow. You can change the price and withdraw your earnings on Copus."]
|
[
|
||||||
|
"Get paid from your workflow. You can change the price and withdraw your earnings on Copus.",
|
||||||
|
]
|
||||||
),
|
),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
@@ -432,7 +462,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const blockChainSection = $el("div", { style: sectionStyle }, [
|
const blockChainSection = $el("div", { style: sectionStyle }, [
|
||||||
$el("label", { style: labelStyle }, ["7️⃣ Store on blockchain "]),
|
$el("label", { style: labelStyle }, ["8️⃣ Store on blockchain "]),
|
||||||
$el(
|
$el(
|
||||||
"label",
|
"label",
|
||||||
{
|
{
|
||||||
@@ -463,6 +493,139 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
),
|
),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
this.ratingRadioButtonsCheck0 = $el("input", {
|
||||||
|
type: "radio",
|
||||||
|
name: "content_rating",
|
||||||
|
value: "0",
|
||||||
|
id: "content_rating0",
|
||||||
|
});
|
||||||
|
this.ratingRadioButtonsCheck1 = $el("input", {
|
||||||
|
type: "radio",
|
||||||
|
name: "content_rating",
|
||||||
|
value: "1",
|
||||||
|
id: "content_rating1",
|
||||||
|
});
|
||||||
|
this.ratingRadioButtonsCheck2 = $el("input", {
|
||||||
|
type: "radio",
|
||||||
|
name: "content_rating",
|
||||||
|
value: "2",
|
||||||
|
id: "content_rating2",
|
||||||
|
});
|
||||||
|
this.ratingRadioButtonsCheck_1 = $el("input", {
|
||||||
|
type: "radio",
|
||||||
|
name: "content_rating",
|
||||||
|
value: "-1",
|
||||||
|
id: "content_rating_1",
|
||||||
|
checked: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// content rating
|
||||||
|
const contentRatingSection = $el("div", { style: sectionStyle }, [
|
||||||
|
$el("label", { style: labelStyle }, ["7️⃣ Content rating "]),
|
||||||
|
$el(
|
||||||
|
"label",
|
||||||
|
{
|
||||||
|
style: {
|
||||||
|
marginTop: "10px",
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
cursor: "pointer",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
[
|
||||||
|
this.ratingRadioButtonsCheck0,
|
||||||
|
$el("img", {
|
||||||
|
style: {
|
||||||
|
width: "12px",
|
||||||
|
height: "12px",
|
||||||
|
marginLeft: "5px",
|
||||||
|
},
|
||||||
|
src: "https://static.copus.io/images/client/202507/test/b9f17da83b054d53cd0cb4508c2c30dc.png",
|
||||||
|
}),
|
||||||
|
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||||
|
"All ages",
|
||||||
|
]),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"p",
|
||||||
|
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||||
|
["Safe for all viewers; no profanity, violence, or mature themes."]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"label",
|
||||||
|
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||||
|
[
|
||||||
|
this.ratingRadioButtonsCheck1,
|
||||||
|
$el("img", {
|
||||||
|
style: {
|
||||||
|
width: "12px",
|
||||||
|
height: "12px",
|
||||||
|
marginLeft: "5px",
|
||||||
|
},
|
||||||
|
src: "https://static.copus.io/images/client/202507/test/7848bc0d3690671df21c7cf00c4cfc81.png",
|
||||||
|
}),
|
||||||
|
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||||
|
"13+ (Teen)",
|
||||||
|
]),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"p",
|
||||||
|
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||||
|
[
|
||||||
|
"Mild language, light themes, or cartoon violence; no explicit content. ",
|
||||||
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"label",
|
||||||
|
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||||
|
[
|
||||||
|
this.ratingRadioButtonsCheck2,
|
||||||
|
$el("img", {
|
||||||
|
style: {
|
||||||
|
width: "12px",
|
||||||
|
height: "12px",
|
||||||
|
marginLeft: "5px",
|
||||||
|
},
|
||||||
|
src: "https://static.copus.io/images/client/202507/test/bc51839c208d68d91173e43c23bff039.png",
|
||||||
|
}),
|
||||||
|
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||||
|
"18+ (Explicit)",
|
||||||
|
]),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"p",
|
||||||
|
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||||
|
[
|
||||||
|
"Explicit content, including sexual content, strong violence, or intense themes. ",
|
||||||
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"label",
|
||||||
|
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||||
|
[
|
||||||
|
this.ratingRadioButtonsCheck_1,
|
||||||
|
$el("img", {
|
||||||
|
style: {
|
||||||
|
width: "12px",
|
||||||
|
height: "12px",
|
||||||
|
marginLeft: "5px",
|
||||||
|
},
|
||||||
|
src: "https://static.copus.io/images/client/202507/test/5c802fdcaaea4e7bbed37393eec0d5ba.png",
|
||||||
|
}),
|
||||||
|
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||||
|
"Not Rated",
|
||||||
|
]),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"p",
|
||||||
|
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||||
|
["No age rating provided."]
|
||||||
|
),
|
||||||
|
]);
|
||||||
|
|
||||||
// Message Section
|
// Message Section
|
||||||
this.message = $el(
|
this.message = $el(
|
||||||
@@ -526,6 +689,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
DescriptionSection,
|
DescriptionSection,
|
||||||
// contestSection,
|
// contestSection,
|
||||||
blockChainSection_lock,
|
blockChainSection_lock,
|
||||||
|
contentRatingSection,
|
||||||
blockChainSection,
|
blockChainSection,
|
||||||
this.message,
|
this.message,
|
||||||
buttonsSection,
|
buttonsSection,
|
||||||
@@ -587,7 +751,9 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
url: data,
|
url: data,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
throw new Error("make sure your API key is correct and try again later");
|
throw new Error(
|
||||||
|
"make sure your API key is correct and try again later"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e?.response?.status === 413) {
|
if (e?.response?.status === 413) {
|
||||||
@@ -630,6 +796,13 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
storeOnChain: this.radioButtonsCheck.checked ? true : false,
|
storeOnChain: this.radioButtonsCheck.checked ? true : false,
|
||||||
lockState: this.radioButtonsCheck_lock.checked ? 2 : 0,
|
lockState: this.radioButtonsCheck_lock.checked ? 2 : 0,
|
||||||
unlockPrice: this.LockInput.value,
|
unlockPrice: this.LockInput.value,
|
||||||
|
rating: this.ratingRadioButtonsCheck0.checked
|
||||||
|
? 0
|
||||||
|
: this.ratingRadioButtonsCheck1.checked
|
||||||
|
? 1
|
||||||
|
: this.ratingRadioButtonsCheck2.checked
|
||||||
|
? 2
|
||||||
|
: -1,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!this.keyInput.value) {
|
if (!this.keyInput.value) {
|
||||||
|
|||||||
@@ -714,6 +714,7 @@ export class CustomNodesManager {
|
|||||||
link.href = rowItem.reference;
|
link.href = rowItem.reference;
|
||||||
link.target = '_blank';
|
link.target = '_blank';
|
||||||
link.innerHTML = `<b>${title}</b>`;
|
link.innerHTML = `<b>${title}</b>`;
|
||||||
|
link.title = rowItem.originalData.id;
|
||||||
container.appendChild(link);
|
container.appendChild(link);
|
||||||
|
|
||||||
return container;
|
return container;
|
||||||
|
|||||||
@@ -304,16 +304,84 @@ class ManagedResult:
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class NormalizedKeyDict:
|
||||||
|
def __init__(self):
|
||||||
|
self._store = {}
|
||||||
|
self._key_map = {}
|
||||||
|
|
||||||
|
def _normalize_key(self, key):
|
||||||
|
if isinstance(key, str):
|
||||||
|
return key.strip().lower()
|
||||||
|
return key
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
self._key_map[norm_key] = key
|
||||||
|
self._store[key] = value
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
original_key = self._key_map[norm_key]
|
||||||
|
return self._store[original_key]
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
original_key = self._key_map.pop(norm_key)
|
||||||
|
del self._store[original_key]
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return self._normalize_key(key) in self._key_map
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return self[key] if key in self else default
|
||||||
|
|
||||||
|
def setdefault(self, key, default=None):
|
||||||
|
if key in self:
|
||||||
|
return self[key]
|
||||||
|
self[key] = default
|
||||||
|
return default
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
if key in self:
|
||||||
|
val = self[key]
|
||||||
|
del self[key]
|
||||||
|
return val
|
||||||
|
if default is not None:
|
||||||
|
return default
|
||||||
|
raise KeyError(key)
|
||||||
|
|
||||||
|
def keys(self):
|
||||||
|
return self._store.keys()
|
||||||
|
|
||||||
|
def values(self):
|
||||||
|
return self._store.values()
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
return self._store.items()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(self._store)
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._store)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return repr(self._store)
|
||||||
|
|
||||||
|
def to_dict(self):
|
||||||
|
return dict(self._store)
|
||||||
|
|
||||||
|
|
||||||
class UnifiedManager:
|
class UnifiedManager:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.installed_node_packages: dict[str, InstalledNodePackage] = {}
|
self.installed_node_packages: dict[str, InstalledNodePackage] = {}
|
||||||
|
|
||||||
self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
|
self.cnr_inactive_nodes = NormalizedKeyDict() # node_id -> node_version -> fullpath
|
||||||
self.nightly_inactive_nodes = {} # node_id -> fullpath
|
self.nightly_inactive_nodes = NormalizedKeyDict() # node_id -> fullpath
|
||||||
self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.active_nodes = {} # node_id -> node_version * fullpath
|
self.active_nodes = NormalizedKeyDict() # node_id -> node_version * fullpath
|
||||||
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.cnr_map = {} # node_id -> cnr info
|
self.cnr_map = NormalizedKeyDict() # node_id -> cnr info
|
||||||
self.repo_cnr_map = {} # repo_url -> cnr info
|
self.repo_cnr_map = {} # repo_url -> cnr info
|
||||||
self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
|
self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
|
||||||
self.processed_install = set()
|
self.processed_install = set()
|
||||||
@@ -721,7 +789,7 @@ class UnifiedManager:
|
|||||||
channel = normalize_channel(channel)
|
channel = normalize_channel(channel)
|
||||||
nodes = await self.load_nightly(channel, mode)
|
nodes = await self.load_nightly(channel, mode)
|
||||||
|
|
||||||
res = {}
|
res = NormalizedKeyDict()
|
||||||
added_cnr = set()
|
added_cnr = set()
|
||||||
for v in nodes.values():
|
for v in nodes.values():
|
||||||
v = v[0]
|
v = v[0]
|
||||||
@@ -1557,16 +1625,18 @@ def read_config():
|
|||||||
config = configparser.ConfigParser(strict=False)
|
config = configparser.ConfigParser(strict=False)
|
||||||
config.read(context.manager_config_path)
|
config.read(context.manager_config_path)
|
||||||
default_conf = config['default']
|
default_conf = config['default']
|
||||||
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
|
|
||||||
|
|
||||||
def get_bool(key, default_value):
|
def get_bool(key, default_value):
|
||||||
return default_conf[key].lower() == 'true' if key in default_conf else False
|
return default_conf[key].lower() == 'true' if key in default_conf else False
|
||||||
|
|
||||||
|
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
|
||||||
|
manager_util.bypass_ssl = get_bool('bypass_ssl', False)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'http_channel_enabled': get_bool('http_channel_enabled', False),
|
'http_channel_enabled': get_bool('http_channel_enabled', False),
|
||||||
'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
|
'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
|
||||||
'git_exe': default_conf.get('git_exe', ''),
|
'git_exe': default_conf.get('git_exe', ''),
|
||||||
'use_uv': get_bool('use_uv', False),
|
'use_uv': get_bool('use_uv', True),
|
||||||
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
|
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
|
||||||
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
|
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
|
||||||
'share_option': default_conf.get('share_option', 'all').lower(),
|
'share_option': default_conf.get('share_option', 'all').lower(),
|
||||||
@@ -1585,15 +1655,17 @@ def read_config():
|
|||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
manager_util.use_uv = False
|
manager_util.use_uv = False
|
||||||
|
manager_util.bypass_ssl = False
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'http_channel_enabled': False,
|
'http_channel_enabled': False,
|
||||||
'preview_method': manager_funcs.get_current_preview_method(),
|
'preview_method': manager_funcs.get_current_preview_method(),
|
||||||
'git_exe': '',
|
'git_exe': '',
|
||||||
'use_uv': False,
|
'use_uv': True,
|
||||||
'channel_url': DEFAULT_CHANNEL,
|
'channel_url': DEFAULT_CHANNEL,
|
||||||
'default_cache_as_channel_url': False,
|
'default_cache_as_channel_url': False,
|
||||||
'share_option': 'all',
|
'share_option': 'all',
|
||||||
'bypass_ssl': False,
|
'bypass_ssl': manager_util.bypass_ssl,
|
||||||
'file_logging': True,
|
'file_logging': True,
|
||||||
'component_policy': 'workflow',
|
'component_policy': 'workflow',
|
||||||
'update_policy': 'stable-comfyui',
|
'update_policy': 'stable-comfyui',
|
||||||
@@ -2776,7 +2848,7 @@ async def get_unified_total_nodes(channel, mode, regsitry_cache_mode='cache'):
|
|||||||
|
|
||||||
if cnr_id is not None:
|
if cnr_id is not None:
|
||||||
# cnr or nightly version
|
# cnr or nightly version
|
||||||
cnr_ids.remove(cnr_id)
|
cnr_ids.discard(cnr_id)
|
||||||
updatable = False
|
updatable = False
|
||||||
cnr = unified_manager.cnr_map[cnr_id]
|
cnr = unified_manager.cnr_map[cnr_id]
|
||||||
|
|
||||||
@@ -2940,6 +3012,11 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
|
|||||||
info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
|
info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
|
||||||
info = info['custom_nodes']
|
info = info['custom_nodes']
|
||||||
|
|
||||||
|
if 'pips' in info and info['pips']:
|
||||||
|
pips = info['pips']
|
||||||
|
else:
|
||||||
|
pips = {}
|
||||||
|
|
||||||
# for cnr restore
|
# for cnr restore
|
||||||
cnr_info = info.get('cnr_custom_nodes')
|
cnr_info = info.get('cnr_custom_nodes')
|
||||||
if cnr_info is not None:
|
if cnr_info is not None:
|
||||||
@@ -3146,6 +3223,8 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
|
|||||||
unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
|
unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
|
||||||
cloned_repos.append(repo_name)
|
cloned_repos.append(repo_name)
|
||||||
|
|
||||||
|
manager_util.restore_pip_snapshot(pips, git_helper_extras)
|
||||||
|
|
||||||
# print summary
|
# print summary
|
||||||
for x in cloned_repos:
|
for x in cloned_repos:
|
||||||
print(f"[ INSTALLED ] {x}")
|
print(f"[ INSTALLED ] {x}")
|
||||||
|
|||||||
@@ -23,6 +23,8 @@ from ..common import manager_util
|
|||||||
from ..common import cm_global
|
from ..common import cm_global
|
||||||
from ..common import manager_downloader
|
from ..common import manager_downloader
|
||||||
from ..common import context
|
from ..common import context
|
||||||
|
from ..common import manager_security
|
||||||
|
from ..common import snapshot_util
|
||||||
|
|
||||||
|
|
||||||
logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
|
logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
|
||||||
@@ -36,7 +38,8 @@ logging.info("[ComfyUI-Manager] network_mode: " + network_mode_description)
|
|||||||
comfy_ui_hash = "-"
|
comfy_ui_hash = "-"
|
||||||
comfyui_tag = None
|
comfyui_tag = None
|
||||||
|
|
||||||
SECURITY_MESSAGE_MIDDLE_OR_BELOW = "ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
|
SECURITY_MESSAGE_MIDDLE = "ERROR: To use this action, a security_level of `normal or below` is required. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
|
||||||
|
SECURITY_MESSAGE_MIDDLE_P = "ERROR: To use this action, security_level must be `normal or below`, and network_mode must be set to `personal_cloud`. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
|
||||||
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
|
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
|
||||||
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
|
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
|
||||||
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
|
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
|
||||||
@@ -93,13 +96,27 @@ model_dir_name_map = {
|
|||||||
|
|
||||||
|
|
||||||
def is_allowed_security_level(level):
|
def is_allowed_security_level(level):
|
||||||
|
is_personal_cloud = core.get_config()['network_mode'].lower() == 'personal_cloud'
|
||||||
|
|
||||||
if level == 'block':
|
if level == 'block':
|
||||||
return False
|
return False
|
||||||
|
elif level == 'high+':
|
||||||
|
if is_local_mode:
|
||||||
|
return core.get_config()['security_level'] in ['weak', 'normal-']
|
||||||
|
elif is_personal_cloud:
|
||||||
|
return core.get_config()['security_level'] == 'weak'
|
||||||
|
else:
|
||||||
|
return False
|
||||||
elif level == 'high':
|
elif level == 'high':
|
||||||
if is_local_mode:
|
if is_local_mode:
|
||||||
return core.get_config()['security_level'] in ['weak', 'normal-']
|
return core.get_config()['security_level'] in ['weak', 'normal-']
|
||||||
else:
|
else:
|
||||||
return core.get_config()['security_level'] == 'weak'
|
return core.get_config()['security_level'] == 'weak'
|
||||||
|
elif level == 'middle+':
|
||||||
|
if is_local_mode or is_personal_cloud:
|
||||||
|
return core.get_config()['security_level'] in ['weak', 'normal', 'normal-']
|
||||||
|
else:
|
||||||
|
return False
|
||||||
elif level == 'middle':
|
elif level == 'middle':
|
||||||
return core.get_config()['security_level'] in ['weak', 'normal', 'normal-']
|
return core.get_config()['security_level'] in ['weak', 'normal', 'normal-']
|
||||||
else:
|
else:
|
||||||
@@ -116,7 +133,7 @@ async def get_risky_level(files, pip_packages):
|
|||||||
|
|
||||||
for x in files:
|
for x in files:
|
||||||
if x not in all_urls:
|
if x not in all_urls:
|
||||||
return "high"
|
return "high+"
|
||||||
|
|
||||||
all_pip_packages = set()
|
all_pip_packages = set()
|
||||||
for x in json_data1['custom_nodes'] + json_data2['custom_nodes']:
|
for x in json_data1['custom_nodes'] + json_data2['custom_nodes']:
|
||||||
@@ -126,7 +143,7 @@ async def get_risky_level(files, pip_packages):
|
|||||||
if p not in all_pip_packages:
|
if p not in all_pip_packages:
|
||||||
return "block"
|
return "block"
|
||||||
|
|
||||||
return "middle"
|
return "middle+"
|
||||||
|
|
||||||
|
|
||||||
class ManagerFuncsInComfyUI(core.ManagerFuncs):
|
class ManagerFuncsInComfyUI(core.ManagerFuncs):
|
||||||
@@ -650,7 +667,7 @@ async def task_worker():
|
|||||||
return 'success'
|
return 'success'
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error(f"[ComfyUI-Manager] ERROR: {e}", file=sys.stderr)
|
logging.error(f"[ComfyUI-Manager] ERROR: {e}")
|
||||||
|
|
||||||
return f"Model installation error: {model_url}"
|
return f"Model installation error: {model_url}"
|
||||||
|
|
||||||
@@ -758,29 +775,29 @@ async def queue_batch(request):
|
|||||||
for x in v:
|
for x in v:
|
||||||
res = await _uninstall_custom_node(x)
|
res = await _uninstall_custom_node(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
else:
|
else:
|
||||||
res = await _install_custom_node(x)
|
res = await _install_custom_node(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
|
|
||||||
elif k == 'install':
|
elif k == 'install':
|
||||||
for x in v:
|
for x in v:
|
||||||
res = await _install_custom_node(x)
|
res = await _install_custom_node(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
|
|
||||||
elif k == 'uninstall':
|
elif k == 'uninstall':
|
||||||
for x in v:
|
for x in v:
|
||||||
res = await _uninstall_custom_node(x)
|
res = await _uninstall_custom_node(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
|
|
||||||
elif k == 'update':
|
elif k == 'update':
|
||||||
for x in v:
|
for x in v:
|
||||||
res = await _update_custom_node(x)
|
res = await _update_custom_node(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
|
|
||||||
elif k == 'update_comfyui':
|
elif k == 'update_comfyui':
|
||||||
await update_comfyui(None)
|
await update_comfyui(None)
|
||||||
@@ -793,13 +810,13 @@ async def queue_batch(request):
|
|||||||
for x in v:
|
for x in v:
|
||||||
res = await _install_model(x)
|
res = await _install_model(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
|
|
||||||
elif k == 'fix':
|
elif k == 'fix':
|
||||||
for x in v:
|
for x in v:
|
||||||
res = await _fix_custom_node(x)
|
res = await _fix_custom_node(x)
|
||||||
if res.status != 200:
|
if res.status != 200:
|
||||||
failed.add(x[0])
|
failed.add(x['id'])
|
||||||
|
|
||||||
with task_worker_lock:
|
with task_worker_lock:
|
||||||
finalize_temp_queue_batch(json_data, failed)
|
finalize_temp_queue_batch(json_data, failed)
|
||||||
@@ -910,8 +927,8 @@ async def update_all(request):
|
|||||||
|
|
||||||
|
|
||||||
async def _update_all(json_data):
|
async def _update_all(json_data):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle+'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE_P)
|
||||||
return web.Response(status=403)
|
return web.Response(status=403)
|
||||||
|
|
||||||
with task_worker_lock:
|
with task_worker_lock:
|
||||||
@@ -1063,7 +1080,7 @@ async def fetch_customnode_list(request):
|
|||||||
|
|
||||||
channel = found
|
channel = found
|
||||||
|
|
||||||
result = dict(channel=channel, node_packs=node_packs)
|
result = dict(channel=channel, node_packs=node_packs.to_dict())
|
||||||
|
|
||||||
return web.json_response(result, content_type='application/json')
|
return web.json_response(result, content_type='application/json')
|
||||||
|
|
||||||
@@ -1152,7 +1169,7 @@ async def fetch_externalmodel_list(request):
|
|||||||
return web.json_response(json_obj, content_type='application/json')
|
return web.json_response(json_obj, content_type='application/json')
|
||||||
|
|
||||||
|
|
||||||
@PromptServer.instance.routes.get("/v2/snapshot/getlist")
|
@routes.get("/v2/snapshot/getlist")
|
||||||
async def get_snapshot_list(request):
|
async def get_snapshot_list(request):
|
||||||
items = [f[:-5] for f in os.listdir(context.manager_snapshot_path) if f.endswith('.json')]
|
items = [f[:-5] for f in os.listdir(context.manager_snapshot_path) if f.endswith('.json')]
|
||||||
items.sort(reverse=True)
|
items.sort(reverse=True)
|
||||||
@@ -1162,7 +1179,7 @@ async def get_snapshot_list(request):
|
|||||||
@routes.get("/v2/snapshot/remove")
|
@routes.get("/v2/snapshot/remove")
|
||||||
async def remove_snapshot(request):
|
async def remove_snapshot(request):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||||
return web.Response(status=403)
|
return web.Response(status=403)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -1179,8 +1196,8 @@ async def remove_snapshot(request):
|
|||||||
|
|
||||||
@routes.get("/v2/snapshot/restore")
|
@routes.get("/v2/snapshot/restore")
|
||||||
async def restore_snapshot(request):
|
async def restore_snapshot(request):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle+'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE_P)
|
||||||
return web.Response(status=403)
|
return web.Response(status=403)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -1220,6 +1237,46 @@ async def save_snapshot(request):
|
|||||||
return web.Response(status=400)
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
|
||||||
|
@routes.get("/v2/snapshot/diff")
|
||||||
|
async def get_snapshot_diff(request):
|
||||||
|
try:
|
||||||
|
from_id = request.rel_url.query.get("from")
|
||||||
|
to_id = request.rel_url.query.get("to")
|
||||||
|
|
||||||
|
if (from_id is not None and '..' in from_id) or (to_id is not None and '..' in to_id):
|
||||||
|
logging.error("/v2/snapshot/diff: invalid 'from' or 'to' parameter.")
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
if from_id is None:
|
||||||
|
from_json = await core.get_current_snapshot()
|
||||||
|
else:
|
||||||
|
from_path = os.path.join(context.manager_snapshot_path, f"{from_id}.json")
|
||||||
|
if not os.path.exists(from_path):
|
||||||
|
logging.error(f"/v2/snapshot/diff: 'from' parameter file not found: {from_path}")
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
from_json = snapshot_util.read_snapshot(from_path)
|
||||||
|
|
||||||
|
if to_id is None:
|
||||||
|
logging.error("/v2/snapshot/diff: 'to' parameter is required.")
|
||||||
|
return web.Response(status=401)
|
||||||
|
else:
|
||||||
|
to_path = os.path.join(context.manager_snapshot_path, f"{to_id}.json")
|
||||||
|
if not os.path.exists(to_path):
|
||||||
|
logging.error(f"/v2/snapshot/diff: 'to' parameter file not found: {to_path}")
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
to_json = snapshot_util.read_snapshot(to_path)
|
||||||
|
|
||||||
|
return web.json_response(snapshot_util.diff_snapshot(from_json, to_json), content_type='application/json')
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"[ComfyUI-Manager] Error in /v2/snapshot/diff: {e}")
|
||||||
|
traceback.print_exc()
|
||||||
|
# Return a generic error response
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
|
||||||
def unzip_install(files):
|
def unzip_install(files):
|
||||||
temp_filename = 'manager-temp.zip'
|
temp_filename = 'manager-temp.zip'
|
||||||
for url in files:
|
for url in files:
|
||||||
@@ -1356,8 +1413,8 @@ async def install_custom_node(request):
|
|||||||
|
|
||||||
|
|
||||||
async def _install_custom_node(json_data):
|
async def _install_custom_node(json_data):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle+'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE_P)
|
||||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||||
|
|
||||||
# non-nightly cnr is safe
|
# non-nightly cnr is safe
|
||||||
@@ -1462,7 +1519,7 @@ async def _fix_custom_node(json_data):
|
|||||||
|
|
||||||
@routes.post("/v2/customnode/install/git_url")
|
@routes.post("/v2/customnode/install/git_url")
|
||||||
async def install_custom_node_git_url(request):
|
async def install_custom_node_git_url(request):
|
||||||
if not is_allowed_security_level('high'):
|
if not is_allowed_security_level('high+'):
|
||||||
logging.error(SECURITY_MESSAGE_NORMAL_MINUS)
|
logging.error(SECURITY_MESSAGE_NORMAL_MINUS)
|
||||||
return web.Response(status=403)
|
return web.Response(status=403)
|
||||||
|
|
||||||
@@ -1482,7 +1539,7 @@ async def install_custom_node_git_url(request):
|
|||||||
|
|
||||||
@routes.post("/v2/customnode/install/pip")
|
@routes.post("/v2/customnode/install/pip")
|
||||||
async def install_custom_node_pip(request):
|
async def install_custom_node_pip(request):
|
||||||
if not is_allowed_security_level('high'):
|
if not is_allowed_security_level('high+'):
|
||||||
logging.error(SECURITY_MESSAGE_NORMAL_MINUS)
|
logging.error(SECURITY_MESSAGE_NORMAL_MINUS)
|
||||||
return web.Response(status=403)
|
return web.Response(status=403)
|
||||||
|
|
||||||
@@ -1500,7 +1557,7 @@ async def uninstall_custom_node(request):
|
|||||||
|
|
||||||
async def _uninstall_custom_node(json_data):
|
async def _uninstall_custom_node(json_data):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||||
|
|
||||||
node_id = json_data.get('id')
|
node_id = json_data.get('id')
|
||||||
@@ -1526,7 +1583,7 @@ async def update_custom_node(request):
|
|||||||
|
|
||||||
async def _update_custom_node(json_data):
|
async def _update_custom_node(json_data):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||||
|
|
||||||
node_id = json_data.get('id')
|
node_id = json_data.get('id')
|
||||||
@@ -1617,8 +1674,8 @@ async def install_model(request):
|
|||||||
|
|
||||||
|
|
||||||
async def _install_model(json_data):
|
async def _install_model(json_data):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle+'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE_P)
|
||||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||||
|
|
||||||
# validate request
|
# validate request
|
||||||
@@ -1626,7 +1683,7 @@ async def _install_model(json_data):
|
|||||||
logging.error(f"[ComfyUI-Manager] Invalid model install request is detected: {json_data}")
|
logging.error(f"[ComfyUI-Manager] Invalid model install request is detected: {json_data}")
|
||||||
return web.Response(status=400, text="Invalid model install request is detected")
|
return web.Response(status=400, text="Invalid model install request is detected")
|
||||||
|
|
||||||
if not json_data['filename'].endswith('.safetensors') and not is_allowed_security_level('high'):
|
if not json_data['filename'].endswith('.safetensors') and not is_allowed_security_level('high+'):
|
||||||
models_json = await core.get_data_by_mode('cache', 'model-list.json', 'default')
|
models_json = await core.get_data_by_mode('cache', 'model-list.json', 'default')
|
||||||
|
|
||||||
is_belongs_to_whitelist = False
|
is_belongs_to_whitelist = False
|
||||||
@@ -1783,7 +1840,7 @@ async def get_notice_legacy(request):
|
|||||||
@routes.get("/v2/manager/reboot")
|
@routes.get("/v2/manager/reboot")
|
||||||
def restart(self):
|
def restart(self):
|
||||||
if not is_allowed_security_level('middle'):
|
if not is_allowed_security_level('middle'):
|
||||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||||
return web.Response(status=403)
|
return web.Response(status=403)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -1949,9 +2006,10 @@ if not os.path.exists(context.manager_config_path):
|
|||||||
core.write_config()
|
core.write_config()
|
||||||
|
|
||||||
|
|
||||||
cm_global.register_extension('ComfyUI-Manager',
|
# policy setup
|
||||||
{'version': core.version,
|
manager_security.add_handler_policy(reinstall_custom_node, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||||
'name': 'ComfyUI Manager',
|
manager_security.add_handler_policy(install_custom_node, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||||
'nodes': {},
|
manager_security.add_handler_policy(fix_custom_node, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||||
'description': 'This extension provides the ability to manage custom nodes in ComfyUI.', })
|
manager_security.add_handler_policy(install_custom_node_git_url, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||||
|
manager_security.add_handler_policy(install_custom_node_pip, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||||
|
manager_security.add_handler_policy(install_model, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||||
|
|||||||
@@ -337,8 +337,7 @@ async def share_art(request):
|
|||||||
content_type = assetFileType
|
content_type = assetFileType
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from matrix_client.api import MatrixHttpApi
|
from nio import AsyncClient, LoginResponse, UploadResponse
|
||||||
from matrix_client.client import MatrixClient
|
|
||||||
|
|
||||||
homeserver = 'matrix.org'
|
homeserver = 'matrix.org'
|
||||||
if matrix_auth:
|
if matrix_auth:
|
||||||
@@ -347,20 +346,35 @@ async def share_art(request):
|
|||||||
if not homeserver.startswith("https://"):
|
if not homeserver.startswith("https://"):
|
||||||
homeserver = "https://" + homeserver
|
homeserver = "https://" + homeserver
|
||||||
|
|
||||||
client = MatrixClient(homeserver)
|
client = AsyncClient(homeserver, matrix_auth['username'])
|
||||||
try:
|
|
||||||
token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
|
# Login
|
||||||
if not token:
|
login_resp = await client.login(matrix_auth['password'])
|
||||||
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
|
if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
|
||||||
except Exception:
|
await client.close()
|
||||||
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
|
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
|
||||||
|
|
||||||
matrix = MatrixHttpApi(homeserver, token=token)
|
# Upload asset
|
||||||
with open(asset_filepath, 'rb') as f:
|
with open(asset_filepath, 'rb') as f:
|
||||||
mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']
|
upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
|
||||||
|
asset_data = f.seek(0) or f.read() # get size for info below
|
||||||
|
if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
|
||||||
|
await client.close()
|
||||||
|
return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
|
||||||
|
mxc_url = upload_resp.content_uri
|
||||||
|
|
||||||
workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']
|
# Upload workflow JSON
|
||||||
|
import io
|
||||||
|
workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
|
||||||
|
workflow_io = io.BytesIO(workflow_json_bytes)
|
||||||
|
upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
|
||||||
|
workflow_io.seek(0)
|
||||||
|
if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
|
||||||
|
await client.close()
|
||||||
|
return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
|
||||||
|
workflow_json_mxc_url = upload_workflow_resp.content_uri
|
||||||
|
|
||||||
|
# Send text message
|
||||||
text_content = ""
|
text_content = ""
|
||||||
if title:
|
if title:
|
||||||
text_content += f"{title}\n"
|
text_content += f"{title}\n"
|
||||||
@@ -368,10 +382,45 @@ async def share_art(request):
|
|||||||
text_content += f"{description}\n"
|
text_content += f"{description}\n"
|
||||||
if credits:
|
if credits:
|
||||||
text_content += f"\ncredits: {credits}\n"
|
text_content += f"\ncredits: {credits}\n"
|
||||||
matrix.send_message(comfyui_share_room_id, text_content)
|
await client.room_send(
|
||||||
matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
|
room_id=comfyui_share_room_id,
|
||||||
matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
|
message_type="m.room.message",
|
||||||
except Exception:
|
content={"msgtype": "m.text", "body": text_content}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send image
|
||||||
|
await client.room_send(
|
||||||
|
room_id=comfyui_share_room_id,
|
||||||
|
message_type="m.room.message",
|
||||||
|
content={
|
||||||
|
"msgtype": "m.image",
|
||||||
|
"body": filename,
|
||||||
|
"url": mxc_url,
|
||||||
|
"info": {
|
||||||
|
"mimetype": content_type,
|
||||||
|
"size": len(asset_data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send workflow JSON file
|
||||||
|
await client.room_send(
|
||||||
|
room_id=comfyui_share_room_id,
|
||||||
|
message_type="m.room.message",
|
||||||
|
content={
|
||||||
|
"msgtype": "m.file",
|
||||||
|
"body": "workflow.json",
|
||||||
|
"url": workflow_json_mxc_url,
|
||||||
|
"info": {
|
||||||
|
"mimetype": "application/json",
|
||||||
|
"size": len(workflow_json_bytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
await client.close()
|
||||||
|
|
||||||
|
except:
|
||||||
import traceback
|
import traceback
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)
|
return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)
|
||||||
|
|||||||
@@ -35,7 +35,6 @@ else:
|
|||||||
def current_timestamp():
|
def current_timestamp():
|
||||||
return str(time.time()).split('.')[0]
|
return str(time.time()).split('.')[0]
|
||||||
|
|
||||||
security_check.security_check()
|
|
||||||
|
|
||||||
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
|
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
|
||||||
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
|
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
|
||||||
@@ -111,20 +110,15 @@ def check_file_logging():
|
|||||||
|
|
||||||
read_config()
|
read_config()
|
||||||
read_uv_mode()
|
read_uv_mode()
|
||||||
|
security_check.security_check()
|
||||||
check_file_logging()
|
check_file_logging()
|
||||||
|
|
||||||
if sys.version_info < (3, 13):
|
|
||||||
cm_global.pip_overrides = {'numpy': 'numpy<2'}
|
|
||||||
else:
|
|
||||||
cm_global.pip_overrides = {}
|
cm_global.pip_overrides = {}
|
||||||
|
|
||||||
if os.path.exists(manager_pip_overrides_path):
|
if os.path.exists(manager_pip_overrides_path):
|
||||||
with open(manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
with open(manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
||||||
cm_global.pip_overrides = json.load(json_file)
|
cm_global.pip_overrides = json.load(json_file)
|
||||||
|
|
||||||
if sys.version_info < (3, 13):
|
|
||||||
cm_global.pip_overrides['numpy'] = 'numpy<2'
|
|
||||||
|
|
||||||
|
|
||||||
if os.path.exists(manager_pip_blacklist_path):
|
if os.path.exists(manager_pip_blacklist_path):
|
||||||
with open(manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
|
with open(manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
10166
github-stats.json
10166
github-stats.json
File diff suppressed because it is too large
Load Diff
204
model-list.json
204
model-list.json
@@ -1973,6 +1973,97 @@
|
|||||||
"url": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
|
"url": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
|
||||||
"size": "375.0MB"
|
"size": "375.0MB"
|
||||||
},
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_tiny.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_small.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_large.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_tiny.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_small.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_base_plus.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_large.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "seecoder v1.0",
|
"name": "seecoder v1.0",
|
||||||
"type": "seecoder",
|
"type": "seecoder",
|
||||||
@@ -4006,6 +4097,29 @@
|
|||||||
"size": "649MB"
|
"size": "649MB"
|
||||||
},
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/omnigen2_fp16.safetensors",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "OmniGen2",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "OmniGen2 diffusion model. This is required for using OmniGen2.",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "omnigen2_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors",
|
||||||
|
"size": "7.93GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"type": "clip",
|
||||||
|
"base": "qwen-2.5",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "text encoder for OmniGen2",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"size": "7.51GB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "FLUX.1 [Schnell] Diffusion model",
|
"name": "FLUX.1 [Schnell] Diffusion model",
|
||||||
"type": "diffusion_model",
|
"type": "diffusion_model",
|
||||||
@@ -4023,7 +4137,7 @@
|
|||||||
"type": "VAE",
|
"type": "VAE",
|
||||||
"base": "FLUX.1",
|
"base": "FLUX.1",
|
||||||
"save_path": "vae/FLUX1",
|
"save_path": "vae/FLUX1",
|
||||||
"description": "FLUX.1 VAE model",
|
"description": "FLUX.1 VAE model\nNOTE: This VAE model can also be used for image generation with OmniGen2.",
|
||||||
"reference": "https://huggingface.co/black-forest-labs/FLUX.1-schnell",
|
"reference": "https://huggingface.co/black-forest-labs/FLUX.1-schnell",
|
||||||
"filename": "ae.safetensors",
|
"filename": "ae.safetensors",
|
||||||
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors",
|
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors",
|
||||||
@@ -5033,6 +5147,50 @@
|
|||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-fp8.safetensors",
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-fp8.safetensors",
|
||||||
"size": "15.7GB"
|
"size": "15.7GB"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 2B Distilled v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "LTX-Video 2B distilled model v0.9.8 with improved prompt understanding and detail generation.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-2b-0.9.8-distilled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled.safetensors",
|
||||||
|
"size": "6.34GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 2B Distilled FP8 v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "Quantized LTX-Video 2B distilled model v0.9.8 with improved prompt understanding and detail generation, optimized for lower VRAM usage.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-2b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"size": "4.46GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 13B Distilled v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "LTX-Video 13B distilled model v0.9.8 with improved prompt understanding and detail generation.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-13b-0.9.8-distilled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.8-distilled.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 13B Distilled FP8 v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "Quantized LTX-Video 13B distilled model v0.9.8 with improved prompt understanding and detail generation, optimized for lower VRAM usage.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-13b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"size": "15.7GB"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "LTX-Video 13B Distilled LoRA v0.9.7",
|
"name": "LTX-Video 13B Distilled LoRA v0.9.7",
|
||||||
"type": "lora",
|
"type": "lora",
|
||||||
@@ -5044,6 +5202,50 @@
|
|||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-lora128.safetensors",
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-lora128.safetensors",
|
||||||
"size": "1.33GB"
|
"size": "1.33GB"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Depth 13B v0.9.7",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "In-Context LoRA (IC LoRA) for depth-controlled video-to-video generation with precise depth conditioning.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-depth-13b-0.9.7",
|
||||||
|
"filename": "ltxv-097-ic-lora-depth-control-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-depth-13b-0.9.7/resolve/main/ltxv-097-ic-lora-depth-control-comfyui.safetensors",
|
||||||
|
"size": "81.9MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Pose 13B v0.9.7",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "In-Context LoRA (IC LoRA) for pose-controlled video-to-video generation with precise pose conditioning.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-pose-13b-0.9.7",
|
||||||
|
"filename": "ltxv-097-ic-lora-pose-control-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-pose-13b-0.9.7/resolve/main/ltxv-097-ic-lora-pose-control-comfyui.safetensors",
|
||||||
|
"size": "151MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Canny 13B v0.9.7",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "In-Context LoRA (IC LoRA) for canny edge-controlled video-to-video generation with precise edge conditioning.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-canny-13b-0.9.7",
|
||||||
|
"filename": "ltxv-097-ic-lora-canny-control-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-canny-13b-0.9.7/resolve/main/ltxv-097-ic-lora-canny-control-comfyui.safetensors",
|
||||||
|
"size": "81.9MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Detailer 13B v0.9.8",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "A video detailer model on top of LTXV_13B_098_DEV trained on custom data using In-Context LoRA (IC LoRA) method.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-detailer-13b-0.9.8",
|
||||||
|
"filename": "ltxv-098-ic-lora-detailer-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-detailer-13b-0.9.8/resolve/main/ltxv-098-ic-lora-detailer-comfyui.safetensors",
|
||||||
|
"size": "1.31GB"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "Latent Bridge Matching for Image Relighting",
|
"name": "Latent Bridge Matching for Image Relighting",
|
||||||
"type": "diffusion_model",
|
"type": "diffusion_model",
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,3 +1,3 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
rm ~/.tmp/dev/*.py > /dev/null 2>&1
|
rm ~/.tmp/dev/*.py > /dev/null 2>&1
|
||||||
python ../../scanner.py ~/.tmp/dev
|
python ../../scanner.py ~/.tmp/dev $*
|
||||||
@@ -1,5 +1,15 @@
|
|||||||
{
|
{
|
||||||
"custom_nodes": [
|
"custom_nodes": [
|
||||||
|
{
|
||||||
|
"author": "joaomede",
|
||||||
|
"title": "ComfyUI-Unload-Model-Fork",
|
||||||
|
"reference": "https://github.com/joaomede/ComfyUI-Unload-Model-Fork",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/joaomede/ComfyUI-Unload-Model-Fork"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "For unloading a model or all models, using the memory management that is already present in ComfyUI. Copied from [a/https://github.com/willblaschko/ComfyUI-Unload-Models](https://github.com/willblaschko/ComfyUI-Unload-Models) but without the unnecessary extra stuff."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "SanDiegoDude",
|
"author": "SanDiegoDude",
|
||||||
"title": "ComfyUI-HiDream-Sampler [WIP]",
|
"title": "ComfyUI-HiDream-Sampler [WIP]",
|
||||||
|
|||||||
@@ -1,5 +1,394 @@
|
|||||||
{
|
{
|
||||||
"custom_nodes": [
|
"custom_nodes": [
|
||||||
|
{
|
||||||
|
"author": "skayka",
|
||||||
|
"title": "ComfyUI-DreamFit []REMOVED]",
|
||||||
|
"reference": "https://github.com/skayka/ComfyUI-DreamFit",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/skayka/ComfyUI-DreamFit"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Garment-centric human generation nodes for ComfyUI using DreamFit with Flux.\nDreamFit is a powerful adapter system that enhances Flux models with garment-aware generation capabilities, enabling high-quality fashion and clothing generation."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "domenecmiralles",
|
||||||
|
"title": "obobo_nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/domenecmiralles/obobo_nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/domenecmiralles/obobo_nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A collection of custom nodes for ComfyUI that provide various input and output capabilities."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "NicholasKao1029",
|
||||||
|
"title": "comfyui-pixxio [REMOVED]",
|
||||||
|
"reference": "https://github.com/NicholasKao1029/comfyui-pixxio",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/NicholasKao1029/comfyui-pixxio"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES: Auto-Upload Image to Pixxio Collection, Load Image from Pixx.io"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "ComfyUI-Workflow",
|
||||||
|
"title": "ComfyUI OpenAI Nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/ComfyUI-Workflow/ComfyUI-OpenAI",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/ComfyUI-Workflow/ComfyUI-OpenAI"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "By utilizing OpenAI's powerful vision models, this node enables you to incorporate state-of-the-art image understanding into your ComfyUI projects with minimal setup."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "dionren",
|
||||||
|
"title": "Export Workflow With Cyuai Api Available Nodes [REMOVED]",
|
||||||
|
"id": "comfyUI-Pro-Export-Tool",
|
||||||
|
"reference": "https://github.com/dionren/ComfyUI-Pro-Export-Tool",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/dionren/ComfyUI-Pro-Export-Tool"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is a node to convert workflows to cyuai api available nodes."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "1H-hobit",
|
||||||
|
"title": "ComfyUI_InternVL3 [REMOVED]",
|
||||||
|
"reference": "https://github.com/1H-hobit/ComfyUI_InternVL3",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/1H-hobit/ComfyUI_InternVL3"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI for [a/InternVL](https://github.com/OpenGVLab/InternVL)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "spacepxl",
|
||||||
|
"title": "ComfyUI-Florence-2 [DEPRECATED]",
|
||||||
|
"id": "florence2-spacepxl",
|
||||||
|
"reference": "https://github.com/spacepxl/ComfyUI-Florence-2",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/spacepxl/ComfyUI-Florence-2"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "[a/https://huggingface.co/microsoft/Florence-2-large-ft](https://huggingface.co/microsoft/Florence-2-large-ft)\nLarge or base model, support for captioning and bbox task modes, more coming soon."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "xxxxxxxxxxxc",
|
||||||
|
"title": "flux-kontext-diff-merge [REMOVED]",
|
||||||
|
"reference": "https://github.com/xxxxxxxxxxxc/flux-kontext-diff-merge",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/xxxxxxxxxxxc/flux-kontext-diff-merge"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Preserve image quality with flux-kontext-diff-merge. This ComfyUI node merges only changed areas from AI edits, ensuring clarity and detail."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "TechnoByteJS",
|
||||||
|
"title": "TechNodes [REMOVED]",
|
||||||
|
"id": "technodes",
|
||||||
|
"reference": "https://github.com/TechnoByteJS/ComfyUI-TechNodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/TechnoByteJS/ComfyUI-TechNodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI nodes for merging, testing and more.\nNOTE: SDNext Merge, VAE Merge, MBW Layers, Repeat VAE, Quantization."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "DDDDEEP",
|
||||||
|
"title": "ComfyUI-DDDDEEP [REMOVED]",
|
||||||
|
"reference": "https://github.com/DDDDEEP/ComfyUI-DDDDEEP",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/DDDDEEP/ComfyUI-DDDDEEP"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES: AutoWidthHeight, ReturnIntSeed, OppositeBool, PromptItemCollection"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "manifestations",
|
||||||
|
"title": "ComfyUI Ethnic Outfits Custom Nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/manifestations/comfyui-outfits",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/manifestations/comfyui-outfits"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Custom ComfyUI nodes for generating outfit prompts representing diverse ethnicities, cultures, and regions. Uses extensible JSON data for clothing, accessories, and poses, with “random/disabled” dropdowns for flexibility. Advanced prompt engineering is supported via Ollama LLM integration. Easily add new regions, ethnicities, or cultures by updating data files and creating lightweight node wrappers. Designed for artists, researchers, and developers seeking culturally rich, customizable prompt generation in ComfyUI workflows."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "MitoshiroPJ",
|
||||||
|
"title": "ComfyUI Slothful Attention [REMOVED]",
|
||||||
|
"reference": "https://github.com/MitoshiroPJ/comfyui_slothful_attention",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/MitoshiroPJ/comfyui_slothful_attention"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This custom node allow controlling output without training. The reducing method is similar to [a/Spatial-Reduction Attention](https://paperswithcode.com/method/spatial-reduction-attention)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "MitoshiroPJ",
|
||||||
|
"title": "comfyui_focal_sampler [REMOVED]",
|
||||||
|
"reference": "https://github.com/MitoshiroPJ/comfyui_focal_sampler",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/MitoshiroPJ/comfyui_focal_sampler"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Apply additional sampling to specific area"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "manifestations",
|
||||||
|
"title": "ComfyUI Ethnic Outfit & Prompt Enhancer Nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/manifestations/comfyui-indian-outfit",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/manifestations/comfyui-indian-outfit"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Features:\n* Extensive options for Indian, Indonesian, and international clothing, jewelry, accessories, and styles\n* Multiple jewelry and accessory fields (with material support: gold, diamond, silver, leather, beads, etc.)\n* Support for tattoos, henna, hair styles, poses, shot types, lighting, and photography genres\n* Seamless prompt expansion using your own Ollama LLM instance\n* Modular, extensible JSON data files for easy customization"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "coVISIONSld",
|
||||||
|
"title": "ComfyUI-OmniGen2 [REMOVED]",
|
||||||
|
"reference": "https://github.com/coVISIONSld/ComfyUI-OmniGen2",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/coVISIONSld/ComfyUI-OmniGen2"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI-OmniGen2 is a custom node package for the OmniGen2 model, enabling advanced text-to-image generation and visual understanding."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "S4MUEL-404",
|
||||||
|
"title": "ComfyUI-S4Tool-Image-Overlay [REMOVED]",
|
||||||
|
"reference": "https://github.com/S4MUEL-404/ComfyUI-S4Tool-Image-Overlay",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/S4MUEL-404/ComfyUI-S4Tool-Image-Overlay"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Quickly set up image overlay effects"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "akspa0",
|
||||||
|
"title": "ComfyUI-FapMixPlus [REMOVED]",
|
||||||
|
"reference": "https://github.com/akspa0/ComfyUI-FapMixPlus",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/akspa0/ComfyUI-FapMixPlus"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is an audio processing script that applies soft limiting, optional loudness normalization, and optional slicing for transcription. It can also produce stereo-mixed outputs with optional audio appended to the end. The script organizes processed files into structured folders with sanitized filenames and retains original timestamps for continuity."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "RedmondAI",
|
||||||
|
"title": "comfyui-tools [UNSAFE]",
|
||||||
|
"reference": "https://github.com/RedmondAI/comfyui-tools",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/RedmondAI/comfyui-tools"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Custom extensions for ComfyUI used by the Redmond3D VFX team.[w/This node pack has a vulnerability that allows it to create files at arbitrary paths.]"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "S4MUEL-404",
|
||||||
|
"title": "Image Position Blend [REMOVED]",
|
||||||
|
"id": "ComfyUI-Image-Position-Blend",
|
||||||
|
"version": "1.1",
|
||||||
|
"reference": "https://github.com/S4MUEL-404/ComfyUI-Image-Position-Blend",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/S4MUEL-404/ComfyUI-Image-Position-Blend"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node for conveniently adjusting the overlay position of two images."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "S4MUEL-404",
|
||||||
|
"title": "ComfyUI-Text-On-Image [REMOVED]",
|
||||||
|
"id": "ComfyUI-Text-On-Image",
|
||||||
|
"reference": "https://github.com/S4MUEL-404/ComfyUI-Text-On-Image",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/S4MUEL-404/ComfyUI-Text-On-Image"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node for ComfyUI that allows users to add text overlays to images with customizable size, font, position, and shadow."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "S4MUEL-404",
|
||||||
|
"title": "ComfyUI-Prompts-Selector [REMOVED]",
|
||||||
|
"reference": "https://github.com/S4MUEL-404/ComfyUI-Prompts-Selector",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/S4MUEL-404/ComfyUI-Prompts-Selector"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Quickly select preset prompts and merge them"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "juntaosun",
|
||||||
|
"title": "ComfyUI_open_nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/juntaosun/ComfyUI_open_nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/juntaosun/ComfyUI_open_nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI open nodes by juntaosun."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "perilli",
|
||||||
|
"title": "apw_nodes [DEPRECATED]",
|
||||||
|
"reference": "https://github.com/alessandroperilli/apw_nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/alessandroperilli/apw_nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node suite to augment the capabilities of the [a/AP Workflows for ComfyUI](https://perilli.com/ai/comfyui/)[w/'APW_Nodes' has been newly added in place of 'apw_nodes'.]"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "markuryy",
|
||||||
|
"title": "ComfyUI Spiritparticle Nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/markuryy/comfyui-spiritparticle",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/markuryy/comfyui-spiritparticle"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A node pack by spiritparticle."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "SpaceKendo",
|
||||||
|
"title": "Text to video for Stable Video Diffusion in ComfyUI [REMOVED]",
|
||||||
|
"id": "svd-txt2vid",
|
||||||
|
"reference": "https://github.com/SpaceKendo/ComfyUI-svd_txt2vid",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/SpaceKendo/ComfyUI-svd_txt2vid"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is node replaces the init_image conditioning for the [a/Stable Video Diffusion](https://github.com/Stability-AI/generative-models) image to video model with text embeds, together with a conditioning frame. The conditioning frame is a set of latents."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "vovler",
|
||||||
|
"title": "ComfyUI Civitai Helper Extension [REMOVED]",
|
||||||
|
"reference": "https://github.com/vovler/comfyui-civitaihelper",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/vovler/comfyui-civitaihelper"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI extension for parsing Civitai PNG workflows and automatically downloading missing models"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "DriftJohnson",
|
||||||
|
"title": "DJZ-Nodes [REMOVED]",
|
||||||
|
"id": "DJZ-Nodes",
|
||||||
|
"reference": "https://github.com/MushroomFleet/DJZ-Nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/MushroomFleet/DJZ-Nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "AspectSize and other nodes"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "DriftJohnson",
|
||||||
|
"title": "KokoroTTS Node [REMOVED]",
|
||||||
|
"reference": "https://github.com/MushroomFleet/DJZ-KokoroTTS",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/MushroomFleet/DJZ-KokoroTTS"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This node provides advanced text-to-speech functionality powered by KokoroTTS. Follow the instructions below to install, configure, and use the node within your portable ComfyUI installation."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "MushroomFleet",
|
||||||
|
"title": "DJZ-Pedalboard [REMOVED]",
|
||||||
|
"reference": "https://github.com/MushroomFleet/DJZ-Pedalboard",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/MushroomFleet/DJZ-Pedalboard"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This project provides a collection of custom nodes designed for enhanced audio effects in ComfyUI. With an intuitive pedalboard interface, users can easily integrate and manipulate various audio effects within their workflows."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "MushroomFleet",
|
||||||
|
"title": "SVG Suite for ComfyUI [REMOVED]",
|
||||||
|
"reference": "https://github.com/MushroomFleet/svg-suite",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/MushroomFleet/svg-suite"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "SVG Suite is an advanced set of nodes for converting images to SVG in ComfyUI, expanding upon the functionality of ComfyUI-ToSVG."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "joeriben",
|
||||||
|
"title": "AI4ArtsEd Ollama Prompt Node [DEPRECATED]",
|
||||||
|
"reference": "https://github.com/joeriben/ai4artsed_comfyui",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/joeriben/ai4artsed_comfyui"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Experimental nodes for ComfyUI. For more, see [a/https://kubi-meta.de/ai4artsed](https://kubi-meta.de/ai4artsed) A custom ComfyUI node for stylistic and cultural transformation of input text using local LLMs served via Ollama. This node allows you to combine a free-form prompt (e.g. translation, poetic recoding, genre shift) with externally supplied text in the ComfyUI graph. The result is processed via an Ollama-hosted model and returned as plain text."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "bento234",
|
||||||
|
"title": "ComfyUI-bento-toolbox [REMOVED]",
|
||||||
|
"reference": "https://github.com/bento234/ComfyUI-bento-toolbox",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/bento234/ComfyUI-bento-toolbox"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES: Tile Prompt Distributor"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "yichengup",
|
||||||
|
"title": "ComfyUI-VideoBlender [REMOVED]",
|
||||||
|
"reference": "https://github.com/yichengup/ComfyUI-VideoBlender",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/yichengup/ComfyUI-VideoBlender"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Video clip mixing"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "xl0",
|
||||||
|
"title": "latent-tools [REMOVED]",
|
||||||
|
"reference": "https://github.com/xl0/latent-tools",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/xl0/latent-tools"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Visualize and manipulate the latent space in ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Conor-Collins",
|
||||||
|
"title": "ComfyUI-CoCoTools [REMOVED]",
|
||||||
|
"reference": "https://github.com/Conor-Collins/coco_tools",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Conor-Collins/coco_tools"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A set of custom nodes for ComfyUI providing advanced image processing, file handling, and utility functions."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "theUpsider",
|
||||||
|
"title": "ComfyUI-Logic [DEPRECATED]",
|
||||||
|
"id": "comfy-logic",
|
||||||
|
"reference": "https://github.com/theUpsider/ComfyUI-Logic",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/theUpsider/ComfyUI-Logic"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "An extension to ComfyUI that introduces logic nodes and conditional rendering capabilities."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Malloc-pix",
|
||||||
|
"title": "comfyui_qwen2.4_vl_node [REMOVED]",
|
||||||
|
"reference": "https://github.com/Malloc-pix/comfyui_qwen2.4_vl_node",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Malloc-pix/comfyui_qwen2.4_vl_node"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES: CogVLM2 Captioner, CLIP Dynamic Text Encode(cy)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "inyourdreams-studio",
|
||||||
|
"title": "ComfyUI-RBLM [REMOVED]",
|
||||||
|
"reference": "https://github.com/inyourdreams-studio/comfyui-rblm",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/inyourdreams-studio/comfyui-rblm"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node pack for ComfyUI that provides text manipulation nodes."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "dream-computing",
|
"author": "dream-computing",
|
||||||
"title": "SyntaxNodes - Image Processing Effects for ComfyUI [REMOVED]",
|
"title": "SyntaxNodes - Image Processing Effects for ComfyUI [REMOVED]",
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,118 @@
|
|||||||
{
|
{
|
||||||
"models": [
|
"models": [
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_tiny.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_small.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_large.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_tiny.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_small.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_base_plus.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_large.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/omnigen2_fp16.safetensors",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "OmniGen2",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "OmniGen2 diffusion model. This is required for using OmniGen2.",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "omnigen2_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors",
|
||||||
|
"size": "7.93GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"type": "clip",
|
||||||
|
"base": "qwen-2.5",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "text encoder for OmniGen2",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"size": "7.51GB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "Latent Bridge Matching for Image Relighting",
|
"name": "Latent Bridge Matching for Image Relighting",
|
||||||
"type": "diffusion_model",
|
"type": "diffusion_model",
|
||||||
@@ -576,121 +689,6 @@
|
|||||||
"filename": "flux-hed-controlnet-v3.safetensors",
|
"filename": "flux-hed-controlnet-v3.safetensors",
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
|
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
|
||||||
"size": "1.49GB"
|
"size": "1.49GB"
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/realism_lora.safetensors",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/loras",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
|
|
||||||
"filename": "realism_lora.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/realism_lora.safetensors",
|
|
||||||
"size": "44.8MB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/art_lora.safetensors",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/loras",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
|
|
||||||
"filename": "art_lora.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/scenery_lora.safetensors",
|
|
||||||
"size": "44.8MB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/mjv6_lora.safetensors",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/loras",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
|
|
||||||
"filename": "mjv6_lora.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/mjv6_lora.safetensors",
|
|
||||||
"size": "44.8MB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-ip-adapter",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/ipadapters",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-ip-adapter",
|
|
||||||
"filename": "ip_adapter.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-ip-adapter/resolve/main/ip_adapter.safetensors",
|
|
||||||
"size": "982MB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "stabilityai/SD3.5-Large-Controlnet-Blur",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "SD3.5",
|
|
||||||
"save_path": "controlnet/SD3.5",
|
|
||||||
"description": "Blur Controlnet model for SD3.5 Large",
|
|
||||||
"reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
|
|
||||||
"filename": "sd3.5_large_controlnet_blur.safetensors",
|
|
||||||
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_blur.safetensors",
|
|
||||||
"size": "8.65GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "stabilityai/SD3.5-Large-Controlnet-Canny",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "SD3.5",
|
|
||||||
"save_path": "controlnet/SD3.5",
|
|
||||||
"description": "Canny Controlnet model for SD3.5 Large",
|
|
||||||
"reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
|
|
||||||
"filename": "sd3.5_large_controlnet_canny.safetensors",
|
|
||||||
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors",
|
|
||||||
"size": "8.65GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "stabilityai/SD3.5-Large-Controlnet-Depth",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "SD3.5",
|
|
||||||
"save_path": "controlnet/SD3.5",
|
|
||||||
"description": "Depth Controlnet model for SD3.5 Large",
|
|
||||||
"reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
|
|
||||||
"filename": "sd3.5_large_controlnet_depth.safetensors",
|
|
||||||
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_depth.safetensors",
|
|
||||||
"size": "8.65GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "LTX-Video 2B v0.9 Checkpoint",
|
|
||||||
"type": "checkpoint",
|
|
||||||
"base": "LTX-Video",
|
|
||||||
"save_path": "checkpoints/LTXV",
|
|
||||||
"description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
|
|
||||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
|
||||||
"filename": "ltx-video-2b-v0.9.safetensors",
|
|
||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors",
|
|
||||||
"size": "9.37GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "InstantX/FLUX.1-dev-IP-Adapter",
|
|
||||||
"type": "IP-Adapter",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "ipadapter-flux",
|
|
||||||
"description": "FLUX.1-dev-IP-Adapter",
|
|
||||||
"reference": "https://huggingface.co/InstantX/FLUX.1-dev-IP-Adapter",
|
|
||||||
"filename": "ip-adapter.bin",
|
|
||||||
"url": "https://huggingface.co/InstantX/FLUX.1-dev-IP-Adapter/resolve/main/ip-adapter.bin",
|
|
||||||
"size": "5.29GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/sigclip_vision_384 (patch14_384)",
|
|
||||||
"type": "clip_vision",
|
|
||||||
"base": "sigclip",
|
|
||||||
"save_path": "clip_vision",
|
|
||||||
"description": "This clip vision model is required for FLUX.1 Redux.",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/sigclip_vision_384/tree/main",
|
|
||||||
"filename": "sigclip_vision_patch14_384.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors",
|
|
||||||
"size": "857MB"
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -331,6 +331,16 @@
|
|||||||
],
|
],
|
||||||
"description": "Dynamic Node examples for ComfyUI",
|
"description": "Dynamic Node examples for ComfyUI",
|
||||||
"install_type": "git-clone"
|
"install_type": "git-clone"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Jonathon-Doran",
|
||||||
|
"title": "remote-combo-demo",
|
||||||
|
"reference": "https://github.com/Jonathon-Doran/remote-combo-demo",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Jonathon-Doran/remote-combo-demo"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A minimal test suite demonstrating how remote COMBO inputs behave in ComfyUI, with and without force_input"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
203
openapi.yaml
203
openapi.yaml
@@ -18,6 +18,14 @@ security: []
|
|||||||
# Common API components
|
# Common API components
|
||||||
components:
|
components:
|
||||||
schemas:
|
schemas:
|
||||||
|
OperationType:
|
||||||
|
type: string
|
||||||
|
enum: [install, uninstall, update, update-comfyui, fix, disable, enable, install-model]
|
||||||
|
description: Type of operation or task being performed
|
||||||
|
OperationResult:
|
||||||
|
type: string
|
||||||
|
enum: [success, failed, skipped, error, skip]
|
||||||
|
description: Result status of an operation (failed/error and skipped/skip are aliases)
|
||||||
# Core Task Queue Models
|
# Core Task Queue Models
|
||||||
QueueTaskItem:
|
QueueTaskItem:
|
||||||
type: object
|
type: object
|
||||||
@@ -29,9 +37,7 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
description: Client identifier that initiated the task
|
description: Client identifier that initiated the task
|
||||||
kind:
|
kind:
|
||||||
type: string
|
$ref: '#/components/schemas/OperationType'
|
||||||
description: Type of task being performed
|
|
||||||
enum: [install, uninstall, update, update-all, update-comfyui, fix, disable, enable, install-model]
|
|
||||||
params:
|
params:
|
||||||
oneOf:
|
oneOf:
|
||||||
- $ref: '#/components/schemas/InstallPackParams'
|
- $ref: '#/components/schemas/InstallPackParams'
|
||||||
@@ -65,14 +71,19 @@ components:
|
|||||||
description: Task result message or details
|
description: Task result message or details
|
||||||
status:
|
status:
|
||||||
$ref: '#/components/schemas/TaskExecutionStatus'
|
$ref: '#/components/schemas/TaskExecutionStatus'
|
||||||
|
batch_id:
|
||||||
|
type: [string, 'null']
|
||||||
|
description: ID of the batch this task belongs to
|
||||||
|
end_time:
|
||||||
|
type: [string, 'null']
|
||||||
|
format: date-time
|
||||||
|
description: ISO timestamp when task execution ended
|
||||||
required: [ui_id, client_id, kind, timestamp, result]
|
required: [ui_id, client_id, kind, timestamp, result]
|
||||||
TaskExecutionStatus:
|
TaskExecutionStatus:
|
||||||
type: object
|
type: object
|
||||||
properties:
|
properties:
|
||||||
status_str:
|
status_str:
|
||||||
type: string
|
$ref: '#/components/schemas/OperationResult'
|
||||||
enum: [success, error, skip]
|
|
||||||
description: Overall task execution status
|
|
||||||
completed:
|
completed:
|
||||||
type: boolean
|
type: boolean
|
||||||
description: Whether the task completed
|
description: Whether the task completed
|
||||||
@@ -223,6 +234,14 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
enum: [git-clone, copy, cnr]
|
enum: [git-clone, copy, cnr]
|
||||||
description: Type of installation used for the pack
|
description: Type of installation used for the pack
|
||||||
|
SecurityLevel:
|
||||||
|
type: string
|
||||||
|
enum: [strong, normal, normal-, weak]
|
||||||
|
description: Security level configuration (from most to least restrictive)
|
||||||
|
RiskLevel:
|
||||||
|
type: string
|
||||||
|
enum: [block, high+, high, middle+, middle]
|
||||||
|
description: Risk classification for operations
|
||||||
ManagerPack:
|
ManagerPack:
|
||||||
allOf:
|
allOf:
|
||||||
- $ref: '#/components/schemas/ManagerPackInfo'
|
- $ref: '#/components/schemas/ManagerPackInfo'
|
||||||
@@ -235,7 +254,7 @@ components:
|
|||||||
type: array
|
type: array
|
||||||
items:
|
items:
|
||||||
type: string
|
type: string
|
||||||
description: Files included in the pack
|
description: Repository URLs for installation (typically contains one GitHub URL)
|
||||||
reference:
|
reference:
|
||||||
type: string
|
type: string
|
||||||
description: The type of installation reference
|
description: The type of installation reference
|
||||||
@@ -366,6 +385,46 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
description: ComfyUI Node Registry ID of the package to enable
|
description: ComfyUI Node Registry ID of the package to enable
|
||||||
required: [cnr_id]
|
required: [cnr_id]
|
||||||
|
# Query Parameter Models
|
||||||
|
UpdateAllQueryParams:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
client_id:
|
||||||
|
type: string
|
||||||
|
description: Client identifier that initiated the request
|
||||||
|
ui_id:
|
||||||
|
type: string
|
||||||
|
description: Base UI identifier for task tracking
|
||||||
|
mode:
|
||||||
|
$ref: '#/components/schemas/ManagerDatabaseSource'
|
||||||
|
required: [client_id, ui_id]
|
||||||
|
UpdateComfyUIQueryParams:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
client_id:
|
||||||
|
type: string
|
||||||
|
description: Client identifier that initiated the request
|
||||||
|
ui_id:
|
||||||
|
type: string
|
||||||
|
description: UI identifier for task tracking
|
||||||
|
stable:
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
description: Whether to update to stable version (true) or nightly (false)
|
||||||
|
required: [client_id, ui_id]
|
||||||
|
ComfyUISwitchVersionQueryParams:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
ver:
|
||||||
|
type: string
|
||||||
|
description: Version to switch to
|
||||||
|
client_id:
|
||||||
|
type: string
|
||||||
|
description: Client identifier that initiated the request
|
||||||
|
ui_id:
|
||||||
|
type: string
|
||||||
|
description: UI identifier for task tracking
|
||||||
|
required: [ver, client_id, ui_id]
|
||||||
# Queue Status Models
|
# Queue Status Models
|
||||||
QueueStatus:
|
QueueStatus:
|
||||||
type: object
|
type: object
|
||||||
@@ -580,9 +639,7 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
description: Unique operation identifier
|
description: Unique operation identifier
|
||||||
operation_type:
|
operation_type:
|
||||||
type: string
|
$ref: '#/components/schemas/OperationType'
|
||||||
description: Type of operation
|
|
||||||
enum: [install, update, uninstall, fix, disable, enable, install-model]
|
|
||||||
target:
|
target:
|
||||||
type: string
|
type: string
|
||||||
description: Target of the operation (node name, model name, etc.)
|
description: Target of the operation (node name, model name, etc.)
|
||||||
@@ -590,9 +647,7 @@ components:
|
|||||||
type: [string, 'null']
|
type: [string, 'null']
|
||||||
description: Target version for the operation
|
description: Target version for the operation
|
||||||
result:
|
result:
|
||||||
type: string
|
$ref: '#/components/schemas/OperationResult'
|
||||||
description: Operation result
|
|
||||||
enum: [success, failed, skipped]
|
|
||||||
error_message:
|
error_message:
|
||||||
type: [string, 'null']
|
type: [string, 'null']
|
||||||
description: Error message if operation failed
|
description: Error message if operation failed
|
||||||
@@ -640,6 +695,45 @@ components:
|
|||||||
type: object
|
type: object
|
||||||
additionalProperties: true
|
additionalProperties: true
|
||||||
description: ComfyUI Manager configuration settings
|
description: ComfyUI Manager configuration settings
|
||||||
|
comfyui_root_path:
|
||||||
|
type: [string, 'null']
|
||||||
|
description: ComfyUI root installation directory
|
||||||
|
model_paths:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
description: Map of model types to their configured paths
|
||||||
|
manager_version:
|
||||||
|
type: [string, 'null']
|
||||||
|
description: ComfyUI Manager version
|
||||||
|
security_level:
|
||||||
|
$ref: '#/components/schemas/SecurityLevel'
|
||||||
|
network_mode:
|
||||||
|
type: [string, 'null']
|
||||||
|
description: Network mode (online, offline, private)
|
||||||
|
cli_args:
|
||||||
|
type: object
|
||||||
|
additionalProperties: true
|
||||||
|
description: Selected ComfyUI CLI arguments
|
||||||
|
custom_nodes_count:
|
||||||
|
type: [integer, 'null']
|
||||||
|
description: Total number of custom node packages
|
||||||
|
minimum: 0
|
||||||
|
failed_imports:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
description: List of custom nodes that failed to import
|
||||||
|
pip_packages:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
description: Map of installed pip packages to their versions
|
||||||
|
embedded_python:
|
||||||
|
type: [boolean, 'null']
|
||||||
|
description: Whether ComfyUI is running from an embedded Python distribution
|
||||||
required: [snapshot_time, comfyui_version, python_version, platform_info]
|
required: [snapshot_time, comfyui_version, python_version, platform_info]
|
||||||
BatchExecutionRecord:
|
BatchExecutionRecord:
|
||||||
type: object
|
type: object
|
||||||
@@ -1115,6 +1209,89 @@ paths:
|
|||||||
description: Snapshot saved successfully
|
description: Snapshot saved successfully
|
||||||
'400':
|
'400':
|
||||||
description: Error saving snapshot
|
description: Error saving snapshot
|
||||||
|
/v2/snapshot/diff:
|
||||||
|
get:
|
||||||
|
summary: Get snapshot diff
|
||||||
|
description: Returns the changes that would occur when restoring from the 'from' snapshot to the 'to' snapshot.
|
||||||
|
parameters:
|
||||||
|
- name: from
|
||||||
|
in: query
|
||||||
|
required: false
|
||||||
|
description: This parameter refers to the existing snapshot; if omitted, it defaults to the current snapshot.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: to
|
||||||
|
in: query
|
||||||
|
required: true
|
||||||
|
description: This parameter is the snapshot to compare against the existing snapshot.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Successful operation
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
nodepack_diff:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
added:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
removed:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
upgraded:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
from:
|
||||||
|
type: string
|
||||||
|
to:
|
||||||
|
type: string
|
||||||
|
downgraded:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
from:
|
||||||
|
type: string
|
||||||
|
to:
|
||||||
|
type: string
|
||||||
|
changed:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
pip_diff:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
added:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
upgraded:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
from:
|
||||||
|
type: string
|
||||||
|
to:
|
||||||
|
type: string
|
||||||
|
downgraded:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
from:
|
||||||
|
type: string
|
||||||
|
to:
|
||||||
|
type: string
|
||||||
# ComfyUI Management Endpoints (v2)
|
# ComfyUI Management Endpoints (v2)
|
||||||
/v2/comfyui_manager/comfyui_versions:
|
/v2/comfyui_manager/comfyui_versions:
|
||||||
get:
|
get:
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
[project]
|
[project]
|
||||||
name = "comfyui-manager"
|
name = "comfyui-manager"
|
||||||
license = { text = "GPL-3.0-only" }
|
license = { text = "GPL-3.0-only" }
|
||||||
version = "4.0.0-beta.4"
|
version = "4.0.0-beta.10"
|
||||||
requires-python = ">= 3.9"
|
requires-python = ">= 3.9"
|
||||||
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
|
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
|
|||||||
13
pytest.ini
13
pytest.ini
@@ -1,13 +0,0 @@
|
|||||||
[tool:pytest]
|
|
||||||
testpaths = tests
|
|
||||||
python_files = test_*.py
|
|
||||||
python_classes = Test*
|
|
||||||
python_functions = test_*
|
|
||||||
addopts =
|
|
||||||
-v
|
|
||||||
--tb=short
|
|
||||||
--strict-markers
|
|
||||||
--disable-warnings
|
|
||||||
markers =
|
|
||||||
slow: marks tests as slow (deselect with '-m "not slow"')
|
|
||||||
integration: marks tests as integration tests
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
GitPython
|
GitPython
|
||||||
PyGithub
|
PyGithub
|
||||||
matrix-client==0.4.0
|
matrix-nio
|
||||||
transformers
|
transformers
|
||||||
huggingface-hub>0.20
|
huggingface-hub>0.20
|
||||||
typer
|
typer
|
||||||
|
|||||||
42
run_tests.py
42
run_tests.py
@@ -1,42 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Simple test runner for ComfyUI-Manager tests.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
python run_tests.py # Run all tests
|
|
||||||
python run_tests.py -k test_task_queue # Run specific tests
|
|
||||||
python run_tests.py --cov # Run with coverage
|
|
||||||
"""
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import subprocess
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Run pytest with appropriate arguments"""
|
|
||||||
# Ensure we're in the project directory
|
|
||||||
project_root = Path(__file__).parent
|
|
||||||
|
|
||||||
# Base pytest command
|
|
||||||
cmd = [sys.executable, "-m", "pytest"]
|
|
||||||
|
|
||||||
# Add any command line arguments passed to this script
|
|
||||||
cmd.extend(sys.argv[1:])
|
|
||||||
|
|
||||||
# Add default arguments if none provided
|
|
||||||
if len(sys.argv) == 1:
|
|
||||||
cmd.extend([
|
|
||||||
"tests/",
|
|
||||||
"-v",
|
|
||||||
"--tb=short"
|
|
||||||
])
|
|
||||||
|
|
||||||
print(f"Running: {' '.join(cmd)}")
|
|
||||||
print(f"Working directory: {project_root}")
|
|
||||||
|
|
||||||
# Run pytest
|
|
||||||
result = subprocess.run(cmd, cwd=project_root)
|
|
||||||
sys.exit(result.returncode)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
11
scanner.py
11
scanner.py
@@ -255,13 +255,13 @@ def clone_or_pull_git_repository(git_url):
|
|||||||
repo.git.submodule('update', '--init', '--recursive')
|
repo.git.submodule('update', '--init', '--recursive')
|
||||||
print(f"Pulling {repo_name}...")
|
print(f"Pulling {repo_name}...")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Pulling {repo_name} failed: {e}")
|
print(f"Failed to pull '{repo_name}': {e}")
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
Repo.clone_from(git_url, repo_dir, recursive=True)
|
Repo.clone_from(git_url, repo_dir, recursive=True)
|
||||||
print(f"Cloning {repo_name}...")
|
print(f"Cloning {repo_name}...")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Cloning {repo_name} failed: {e}")
|
print(f"Failed to clone '{repo_name}': {e}")
|
||||||
|
|
||||||
|
|
||||||
def update_custom_nodes():
|
def update_custom_nodes():
|
||||||
@@ -496,8 +496,15 @@ def gen_json(node_info):
|
|||||||
nodes_in_url, metadata_in_url = data[git_url]
|
nodes_in_url, metadata_in_url = data[git_url]
|
||||||
nodes = set(nodes_in_url)
|
nodes = set(nodes_in_url)
|
||||||
|
|
||||||
|
try:
|
||||||
for x, desc in node_list_json.items():
|
for x, desc in node_list_json.items():
|
||||||
nodes.add(x.strip())
|
nodes.add(x.strip())
|
||||||
|
except Exception as e:
|
||||||
|
print(f"\nERROR: Invalid json format '{node_list_json_path}'")
|
||||||
|
print("------------------------------------------------------")
|
||||||
|
print(e)
|
||||||
|
print("------------------------------------------------------")
|
||||||
|
node_list_json = {}
|
||||||
|
|
||||||
metadata_in_url['title_aux'] = title
|
metadata_in_url['title_aux'] = title
|
||||||
|
|
||||||
|
|||||||
@@ -1,89 +0,0 @@
|
|||||||
# ComfyUI-Manager Tests
|
|
||||||
|
|
||||||
This directory contains unit tests for ComfyUI-Manager components.
|
|
||||||
|
|
||||||
## Running Tests
|
|
||||||
|
|
||||||
### Using the Virtual Environment
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# From the project root
|
|
||||||
/path/to/comfyui/.venv/bin/python -m pytest tests/ -v
|
|
||||||
```
|
|
||||||
|
|
||||||
### Using the Test Runner
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Run all tests
|
|
||||||
python run_tests.py
|
|
||||||
|
|
||||||
# Run specific tests
|
|
||||||
python run_tests.py -k test_task_queue
|
|
||||||
|
|
||||||
# Run with coverage
|
|
||||||
python run_tests.py --cov
|
|
||||||
```
|
|
||||||
|
|
||||||
## Test Structure
|
|
||||||
|
|
||||||
### test_task_queue.py
|
|
||||||
|
|
||||||
Comprehensive tests for the TaskQueue functionality including:
|
|
||||||
|
|
||||||
- **Basic Operations**: Initialization, adding/removing tasks, state management
|
|
||||||
- **Batch Tracking**: Automatic batch creation, history saving, finalization
|
|
||||||
- **Thread Safety**: Concurrent access, worker lifecycle management
|
|
||||||
- **Integration Testing**: Full task processing workflow
|
|
||||||
- **Edge Cases**: Empty queues, invalid data, exception handling
|
|
||||||
|
|
||||||
**Key Features Tested:**
|
|
||||||
- ✅ Task queueing with Pydantic model validation
|
|
||||||
- ✅ Batch history tracking and persistence
|
|
||||||
- ✅ Thread-safe concurrent operations
|
|
||||||
- ✅ Worker thread lifecycle management
|
|
||||||
- ✅ WebSocket message tracking
|
|
||||||
- ✅ State snapshots and transitions
|
|
||||||
|
|
||||||
### MockTaskQueue
|
|
||||||
|
|
||||||
The tests use a `MockTaskQueue` class that:
|
|
||||||
- Isolates testing from global state and external dependencies
|
|
||||||
- Provides dependency injection for mocking external services
|
|
||||||
- Maintains the same API as the real TaskQueue
|
|
||||||
- Supports both synchronous and asynchronous testing patterns
|
|
||||||
|
|
||||||
## Test Categories
|
|
||||||
|
|
||||||
- **Unit Tests**: Individual method testing with mocked dependencies
|
|
||||||
- **Integration Tests**: Full workflow testing with real threading
|
|
||||||
- **Concurrency Tests**: Multi-threaded access verification
|
|
||||||
- **Edge Case Tests**: Error conditions and boundary cases
|
|
||||||
|
|
||||||
## Dependencies
|
|
||||||
|
|
||||||
Tests require:
|
|
||||||
- `pytest` - Test framework
|
|
||||||
- `pytest-asyncio` - Async test support
|
|
||||||
- `pydantic` - Data model validation
|
|
||||||
|
|
||||||
Install with: `pip install -e ".[dev]"`
|
|
||||||
|
|
||||||
## Design Notes
|
|
||||||
|
|
||||||
### Handling Singleton Pattern
|
|
||||||
|
|
||||||
The real TaskQueue uses a singleton pattern which makes testing challenging. The MockTaskQueue avoids this by:
|
|
||||||
- Not setting global instance variables
|
|
||||||
- Creating fresh instances per test
|
|
||||||
- Providing controlled dependency injection
|
|
||||||
|
|
||||||
### Thread Management
|
|
||||||
|
|
||||||
Tests handle threading complexities by:
|
|
||||||
- Using controlled mock workers for predictable behavior
|
|
||||||
- Providing synchronization primitives for timing-sensitive tests
|
|
||||||
- Testing both successful workflows and exception scenarios
|
|
||||||
|
|
||||||
### Heapq Compatibility
|
|
||||||
|
|
||||||
The original TaskQueue uses `heapq` with Pydantic models, which don't support comparison by default. Tests solve this by wrapping items in comparable tuples with priority values, maintaining FIFO order while enabling heap operations.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
"""Test suite for ComfyUI-Manager"""
|
|
||||||
@@ -1,510 +0,0 @@
|
|||||||
"""
|
|
||||||
Tests for TaskQueue functionality.
|
|
||||||
|
|
||||||
This module tests the core TaskQueue operations including:
|
|
||||||
- Task queueing and processing
|
|
||||||
- Batch tracking
|
|
||||||
- Thread lifecycle management
|
|
||||||
- State management
|
|
||||||
- WebSocket message delivery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from unittest.mock import AsyncMock, MagicMock, Mock, patch
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from comfyui_manager.data_models import (
|
|
||||||
QueueTaskItem,
|
|
||||||
TaskExecutionStatus,
|
|
||||||
TaskStateMessage,
|
|
||||||
InstallPackParams,
|
|
||||||
ManagerDatabaseSource,
|
|
||||||
ManagerChannel,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MockTaskQueue:
|
|
||||||
"""
|
|
||||||
A testable version of TaskQueue that allows for dependency injection
|
|
||||||
and isolated testing without global state.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, history_dir: Optional[Path] = None):
|
|
||||||
# Don't set the global instance for testing
|
|
||||||
self.mutex = threading.RLock()
|
|
||||||
self.not_empty = threading.Condition(self.mutex)
|
|
||||||
self.current_index = 0
|
|
||||||
self.pending_tasks = []
|
|
||||||
self.running_tasks = {}
|
|
||||||
self.history_tasks = {}
|
|
||||||
self.task_counter = 0
|
|
||||||
self.batch_id = None
|
|
||||||
self.batch_start_time = None
|
|
||||||
self.batch_state_before = None
|
|
||||||
self._worker_task = None
|
|
||||||
self._history_dir = history_dir
|
|
||||||
|
|
||||||
# Mock external dependencies
|
|
||||||
self.mock_core = MagicMock()
|
|
||||||
self.mock_prompt_server = MagicMock()
|
|
||||||
|
|
||||||
def is_processing(self) -> bool:
|
|
||||||
"""Check if the queue is currently processing tasks"""
|
|
||||||
return (
|
|
||||||
self._worker_task is not None
|
|
||||||
and self._worker_task.is_alive()
|
|
||||||
)
|
|
||||||
|
|
||||||
def start_worker(self, mock_task_worker=None) -> bool:
|
|
||||||
"""Start the task worker. Can inject a mock worker for testing."""
|
|
||||||
if self._worker_task is not None and self._worker_task.is_alive():
|
|
||||||
return False # Already running
|
|
||||||
|
|
||||||
if mock_task_worker:
|
|
||||||
self._worker_task = threading.Thread(target=mock_task_worker)
|
|
||||||
else:
|
|
||||||
# Use a simple test worker that processes one task then stops
|
|
||||||
self._worker_task = threading.Thread(target=self._test_worker)
|
|
||||||
self._worker_task.start()
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _test_worker(self):
|
|
||||||
"""Simple test worker that processes tasks without external dependencies"""
|
|
||||||
while True:
|
|
||||||
task = self.get(timeout=1.0) # Short timeout for tests
|
|
||||||
if task is None:
|
|
||||||
if self.total_count() == 0:
|
|
||||||
break
|
|
||||||
continue
|
|
||||||
|
|
||||||
item, task_index = task
|
|
||||||
|
|
||||||
# Simulate task processing
|
|
||||||
self.running_tasks[task_index] = item
|
|
||||||
|
|
||||||
# Simulate work
|
|
||||||
time.sleep(0.1)
|
|
||||||
|
|
||||||
# Mark as completed
|
|
||||||
status = TaskExecutionStatus(
|
|
||||||
status_str="success",
|
|
||||||
completed=True,
|
|
||||||
messages=["Test task completed"]
|
|
||||||
)
|
|
||||||
|
|
||||||
self.mark_done(task_index, item, status, "Test result")
|
|
||||||
|
|
||||||
# Clean up
|
|
||||||
if task_index in self.running_tasks:
|
|
||||||
del self.running_tasks[task_index]
|
|
||||||
|
|
||||||
def get_current_state(self) -> TaskStateMessage:
|
|
||||||
"""Get current queue state with mocked dependencies"""
|
|
||||||
return TaskStateMessage(
|
|
||||||
history=self.get_history(),
|
|
||||||
running_queue=self.get_current_queue()[0],
|
|
||||||
pending_queue=self.get_current_queue()[1],
|
|
||||||
installed_packs={} # Mocked empty
|
|
||||||
)
|
|
||||||
|
|
||||||
def send_queue_state_update(self, msg: str, update, client_id: Optional[str] = None):
|
|
||||||
"""Mock implementation that tracks calls instead of sending WebSocket messages"""
|
|
||||||
if not hasattr(self, '_sent_updates'):
|
|
||||||
self._sent_updates = []
|
|
||||||
self._sent_updates.append({
|
|
||||||
'msg': msg,
|
|
||||||
'update': update,
|
|
||||||
'client_id': client_id
|
|
||||||
})
|
|
||||||
|
|
||||||
# Copy the essential methods from the real TaskQueue
|
|
||||||
def put(self, item) -> None:
|
|
||||||
"""Add a task to the queue. Item can be a dict or QueueTaskItem model."""
|
|
||||||
with self.mutex:
|
|
||||||
# Start a new batch if this is the first task after queue was empty
|
|
||||||
if (
|
|
||||||
self.batch_id is None
|
|
||||||
and len(self.pending_tasks) == 0
|
|
||||||
and len(self.running_tasks) == 0
|
|
||||||
):
|
|
||||||
self._start_new_batch()
|
|
||||||
|
|
||||||
# Convert to Pydantic model if it's a dict
|
|
||||||
if isinstance(item, dict):
|
|
||||||
item = QueueTaskItem(**item)
|
|
||||||
|
|
||||||
import heapq
|
|
||||||
# Wrap in tuple with priority to make it comparable
|
|
||||||
# Use task_counter as priority to maintain FIFO order
|
|
||||||
priority_item = (self.task_counter, item)
|
|
||||||
heapq.heappush(self.pending_tasks, priority_item)
|
|
||||||
self.task_counter += 1
|
|
||||||
self.not_empty.notify()
|
|
||||||
|
|
||||||
def _start_new_batch(self) -> None:
|
|
||||||
"""Start a new batch session for tracking operations."""
|
|
||||||
self.batch_id = (
|
|
||||||
f"test_batch_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"
|
|
||||||
)
|
|
||||||
self.batch_start_time = datetime.now().isoformat()
|
|
||||||
self.batch_state_before = {"test": "state"} # Simplified for testing
|
|
||||||
|
|
||||||
def get(self, timeout: Optional[float] = None):
|
|
||||||
"""Get next task from queue"""
|
|
||||||
with self.not_empty:
|
|
||||||
while len(self.pending_tasks) == 0:
|
|
||||||
self.not_empty.wait(timeout=timeout)
|
|
||||||
if timeout is not None and len(self.pending_tasks) == 0:
|
|
||||||
return None
|
|
||||||
import heapq
|
|
||||||
priority_item = heapq.heappop(self.pending_tasks)
|
|
||||||
task_index, item = priority_item # Unwrap the tuple
|
|
||||||
return item, task_index
|
|
||||||
|
|
||||||
def total_count(self) -> int:
|
|
||||||
"""Get total number of tasks (pending + running)"""
|
|
||||||
return len(self.pending_tasks) + len(self.running_tasks)
|
|
||||||
|
|
||||||
def done_count(self) -> int:
|
|
||||||
"""Get number of completed tasks"""
|
|
||||||
return len(self.history_tasks)
|
|
||||||
|
|
||||||
def get_current_queue(self):
|
|
||||||
"""Get current running and pending queues"""
|
|
||||||
running = list(self.running_tasks.values())
|
|
||||||
# Extract items from the priority tuples
|
|
||||||
pending = [item for priority, item in self.pending_tasks]
|
|
||||||
return running, pending
|
|
||||||
|
|
||||||
def get_history(self):
|
|
||||||
"""Get task history"""
|
|
||||||
return self.history_tasks
|
|
||||||
|
|
||||||
def mark_done(self, task_index: int, item: QueueTaskItem, status: TaskExecutionStatus, result: str):
|
|
||||||
"""Mark a task as completed"""
|
|
||||||
from comfyui_manager.data_models import TaskHistoryItem
|
|
||||||
|
|
||||||
history_item = TaskHistoryItem(
|
|
||||||
ui_id=item.ui_id,
|
|
||||||
client_id=item.client_id,
|
|
||||||
kind=item.kind.value if hasattr(item.kind, 'value') else str(item.kind),
|
|
||||||
timestamp=datetime.now().isoformat(),
|
|
||||||
result=result,
|
|
||||||
status=status
|
|
||||||
)
|
|
||||||
|
|
||||||
self.history_tasks[item.ui_id] = history_item
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
"""Finalize batch (simplified for testing)"""
|
|
||||||
if self._history_dir and self.batch_id:
|
|
||||||
batch_file = self._history_dir / f"{self.batch_id}.json"
|
|
||||||
batch_record = {
|
|
||||||
"batch_id": self.batch_id,
|
|
||||||
"start_time": self.batch_start_time,
|
|
||||||
"state_before": self.batch_state_before,
|
|
||||||
"operations": [] # Simplified
|
|
||||||
}
|
|
||||||
with open(batch_file, 'w') as f:
|
|
||||||
json.dump(batch_record, f, indent=2)
|
|
||||||
|
|
||||||
|
|
||||||
class TestTaskQueue:
    """Test suite for TaskQueue functionality.

    All tests run against MockTaskQueue, which replaces WebSocket and
    pack-installation dependencies while keeping the real queueing logic.
    """

    @pytest.fixture
    def task_queue(self, tmp_path):
        """Create a clean TaskQueue instance for each test"""
        # tmp_path gives each test an isolated directory for batch history.
        return MockTaskQueue(history_dir=tmp_path)

    @pytest.fixture
    def sample_task(self):
        """Create a sample task for testing"""
        return QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client",
            kind="install",
            params=InstallPackParams(
                id="test-node",
                version="1.0.0",
                selected_version="1.0.0",
                mode=ManagerDatabaseSource.cache,
                channel=ManagerChannel.dev
            )
        )

    def test_task_queue_initialization(self, task_queue):
        """Test TaskQueue initializes with correct default state"""
        assert task_queue.total_count() == 0
        assert task_queue.done_count() == 0
        # is_processing/start_worker come from the real TaskQueue base —
        # presumably a worker-thread flag; not visible in this file chunk.
        assert not task_queue.is_processing()
        assert task_queue.batch_id is None
        assert len(task_queue.pending_tasks) == 0
        assert len(task_queue.running_tasks) == 0
        assert len(task_queue.history_tasks) == 0

    def test_put_task_starts_batch(self, task_queue, sample_task):
        """Test that adding first task starts a new batch"""
        assert task_queue.batch_id is None

        task_queue.put(sample_task)

        # put() on an idle queue calls _start_new_batch().
        assert task_queue.batch_id is not None
        assert task_queue.batch_id.startswith("test_batch_")
        assert task_queue.batch_start_time is not None
        assert task_queue.total_count() == 1

    def test_put_multiple_tasks(self, task_queue, sample_task):
        """Test adding multiple tasks to queue"""
        task_queue.put(sample_task)

        # Create second task (params shared with the first is fine here;
        # only ui_id/client_id need to differ).
        task2 = QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client_2",
            kind="install",
            params=sample_task.params
        )
        task_queue.put(task2)

        assert task_queue.total_count() == 2
        assert len(task_queue.pending_tasks) == 2

    def test_put_task_with_dict(self, task_queue):
        """Test adding task as dictionary gets converted to QueueTaskItem"""
        task_dict = {
            "ui_id": str(uuid.uuid4()),
            "client_id": "test_client",
            "kind": "install",
            "params": {
                "id": "test-node",
                "version": "1.0.0",
                "selected_version": "1.0.0",
                "mode": "cache",
                "channel": "dev"
            }
        }

        task_queue.put(task_dict)

        assert task_queue.total_count() == 1
        # Verify it was converted to QueueTaskItem
        item, _ = task_queue.get(timeout=0.1)
        assert isinstance(item, QueueTaskItem)
        assert item.ui_id == task_dict["ui_id"]

    def test_get_task_from_queue(self, task_queue, sample_task):
        """Test retrieving task from queue"""
        task_queue.put(sample_task)

        item, task_index = task_queue.get(timeout=0.1)

        assert item == sample_task
        assert isinstance(task_index, int)
        assert task_queue.total_count() == 0  # Should be removed from pending

    def test_get_task_timeout(self, task_queue):
        """Test get with timeout on empty queue returns None"""
        result = task_queue.get(timeout=0.1)
        assert result is None

    def test_start_stop_worker(self, task_queue):
        """Test worker thread lifecycle"""
        assert not task_queue.is_processing()

        # Mock worker that stops immediately
        stop_event = threading.Event()
        def mock_worker():
            stop_event.wait(0.1)  # Brief delay then stop

        started = task_queue.start_worker(mock_worker)
        assert started is True
        assert task_queue.is_processing()

        # Try to start again - should return False (only one worker allowed)
        started_again = task_queue.start_worker(mock_worker)
        assert started_again is False

        # Wait for worker to finish
        stop_event.set()
        # _worker_task: the worker thread handle kept by the real TaskQueue.
        task_queue._worker_task.join(timeout=1.0)
        assert not task_queue.is_processing()

    def test_task_processing_integration(self, task_queue, sample_task):
        """Test full task processing workflow"""
        # Add task to queue
        task_queue.put(sample_task)
        assert task_queue.total_count() == 1

        # Start worker (default worker loop drains the queue)
        started = task_queue.start_worker()
        assert started is True

        # Wait for processing to complete; poll rather than sleep a fixed
        # amount so the test finishes as soon as the task is done.
        for _ in range(50):  # Max 5 seconds
            if task_queue.done_count() > 0:
                break
            time.sleep(0.1)

        # Verify task was processed and moved to history
        assert task_queue.done_count() == 1
        assert task_queue.total_count() == 0
        assert sample_task.ui_id in task_queue.history_tasks

        # Stop worker
        task_queue._worker_task.join(timeout=1.0)

    def test_get_current_state(self, task_queue, sample_task):
        """Test getting current queue state"""
        task_queue.put(sample_task)

        state = task_queue.get_current_state()

        assert isinstance(state, TaskStateMessage)
        assert len(state.pending_queue) == 1
        assert len(state.running_queue) == 0
        assert state.pending_queue[0] == sample_task

    def test_batch_finalization(self, task_queue, tmp_path):
        """Test batch history is saved correctly"""
        task_queue.put(QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client",
            kind="install",
            params=InstallPackParams(
                id="test-node",
                version="1.0.0",
                selected_version="1.0.0",
                mode=ManagerDatabaseSource.cache,
                channel=ManagerChannel.dev
            )
        ))

        # Capture batch_id before finalize in case finalize resets it.
        batch_id = task_queue.batch_id
        task_queue.finalize()

        # Check batch file was created
        batch_file = tmp_path / f"{batch_id}.json"
        assert batch_file.exists()

        # Verify content
        with open(batch_file) as f:
            batch_data = json.load(f)

        assert batch_data["batch_id"] == batch_id
        assert "start_time" in batch_data
        assert "state_before" in batch_data

    def test_concurrent_access(self, task_queue):
        """Test thread-safe concurrent access to queue"""
        num_tasks = 10
        added_tasks = []

        def add_tasks():
            for i in range(num_tasks):
                task = QueueTaskItem(
                    ui_id=f"task_{i}",
                    client_id=f"client_{i}",
                    kind="install",
                    params=InstallPackParams(
                        id=f"node_{i}",
                        version="1.0.0",
                        selected_version="1.0.0",
                        mode=ManagerDatabaseSource.cache,
                        channel=ManagerChannel.dev
                    )
                )
                task_queue.put(task)
                # list.append is atomic under CPython's GIL, so this is
                # safe to call from multiple threads.
                added_tasks.append(task)

        # Start multiple threads adding tasks
        threads = []
        for _ in range(3):
            thread = threading.Thread(target=add_tasks)
            threads.append(thread)
            thread.start()

        # Wait for all threads to complete
        for thread in threads:
            thread.join()

        # Verify all tasks were added (no drops or double counts)
        assert task_queue.total_count() == num_tasks * 3
        assert len(added_tasks) == num_tasks * 3

    # NOTE(review): the test body contains no await — the asyncio mark looks
    # unnecessary; confirm whether pytest-asyncio is required here.
    @pytest.mark.asyncio
    async def test_queue_state_updates_tracking(self, task_queue, sample_task):
        """Test that queue state updates are tracked properly"""
        # Mock the update tracking
        task_queue.send_queue_state_update("test-message", {"test": "data"}, "client1")

        # Verify update was tracked
        assert hasattr(task_queue, '_sent_updates')
        assert len(task_queue._sent_updates) == 1

        update = task_queue._sent_updates[0]
        assert update['msg'] == "test-message"
        assert update['update'] == {"test": "data"}
        assert update['client_id'] == "client1"
|
|
||||||
|
|
||||||
|
|
||||||
class TestTaskQueueEdgeCases:
    """Test edge cases and error conditions"""

    @pytest.fixture
    def task_queue(self):
        # No history_dir: exercises the finalize() no-op path and any
        # code that must tolerate a missing history directory.
        return MockTaskQueue()

    def test_empty_queue_operations(self, task_queue):
        """Test operations on empty queue"""
        assert task_queue.total_count() == 0
        assert task_queue.done_count() == 0

        # Getting from empty queue should timeout
        result = task_queue.get(timeout=0.1)
        assert result is None

        # State should be empty
        state = task_queue.get_current_state()
        assert len(state.pending_queue) == 0
        assert len(state.running_queue) == 0

    def test_invalid_task_data(self, task_queue):
        """Test handling of invalid task data"""
        # This should raise ValidationError due to missing required fields
        # (put() converts dicts via QueueTaskItem(**item), so Pydantic
        # validation runs inside put()).
        with pytest.raises(Exception):  # ValidationError from Pydantic
            task_queue.put({
                "ui_id": "test",
                # Missing required fields
            })

    def test_worker_cleanup_on_exception(self, task_queue):
        """Test worker cleanup when worker function raises exception"""
        exception_raised = threading.Event()

        def failing_worker():
            exception_raised.set()
            raise RuntimeError("Test exception")

        started = task_queue.start_worker(failing_worker)
        assert started is True

        # Wait for exception to be raised
        exception_raised.wait(timeout=1.0)

        # Worker should eventually stop; the processing flag must be
        # cleared even though the worker died with an exception.
        task_queue._worker_task.join(timeout=1.0)
        assert not task_queue.is_processing()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Allow running tests directly. Propagate pytest's exit code: the
    # original discarded pytest.main()'s return value, so a direct run
    # exited with status 0 even when tests failed.
    raise SystemExit(pytest.main([__file__]))
|
|
||||||
Reference in New Issue
Block a user