Compare commits

686 commits, comparing `feat/add-t`...`3.38` (aaed1dc3d5 through 0c815036b9). The per-file changes follow.
```diff
@@ -1 +0,0 @@
-PYPI_TOKEN=your-pypi-token
```
**.github/workflows/ci.yml** (vendored, 70 changed lines)

```diff
@@ -1,70 +0,0 @@
-name: CI
-
-on:
-  push:
-    branches: [ main, feat/*, fix/* ]
-  pull_request:
-    branches: [ main ]
-
-jobs:
-  validate-openapi:
-    name: Validate OpenAPI Specification
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Check if OpenAPI changed
-        id: openapi-changed
-        uses: tj-actions/changed-files@v44
-        with:
-          files: openapi.yaml
-
-      - name: Setup Node.js
-        if: steps.openapi-changed.outputs.any_changed == 'true'
-        uses: actions/setup-node@v4
-        with:
-          node-version: '18'
-
-      - name: Install Redoc CLI
-        if: steps.openapi-changed.outputs.any_changed == 'true'
-        run: |
-          npm install -g @redocly/cli
-
-      - name: Validate OpenAPI specification
-        if: steps.openapi-changed.outputs.any_changed == 'true'
-        run: |
-          redocly lint openapi.yaml
-
-  code-quality:
-    name: Code Quality Checks
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0 # Fetch all history for proper diff
-
-      - name: Get changed Python files
-        id: changed-py-files
-        uses: tj-actions/changed-files@v44
-        with:
-          files: |
-            **/*.py
-          files_ignore: |
-            comfyui_manager/legacy/**
-
-      - name: Setup Python
-        if: steps.changed-py-files.outputs.any_changed == 'true'
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.9'
-
-      - name: Install dependencies
-        if: steps.changed-py-files.outputs.any_changed == 'true'
-        run: |
-          pip install ruff
-
-      - name: Run ruff linting on changed files
-        if: steps.changed-py-files.outputs.any_changed == 'true'
-        run: |
-          echo "Changed files: ${{ steps.changed-py-files.outputs.all_changed_files }}"
-          echo "${{ steps.changed-py-files.outputs.all_changed_files }}" | xargs -r ruff check
```
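The checks this removed workflow performed can still be approximated locally. The following is a minimal sketch, not part of the diff: it assumes `ruff` and the Redocly CLI are installed and that `origin/main` is the comparison branch.

```python
# Local stand-in for the removed CI jobs: lint changed Python files with ruff and,
# if openapi.yaml changed, validate it with Redocly (same commands the workflow ran).
import subprocess

changed = subprocess.run(
    ["git", "diff", "--name-only", "origin/main...HEAD"],
    capture_output=True, text=True, check=True,
).stdout.splitlines()

py_files = [f for f in changed if f.endswith(".py") and not f.startswith("comfyui_manager/legacy/")]
if py_files:
    subprocess.run(["ruff", "check", *py_files], check=True)

if "openapi.yaml" in changed:
    subprocess.run(["redocly", "lint", "openapi.yaml"], check=True)
```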
**.github/workflows/publish-to-pypi.yml** (vendored, 2 changed lines)

```diff
@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - main
+      - draft-v4
     paths:
       - "pyproject.toml"
 
```
**.github/workflows/publish.yml** (vendored, 2 changed lines)

```diff
@@ -14,7 +14,7 @@ jobs:
   publish-node:
     name: Publish Custom Node to registry
     runs-on: ubuntu-latest
-    if: ${{ github.repository_owner == 'ltdrdata' || github.repository_owner == 'Comfy-Org' }}
+    if: ${{ github.repository_owner == 'ltdrdata' }}
     steps:
       - name: Check out code
         uses: actions/checkout@v4
```
**.gitignore** (vendored, 4 changed lines)

```diff
@@ -18,7 +18,3 @@ pip_overrides.json
 *.json
 check2.sh
 /venv/
-build
-dist
-*.egg-info
-.env
```
````diff
@@ -1,47 +0,0 @@
-## Testing Changes
-
-1. Activate the ComfyUI environment.
-
-2. Build package locally after making changes.
-
-```bash
-# from inside the ComfyUI-Manager directory, with the ComfyUI environment activated
-python -m build
-```
-
-3. Install the package locally in the ComfyUI environment.
-
-```bash
-# Uninstall existing package
-pip uninstall comfyui-manager
-
-# Install the locale package
-pip install dist/comfyui-manager-*.whl
-```
-
-4. Start ComfyUI.
-
-```bash
-# after navigating to the ComfyUI directory
-python main.py
-```
-
-## Manually Publish Test Version to PyPi
-
-1. Set the `PYPI_TOKEN` environment variable in env file.
-
-2. If manually publishing, you likely want to use a release candidate version, so set the version in [pyproject.toml](pyproject.toml) to something like `0.0.1rc1`.
-
-3. Build the package.
-
-```bash
-python -m build
-```
-
-4. Upload the package to PyPi.
-
-```bash
-python -m twine upload dist/* --username __token__ --password $PYPI_TOKEN
-```
-
-5. View at https://pypi.org/project/comfyui-manager/
````
**MANIFEST.in** (14 changed lines)

```diff
@@ -1,14 +0,0 @@
-include comfyui_manager/js/*
-include comfyui_manager/*.json
-include comfyui_manager/glob/*
-include LICENSE.txt
-include README.md
-include requirements.txt
-include pyproject.toml
-include custom-node-list.json
-include extension-node-list.json
-include extras.json
-include github-stats.json
-include model-list.json
-include alter-list.json
-include comfyui_manager/channels.list.template
```
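Since the removed MANIFEST.in is what pulled the JSON databases and the channel-list template into source distributions, one way to confirm a freshly built sdist still ships them is to inspect the archive. This is only an illustrative sketch; the archive name pattern is an assumption and depends on the version in pyproject.toml.

```python
# Sketch: list an sdist's members and check for one of the data files MANIFEST.in declared.
import glob
import tarfile

sdist = sorted(glob.glob("dist/comfyui*manager-*.tar.gz"))[-1]
with tarfile.open(sdist) as tf:
    members = tf.getnames()

print(any(name.endswith("channels.list.template") for name in members))
```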
**README.md** (123 changed lines)

````diff
@@ -5,7 +5,7 @@
 
 
 ## NOTICE
-* V4.0: Modify the structure to be installable via pip instead of using git clone.
+* V3.38: **Security patch** - Manager data migrated to protected path. See [Migration Guide](docs/en/v3.38-userdata-security-migration.md).
 * V3.16: Support for `uv` has been added. Set `use_uv` in `config.ini`.
 * V3.10: `double-click feature` is removed
   * This feature has been moved to https://github.com/ltdrdata/comfyui-connection-helper
@@ -14,26 +14,78 @@
 
 ## Installation
 
-* When installing the latest ComfyUI, it will be automatically installed as a dependency, so manual installation is no longer necessary.
+### Installation[method1] (General installation method: ComfyUI-Manager only)
 
-* Manual installation of the nightly version:
-  * Clone to a temporary directory (**Note:** Do **not** clone into `ComfyUI/custom_nodes`.)
-  ```
-  git clone https://github.com/Comfy-Org/ComfyUI-Manager
-  ```
-  * Install via pip
-  ```
-  cd ComfyUI-Manager
-  pip install .
-  ```
+To install ComfyUI-Manager in addition to an existing installation of ComfyUI, you can follow the following steps:
+1. Go to `ComfyUI/custom_nodes` dir in terminal (cmd)
+2. `git clone https://github.com/ltdrdata/ComfyUI-Manager comfyui-manager`
+3. Restart ComfyUI
+
+
+### Installation[method2] (Installation for portable ComfyUI version: ComfyUI-Manager only)
+1. install git
+   - https://git-scm.com/download/win
+   - standalone version
+   - select option: use windows default console window
+2. Download [scripts/install-manager-for-portable-version.bat](https://github.com/ltdrdata/ComfyUI-Manager/raw/main/scripts/install-manager-for-portable-version.bat) into installed `"ComfyUI_windows_portable"` directory
+   - Don't click. Right-click the link and choose 'Save As...'
+3. Double-click `install-manager-for-portable-version.bat` batch file
+
+
+
+### Installation[method3] (Installation through comfy-cli: install ComfyUI and ComfyUI-Manager at once.)
+> RECOMMENDED: comfy-cli provides various features to manage ComfyUI from the CLI.
+
+* **prerequisite: python 3, git**
+
+Windows:
+```commandline
+python -m venv venv
+venv\Scripts\activate
+pip install comfy-cli
+comfy install
+```
+
+Linux/macOS:
+```commandline
+python -m venv venv
+. venv/bin/activate
+pip install comfy-cli
+comfy install
+```
 * See also: https://github.com/Comfy-Org/comfy-cli
 
 
-## Front-end
+### Installation[method4] (Installation for Linux+venv: ComfyUI + ComfyUI-Manager)
 
-* The built-in front-end of ComfyUI-Manager is the legacy front-end. The front-end for ComfyUI-Manager is now provided via [ComfyUI Frontend](https://github.com/Comfy-Org/ComfyUI_frontend).
-* To enable the legacy front-end, set the environment variable `ENABLE_LEGACY_COMFYUI_MANAGER_FRONT` to `true` before running.
+To install ComfyUI with ComfyUI-Manager on Linux using a venv environment, you can follow these steps:
+* **prerequisite: python-is-python3, python3-venv, git**
+
+1. Download [scripts/install-comfyui-venv-linux.sh](https://github.com/ltdrdata/ComfyUI-Manager/raw/main/scripts/install-comfyui-venv-linux.sh) into empty install directory
+   - Don't click. Right-click the link and choose 'Save As...'
+   - ComfyUI will be installed in the subdirectory of the specified directory, and the directory will contain the generated executable script.
+2. `chmod +x install-comfyui-venv-linux.sh`
+3. `./install-comfyui-venv-linux.sh`
+
+### Installation Precautions
+* **DO**: `ComfyUI-Manager` files must be accurately located in the path `ComfyUI/custom_nodes/comfyui-manager`
+  * Installing in a compressed file format is not recommended.
+* **DON'T**: Decompress directly into the `ComfyUI/custom_nodes` location, resulting in the Manager contents like `__init__.py` being placed directly in that directory.
+  * You have to remove all ComfyUI-Manager files from `ComfyUI/custom_nodes`
+* **DON'T**: In a form where decompression occurs in a path such as `ComfyUI/custom_nodes/ComfyUI-Manager/ComfyUI-Manager`.
+* **DON'T**: In a form where decompression occurs in a path such as `ComfyUI/custom_nodes/ComfyUI-Manager-main`.
+  * In such cases, `ComfyUI-Manager` may operate, but it won't be recognized within `ComfyUI-Manager`, and updates cannot be performed. It also poses the risk of duplicate installations. Remove it and install properly via `git clone` method.
+
+
+You can execute ComfyUI by running either `./run_gpu.sh` or `./run_cpu.sh` depending on your system configuration.
+
+## Colab Notebook
+This repository provides Colab notebooks that allow you to install and use ComfyUI, including ComfyUI-Manager. To use ComfyUI, [click on this link](https://colab.research.google.com/github/ltdrdata/ComfyUI-Manager/blob/main/notebooks/comfyui_colab_with_manager.ipynb).
+* Support for installing ComfyUI
+* Support for basic installation of ComfyUI-Manager
+* Support for automatically installing dependencies of custom nodes upon restarting Colab notebooks.
+
 
 ## How To Use
@@ -89,20 +141,27 @@
 
 
 ## Paths
-In `ComfyUI-Manager` V3.0 and later, configuration files and dynamically generated files are located under `<USER_DIRECTORY>/default/ComfyUI-Manager/`.
+Starting from V3.38, Manager uses a protected system path for enhanced security.
 
 * <USER_DIRECTORY>
   * If executed without any options, the path defaults to ComfyUI/user.
   * It can be set using --user-directory <USER_DIRECTORY>.
 
-* Basic config files: `<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini`
-* Configurable channel lists: `<USER_DIRECTORY>/default/ComfyUI-Manager/channels.ini`
-* Configurable pip overrides: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_overrides.json`
-* Configurable pip blacklist: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_blacklist.list`
-* Configurable pip auto fix: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_auto_fix.list`
-* Saved snapshot files: `<USER_DIRECTORY>/default/ComfyUI-Manager/snapshots`
-* Startup script files: `<USER_DIRECTORY>/default/ComfyUI-Manager/startup-scripts`
-* Component files: `<USER_DIRECTORY>/default/ComfyUI-Manager/components`
+| ComfyUI Version | Manager Path |
+|-----------------|--------------|
+| v0.3.76+ (with System User API) | `<USER_DIRECTORY>/__manager/` |
+| Older versions | `<USER_DIRECTORY>/default/ComfyUI-Manager/` |
+
+* Basic config files: `config.ini`
+* Configurable channel lists: `channels.list`
+* Configurable pip overrides: `pip_overrides.json`
+* Configurable pip blacklist: `pip_blacklist.list`
+* Configurable pip auto fix: `pip_auto_fix.list`
+* Saved snapshot files: `snapshots/`
+* Startup script files: `startup-scripts/`
+* Component files: `components/`
+
+> **Note**: See [Migration Guide](docs/en/v3.38-userdata-security-migration.md) for upgrade details.
 
 
 ## `extra_model_paths.yaml` Configuration
@@ -125,7 +184,7 @@ The following settings are applied based on the section marked as `is_default`.
 
 
 
-## cm-cli: command line tools for power user
+## cm-cli: command line tools for power users
 * A tool is provided that allows you to use the features of ComfyUI-Manager without running ComfyUI.
 * For more details, please refer to the [cm-cli documentation](docs/en/cm-cli.md).
 
@@ -171,7 +230,7 @@ The following settings are applied based on the section marked as `is_default`.
 * `<current timestamp>` Ensure that the timestamp is always unique.
 * "components" should have the same structure as the content of the file stored in `<USER_DIRECTORY>/default/ComfyUI-Manager/components`.
 * `<component name>`: The name should be in the format `<prefix>::<node name>`.
-* `<compnent nodeata>`: In the nodedata of the group node.
+* `<component node data>`: In the node data of the group node.
 * `<version>`: Only two formats are allowed: `major.minor.patch` or `major.minor`. (e.g. `1.0`, `2.2.1`)
 * `<datetime>`: Saved time
 * `<packname>`: If the packname is not empty, the category becomes packname/workflow, and it is saved in the <packname>.pack file in `<USER_DIRECTORY>/default/ComfyUI-Manager/components`.
@@ -189,7 +248,7 @@ The following settings are applied based on the section marked as `is_default`.
 * Dragging and dropping or pasting a single component will add a node. However, when adding multiple components, nodes will not be added.
 
 
-## Support of missing nodes installation
+## Support for installing missing nodes
 
 
 
@@ -228,10 +287,10 @@ The following settings are applied based on the section marked as `is_default`.
 * Logging to file feature
   * This feature is enabled by default and can be disabled by setting `file_logging = False` in the `config.ini`.
 
-* Fix node(recreate): When right-clicking on a node and selecting `Fix node (recreate)`, you can recreate the node. The widget's values are reset, while the connections maintain those with the same names.
+* Fix node (recreate): When right-clicking on a node and selecting `Fix node (recreate)`, you can recreate the node. The widget's values are reset, while the connections maintain those with the same names.
   * It is used to correct errors in nodes of old workflows created before, which are incompatible with the version changes of custom nodes.
 
-* Double-Click Node Title: You can set the double click behavior of nodes in the ComfyUI-Manager menu.
+* Double-Click Node Title: You can set the double-click behavior of nodes in the ComfyUI-Manager menu.
   * `Copy All Connections`, `Copy Input Connections`: Double-clicking a node copies the connections of the nearest node.
     * This action targets the nearest node within a straight-line distance of 1000 pixels from the center of the node.
     * In the case of `Copy All Connections`, it duplicates existing outputs, but since it does not allow duplicate connections, the existing output connections of the original node are disconnected.
@@ -297,7 +356,7 @@ When you run the `scan.sh` script:
 
 * It updates the `github-stats.json`.
   * This uses the GitHub API, so set your token with `export GITHUB_TOKEN=your_token_here` to avoid quickly reaching the rate limit and malfunctioning.
-  * To skip this step, add the `--skip-update-stat` option.
+  * To skip this step, add the `--skip-stat-update` option.
 
 * The `--skip-all` option applies both `--skip-update` and `--skip-stat-update`.
 
@@ -305,9 +364,9 @@ When you run the `scan.sh` script:
 ## Troubleshooting
 * If your `git.exe` is installed in a specific location other than system git, please install ComfyUI-Manager and run ComfyUI. Then, specify the path including the file name in `git_exe = ` in the `<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini` file that is generated.
 * If updating ComfyUI-Manager itself fails, please go to the **ComfyUI-Manager** directory and execute the command `git update-ref refs/remotes/origin/main a361cc1 && git fetch --all && git pull`.
-* If you encounter the error message `Overlapped Object has pending operation at deallocation on Comfyui Manager load` under Windows
+* If you encounter the error message `Overlapped Object has pending operation at deallocation on ComfyUI Manager load` under Windows
   * Edit `config.ini` file: add `windows_selector_event_loop_policy = True`
-* if `SSL: CERTIFICATE_VERIFY_FAILED` error is occured.
+* If the `SSL: CERTIFICATE_VERIFY_FAILED` error occurs.
   * Edit `config.ini` file: add `bypass_ssl = True`
 
 
````
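The path table in the README hunk above implies two possible on-disk layouts for Manager data. As a rough illustration only (the helper name and fallback order below are assumptions, not part of the diff), resolving the right directory could look like this:

```python
# Sketch of the layout change described in the README's "Paths" hunk:
# newer ComfyUI builds keep Manager data under "<USER_DIRECTORY>/__manager/",
# older ones under "<USER_DIRECTORY>/default/ComfyUI-Manager/".
import os

def resolve_manager_dir(user_directory: str) -> str:
    new_layout = os.path.join(user_directory, "__manager")
    old_layout = os.path.join(user_directory, "default", "ComfyUI-Manager")
    return new_layout if os.path.isdir(new_layout) else old_layout

print(resolve_manager_dir(os.path.join("ComfyUI", "user")))
```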
**__init__.py** (new file, 25 lines)

```diff
@@ -0,0 +1,25 @@
+"""
+This file is the entry point for the ComfyUI-Manager package, handling CLI-only mode and initial setup.
+"""
+
+import os
+import sys
+
+cli_mode_flag = os.path.join(os.path.dirname(__file__), '.enable-cli-only-mode')
+
+if not os.path.exists(cli_mode_flag):
+    sys.path.append(os.path.join(os.path.dirname(__file__), "glob"))
+    import manager_server # noqa: F401
+    import share_3rdparty # noqa: F401
+    import cm_global
+
+    if not cm_global.disable_front and not 'DISABLE_COMFYUI_MANAGER_FRONT' in os.environ:
+        WEB_DIRECTORY = "js"
+else:
+    print("\n[ComfyUI-Manager] !! cli-only-mode is enabled !!\n")
+
+NODE_CLASS_MAPPINGS = {}
+__all__ = ['NODE_CLASS_MAPPINGS']
```
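As the new `__init__.py` shows, server and front-end registration are skipped whenever a `.enable-cli-only-mode` marker file exists next to the module. A minimal sketch of toggling that mode follows; the install path is an assumption taken from the README's recommended location.

```python
# Create or remove the marker file that __init__.py checks before importing the server modules.
from pathlib import Path

manager_dir = Path("ComfyUI/custom_nodes/comfyui-manager")  # assumed install location
flag = manager_dir / ".enable-cli-only-mode"

flag.touch()      # enable cli-only mode on the next ComfyUI start
# flag.unlink()   # remove the marker to restore the full server/front-end
```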
**channels.list.template** (new file, 6 lines)

```diff
@@ -0,0 +1,6 @@
+default::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main
+recent::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/new
+legacy::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/legacy
+forked::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/forked
+dev::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/dev
+tutorial::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/tutorial
```
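Each template line follows a `<name>::<url>` convention. A hedged sketch of how such a file can be parsed is shown below; this is purely illustrative and not the project's actual loader.

```python
# Parse "name::url" channel entries from the template into a dict.
channels = {}
with open("channels.list.template", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if "::" in line and not line.startswith("#"):
            name, url = line.split("::", 1)
            channels[name] = url

print(channels["default"])
```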
**check.sh** (4 changed lines)

```diff
@@ -37,7 +37,7 @@ find ~/.tmp/default -name "*.py" -print0 | xargs -0 grep -E "crypto|^_A="
 
 echo
 echo CHECK3
-find ~/.tmp/default -name "requirements.txt" | xargs grep "^\s*https\\?:"
-find ~/.tmp/default -name "requirements.txt" | xargs grep "\.whl"
+find ~/.tmp/default -name "requirements.txt" | xargs grep "^\s*[^#]*https\?:"
+find ~/.tmp/default -name "requirements.txt" | xargs grep "^\s*[^#].*\.whl"
 
 echo
```
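The stricter grep patterns in this hunk add a `[^#]` guard so that requirements entries which are commented out no longer match. A small Python illustration of the difference for the `.whl` check; the sample lines are made up.

```python
# Compare the loose and the "[^#]"-guarded .whl patterns from the check.sh hunk.
import re

loose = re.compile(r"\.whl")
guarded = re.compile(r"^\s*[^#].*\.whl")

for line in ["torch @ https://example.com/torch.whl",
             "# https://example.com/old.whl"]:
    print(f"{line!r}: loose={bool(loose.search(line))}, guarded={bool(guarded.search(line))}")
```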
```diff
@@ -15,41 +15,38 @@ import git
 import importlib
 
 
-from ..common import manager_util
+sys.path.append(os.path.dirname(__file__))
+sys.path.append(os.path.join(os.path.dirname(__file__), "glob"))
+
+import manager_util
+
 # read env vars
 # COMFYUI_FOLDERS_BASE_PATH is not required in cm-cli.py
 # `comfy_path` should be resolved before importing manager_core
 
 comfy_path = os.environ.get('COMFYUI_PATH')
 
 if comfy_path is None:
-    print("[bold red]cm-cli: environment variable 'COMFYUI_PATH' is not specified.[/bold red]")
-    exit(-1)
+    try:
+        import folder_paths
+        comfy_path = os.path.join(os.path.dirname(folder_paths.__file__))
+    except:
+        print("\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr)
+        comfy_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..', '..'))
 
+# This should be placed here
 sys.path.append(comfy_path)
 
-if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):
-    print("[bold red]cm-cli: '{comfy_path}' is not a valid 'COMFYUI_PATH' location.[/bold red]")
-    exit(-1)
 
 
 import utils.extra_config
-from ..common import cm_global
-from ..legacy import manager_core as core
-from ..common import context
-from ..legacy.manager_core import unified_manager
-from ..common import cnr_utils
+import cm_global
+import manager_core as core
+from manager_core import unified_manager
+import cnr_utils
 
 comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
 
 cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
 cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
 
-if sys.version_info < (3, 13):
-    cm_global.pip_overrides = {'numpy': 'numpy<2'}
-else:
-    cm_global.pip_overrides = {}
+cm_global.pip_overrides = {}
 
 if os.path.exists(os.path.join(manager_util.comfyui_manager_path, "pip_overrides.json")):
     with open(os.path.join(manager_util.comfyui_manager_path, "pip_overrides.json"), 'r', encoding="UTF-8", errors="ignore") as json_file:
@@ -69,7 +66,7 @@ def check_comfyui_hash():
         repo = git.Repo(comfy_path)
         core.comfy_ui_revision = len(list(repo.iter_commits('HEAD')))
         core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime
-    except Exception:
+    except:
         print('[bold yellow]INFO: Frozen ComfyUI mode.[/bold yellow]')
         core.comfy_ui_revision = 0
         core.comfy_ui_commit_datetime = 0
@@ -85,7 +82,7 @@ def read_downgrade_blacklist():
     try:
         import configparser
         config = configparser.ConfigParser(strict=False)
-        config.read(context.manager_config_path)
+        config.read(core.manager_config.path)
         default_conf = config['default']
 
         if 'downgrade_blacklist' in default_conf:
@@ -93,7 +90,7 @@
             items = [x.strip() for x in items if x != '']
             cm_global.pip_downgrade_blacklist += items
             cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
-    except Exception:
+    except:
         pass
 
 
@@ -108,7 +105,7 @@ class Ctx:
         self.no_deps = False
         self.mode = 'cache'
         self.user_directory = None
-        self.custom_nodes_paths = [os.path.join(context.comfy_base_path, 'custom_nodes')]
+        self.custom_nodes_paths = [os.path.join(core.comfy_base_path, 'custom_nodes')]
         self.manager_files_directory = os.path.dirname(__file__)
 
         if Ctx.folder_paths is None:
@@ -146,17 +143,14 @@ class Ctx:
         if os.path.exists(extra_model_paths_yaml):
             utils.extra_config.load_extra_path_config(extra_model_paths_yaml)
 
-        context.update_user_directory(user_directory)
+        core.update_user_directory(user_directory)
 
-        if os.path.exists(context.manager_pip_overrides_path):
-            with open(context.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
+        if os.path.exists(core.manager_pip_overrides_path):
+            with open(core.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
                 cm_global.pip_overrides = json.load(json_file)
 
-        if sys.version_info < (3, 13):
-            cm_global.pip_overrides = {'numpy': 'numpy<2'}
-
-        if os.path.exists(context.manager_pip_blacklist_path):
-            with open(context.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
+        if os.path.exists(core.manager_pip_blacklist_path):
+            with open(core.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
                 for x in f.readlines():
                     y = x.strip()
                     if y != '':
@@ -169,15 +163,15 @@ class Ctx:
 
     @staticmethod
     def get_startup_scripts_path():
-        return os.path.join(context.manager_startup_script_path, "install-scripts.txt")
+        return os.path.join(core.manager_startup_script_path, "install-scripts.txt")
 
     @staticmethod
     def get_restore_snapshot_path():
-        return os.path.join(context.manager_startup_script_path, "restore-snapshot.json")
+        return os.path.join(core.manager_startup_script_path, "restore-snapshot.json")
 
     @staticmethod
     def get_snapshot_path():
-        return context.manager_snapshot_path
+        return core.manager_snapshot_path
 
     @staticmethod
     def get_custom_nodes_paths():
@@ -444,11 +438,8 @@ def show_list(kind, simple=False):
     flag = kind in ['all', 'cnr', 'installed', 'enabled']
     for k, v in unified_manager.active_nodes.items():
         if flag:
-            cnr = unified_manager.cnr_map.get(k)
-            if cnr:
-                processed[k] = "[ ENABLED ] ", cnr['name'], k, cnr['publisher']['name'], v[0]
-            else:
-                processed[k] = None
+            cnr = unified_manager.cnr_map[k]
+            processed[k] = "[ ENABLED ] ", cnr['name'], k, cnr['publisher']['name'], v[0]
         else:
             processed[k] = None
@@ -468,11 +459,8 @@ def show_list(kind, simple=False):
             continue
 
         if flag:
-            cnr = unified_manager.cnr_map.get(k) # NOTE: can this be None if removed from CNR after installed
-            if cnr:
-                processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], ", ".join(list(v.keys()))
-            else:
-                processed[k] = None
+            cnr = unified_manager.cnr_map[k]
+            processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], ", ".join(list(v.keys()))
         else:
             processed[k] = None
@@ -481,11 +469,8 @@ def show_list(kind, simple=False):
             continue
 
         if flag:
-            cnr = unified_manager.cnr_map.get(k)
-            if cnr:
-                processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], 'nightly'
-            else:
-                processed[k] = None
+            cnr = unified_manager.cnr_map[k]
+            processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], 'nightly'
         else:
             processed[k] = None
@@ -505,12 +490,9 @@ def show_list(kind, simple=False):
             continue
 
         if flag:
-            cnr = unified_manager.cnr_map.get(k)
-            if cnr:
-                ver_spec = v['latest_version']['version'] if 'latest_version' in v else '0.0.0'
-                processed[k] = "[ NOT INSTALLED ] ", cnr['name'], k, cnr['publisher']['name'], ver_spec
-            else:
-                processed[k] = None
+            cnr = unified_manager.cnr_map[k]
+            ver_spec = v['latest_version']['version'] if 'latest_version' in v else '0.0.0'
+            processed[k] = "[ NOT INSTALLED ] ", cnr['name'], k, cnr['publisher']['name'], ver_spec
         else:
             processed[k] = None
@@ -676,7 +658,7 @@ def install(
     cmd_ctx.set_channel_mode(channel, mode)
     cmd_ctx.set_no_deps(no_deps)
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
     pip_fixer.fix_broken()
 
@@ -714,7 +696,7 @@ def reinstall(
     cmd_ctx.set_channel_mode(channel, mode)
     cmd_ctx.set_no_deps(no_deps)
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     for_each_nodes(nodes, act=reinstall_node)
     pip_fixer.fix_broken()
 
@@ -768,7 +750,7 @@ def update(
     if 'all' in nodes:
         asyncio.run(auto_save_snapshot())
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
 
     for x in nodes:
         if x.lower() in ['comfyui', 'comfy', 'all']:
@@ -869,7 +851,7 @@ def fix(
     if 'all' in nodes:
         asyncio.run(auto_save_snapshot())
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     for_each_nodes(nodes, fix_node, allow_all=True)
     pip_fixer.fix_broken()
 
@@ -1146,7 +1128,7 @@ def restore_snapshot(
         print(f"[bold red]ERROR: `{snapshot_path}` is not exists.[/bold red]")
         exit(1)
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     try:
         asyncio.run(core.restore_snapshot(snapshot_path, extras))
     except Exception:
@@ -1178,7 +1160,7 @@ def restore_dependencies(
     total = len(node_paths)
     i = 1
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     for x in node_paths:
         print("----------------------------------------------------------------------------------------------------")
         print(f"Restoring [{i}/{total}]: {x}")
@@ -1197,7 +1179,7 @@ def post_install(
 ):
     path = os.path.expanduser(path)
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     unified_manager.execute_install_script('', path, instant_execution=True)
     pip_fixer.fix_broken()
 
@@ -1237,11 +1219,11 @@ def install_deps(
     with open(deps, 'r', encoding="UTF-8", errors="ignore") as json_file:
         try:
             json_obj = json.load(json_file)
-        except Exception:
+        except:
             print(f"[bold red]Invalid json file: {deps}[/bold red]")
             exit(1)
 
-    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
     for k in json_obj['custom_nodes'].keys():
         state = core.simple_check_custom_node(k)
         if state == 'installed':
@@ -1298,10 +1280,6 @@ def export_custom_node_ids(
             print(f"{x['id']}@unknown", file=output_file)
 
 
-def main():
-    app()
-
-
 if __name__ == '__main__':
     sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
     sys.exit(app())
```
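One recurring difference in the `show_list` hunks above is dictionary access: one side guards the registry lookup with `.get()`, the other indexes the map directly. A tiny, self-contained illustration of the behavioral difference follows; the map contents are made up.

```python
# .get() returns None for unknown keys so the caller can branch; [] raises KeyError.
cnr_map = {"example-node-pack": {"name": "Example Node Pack"}}

print(cnr_map.get("missing-pack"))  # None, so a guarded branch can set processed[k] = None

try:
    cnr_map["missing-pack"]
except KeyError as err:
    print("direct indexing raised KeyError:", err)
```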
```diff
@@ -1,45 +0,0 @@
-import os
-import logging
-
-def prestartup():
-    from . import prestartup_script # noqa: F401
-    logging.info('[PRE] ComfyUI-Manager')
-
-
-def start():
-    from comfy.cli_args import args
-
-    logging.info('[START] ComfyUI-Manager')
-    from .common import cm_global # noqa: F401
-
-    if not args.disable_manager:
-        if args.enable_manager_legacy_ui:
-            try:
-                from .legacy import manager_server # noqa: F401
-                from .legacy import share_3rdparty # noqa: F401
-                import nodes
-
-                logging.info("[ComfyUI-Manager] Legacy UI is enabled.")
-                nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
-            except Exception as e:
-                print("Error enabling legacy ComfyUI Manager frontend:", e)
-        else:
-            from .glob import manager_server # noqa: F401
-            from .glob import share_3rdparty # noqa: F401
-
-
-def should_be_disabled(fullpath:str) -> bool:
-    """
-    1. Disables the legacy ComfyUI-Manager.
-    2. The blocklist can be expanded later based on policies.
-    """
-    from comfy.cli_args import args
-
-    if not args.disable_manager:
-        # In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
-        # It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
-        dir_name = os.path.basename(fullpath).lower()
-        if 'comfyui-manager' in dir_name:
-            return True
-
-    return False
```
@@ -1,6 +0,0 @@
default::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main
recent::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/new
legacy::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/legacy
forked::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/forked
dev::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/dev
tutorial::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/tutorial
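The removed `channels.list` uses a plain `name::url` line format. A minimal parsing sketch, assuming UTF-8 text and ignoring blank lines; the helper name `load_channels` is illustrative and not part of the manager's API:

```python
def load_channels(path: str) -> dict[str, str]:
    """Parse 'name::url' entries from a channels.list file."""
    channels = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            name, _, url = line.partition("::")
            channels[name] = url
    return channels


# load_channels("channels.list")["recent"]
# -> "https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/new"
```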
@@ -1,16 +0,0 @@
# ComfyUI-Manager: Core Backend (glob)

This directory contains the Python backend modules that power ComfyUI-Manager, handling the core functionality of node management, downloading, security, and server operations.

## Core Modules

- **manager_downloader.py**: Handles downloading operations for models, extensions, and other resources.
- **manager_util.py**: Provides utility functions used throughout the system.

## Specialized Modules

- **cm_global.py**: Maintains global variables and state management across the system.
- **cnr_utils.py**: Helper utilities for interacting with the custom node registry (CNR).
- **git_utils.py**: Git-specific utilities for repository operations.
- **node_package.py**: Handles the packaging and installation of node extensions.
- **security_check.py**: Implements the multi-level security system for installation safety.
@@ -1,109 +0,0 @@
import sys
import os
import logging
from . import manager_util
import toml
import git


# read env vars
comfy_path: str = os.environ.get('COMFYUI_PATH')
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')

if comfy_path is None:
    try:
        comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
        os.environ['COMFYUI_PATH'] = comfy_path
    except Exception:
        logging.error("[ComfyUI-Manager] environment variable 'COMFYUI_PATH' is not specified.")
        exit(-1)

if comfy_base_path is None:
    comfy_base_path = comfy_path

channel_list_template_path = os.path.join(manager_util.comfyui_manager_path, 'channels.list.template')
git_script_path = os.path.join(manager_util.comfyui_manager_path, "git_helper.py")

manager_files_path = None
manager_config_path = None
manager_channel_list_path = None
manager_startup_script_path:str = None
manager_snapshot_path = None
manager_pip_overrides_path = None
manager_pip_blacklist_path = None
manager_components_path = None
manager_batch_history_path = None

def update_user_directory(user_dir):
    global manager_files_path
    global manager_config_path
    global manager_channel_list_path
    global manager_startup_script_path
    global manager_snapshot_path
    global manager_pip_overrides_path
    global manager_pip_blacklist_path
    global manager_components_path
    global manager_batch_history_path

    manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
    if not os.path.exists(manager_files_path):
        os.makedirs(manager_files_path)

    manager_snapshot_path = os.path.join(manager_files_path, "snapshots")
    if not os.path.exists(manager_snapshot_path):
        os.makedirs(manager_snapshot_path)

    manager_startup_script_path = os.path.join(manager_files_path, "startup-scripts")
    if not os.path.exists(manager_startup_script_path):
        os.makedirs(manager_startup_script_path)

    manager_config_path = os.path.join(manager_files_path, 'config.ini')
    manager_channel_list_path = os.path.join(manager_files_path, 'channels.list')
    manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
    manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
    manager_components_path = os.path.join(manager_files_path, "components")
    manager_util.cache_dir = os.path.join(manager_files_path, "cache")
    manager_batch_history_path = os.path.join(manager_files_path, "batch_history")

    if not os.path.exists(manager_util.cache_dir):
        os.makedirs(manager_util.cache_dir)

    if not os.path.exists(manager_batch_history_path):
        os.makedirs(manager_batch_history_path)

try:
    import folder_paths
    update_user_directory(folder_paths.get_user_directory())

except Exception:
    # fallback:
    # This case is only possible when running with cm-cli, and in practice, this case is not actually used.
    update_user_directory(os.path.abspath(manager_util.comfyui_manager_path))


def get_current_comfyui_ver():
    """
    Extract version from pyproject.toml
    """
    toml_path = os.path.join(comfy_path, 'pyproject.toml')
    if not os.path.exists(toml_path):
        return None
    else:
        try:
            with open(toml_path, "r", encoding="utf-8") as f:
                data = toml.load(f)

            project = data.get('project', {})
            return project.get('version')
        except Exception:
            return None


def get_comfyui_tag():
    try:
        with git.Repo(comfy_path) as repo:
            return repo.git.describe('--tags')
    except Exception:
        return None
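To make the path handling above easier to follow, here is the per-user layout that `update_user_directory(user_dir)` produces, recomputed in a standalone sketch; the `user_dir` value is a placeholder:

```python
import os

user_dir = "/path/to/ComfyUI/user"  # placeholder; normally folder_paths.get_user_directory()
base = os.path.join(user_dir, "default", "ComfyUI-Manager")

expected_layout = {
    "config": os.path.join(base, "config.ini"),
    "channel list": os.path.join(base, "channels.list"),
    "snapshots": os.path.join(base, "snapshots"),
    "startup scripts": os.path.join(base, "startup-scripts"),
    "pip overrides": os.path.join(base, "pip_overrides.json"),
    "pip blacklist": os.path.join(base, "pip_blacklist.list"),
    "components": os.path.join(base, "components"),
    "cache": os.path.join(base, "cache"),
    "batch history": os.path.join(base, "batch_history"),
}

for label, path in expected_layout.items():
    print(f"{label}: {path}")
```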
@@ -1,17 +0,0 @@
import enum

class NetworkMode(enum.Enum):
    PUBLIC = "public"
    PRIVATE = "private"
    OFFLINE = "offline"

class SecurityLevel(enum.Enum):
    STRONG = "strong"
    NORMAL = "normal"
    NORMAL_MINUS = "normal-minus"
    WEAK = "weak"

class DBMode(enum.Enum):
    LOCAL = "local"
    CACHE = "cache"
    REMOTE = "remote"
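Because each enum member stores its config-file spelling as its value, converting a setting string back into an enum is a direct lookup. A small sketch, with the import path assumed for illustration:

```python
from comfyui_manager.common.enums import DBMode, NetworkMode, SecurityLevel  # assumed module path

level = SecurityLevel("normal-minus")  # -> SecurityLevel.NORMAL_MINUS
mode = NetworkMode("offline")          # -> NetworkMode.OFFLINE
db = DBMode("cache")                   # -> DBMode.CACHE

assert level.value == "normal-minus"
print(level.name, mode.name, db.name)  # NORMAL_MINUS OFFLINE CACHE
```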
@@ -1,67 +0,0 @@
# Data Models

This directory contains Pydantic models for ComfyUI Manager, providing type safety, validation, and serialization for the API and internal data structures.

## Overview

- `generated_models.py` - All models auto-generated from OpenAPI spec
- `__init__.py` - Package exports for all models

**Note**: All models are now auto-generated from the OpenAPI specification. Manual model files (`task_queue.py`, `state_management.py`) have been deprecated in favor of a single source of truth.

## Generating Types from OpenAPI

The state management models are automatically generated from the OpenAPI specification using `datamodel-codegen`. This ensures type safety and consistency between the API specification and the Python code.

### Prerequisites

Install the code generator:

```bash
pipx install datamodel-code-generator
```

### Generation Command

To regenerate all models after updating the OpenAPI spec:

```bash
datamodel-codegen \
  --use-subclass-enum \
  --field-constraints \
  --strict-types bytes \
  --input openapi.yaml \
  --output comfyui_manager/data_models/generated_models.py \
  --output-model-type pydantic_v2.BaseModel
```

### When to Regenerate

You should regenerate the models when:

1. **Adding new API endpoints** that return new data structures
2. **Modifying existing schemas** in the OpenAPI specification
3. **Adding new state management features** that require new models

### Important Notes

- **Single source of truth**: All models are now generated from `openapi.yaml`
- **No manual models**: All previously manual models have been migrated to the OpenAPI spec
- **OpenAPI requirements**: New schemas must be referenced in API paths to be generated by datamodel-codegen
- **Validation**: Always validate the OpenAPI spec before generation:
  ```bash
  python3 -c "import yaml; yaml.safe_load(open('openapi.yaml'))"
  ```

### Example: Adding New State Models

1. Add your schema to `openapi.yaml` under `components/schemas/`
2. Reference the schema in an API endpoint response
3. Run the generation command above
4. Update `__init__.py` to export the new models
5. Import and use the models in your code

### Troubleshooting

- **Models not generated**: Ensure schemas are under `components/schemas/` (not `parameters/`)
- **Missing models**: Verify schemas are referenced in at least one API path
- **Import errors**: Check that new models are added to `__init__.py` exports
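As a concrete companion to step 5 above ("Import and use the models in your code"), a short sketch that builds one of the generated models and round-trips it through JSON using pydantic v2's standard API; the example values are placeholders:

```python
from comfyui_manager.data_models import ManagerPackInfo  # exported via __init__.py

pack = ManagerPackInfo(
    id="some-author/some-pack",  # registry name or github author/repo (placeholder)
    version="1.0.0",             # semantic version or commit hash (placeholder)
)

payload = pack.model_dump_json()                         # serialize to JSON
restored = ManagerPackInfo.model_validate_json(payload)  # parse and validate
assert restored == pack
```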
@@ -1,119 +0,0 @@
"""
Data models for ComfyUI Manager.

This package contains Pydantic models used throughout the ComfyUI Manager
for data validation, serialization, and type safety.

All models are auto-generated from the OpenAPI specification to ensure
consistency between the API and implementation.
"""

from .generated_models import (
    # Core Task Queue Models
    QueueTaskItem,
    TaskHistoryItem,
    TaskStateMessage,
    TaskExecutionStatus,

    # WebSocket Message Models
    MessageTaskDone,
    MessageTaskStarted,
    MessageTaskFailed,
    MessageUpdate,
    ManagerMessageName,

    # State Management Models
    BatchExecutionRecord,
    ComfyUISystemState,
    BatchOperation,
    InstalledNodeInfo,
    InstalledModelInfo,
    ComfyUIVersionInfo,

    # Other models
    Kind,
    StatusStr,
    ManagerPackInfo,
    ManagerPackInstalled,
    SelectedVersion,
    ManagerChannel,
    ManagerDatabaseSource,
    ManagerPackState,
    ManagerPackInstallType,
    ManagerPack,
    InstallPackParams,
    UpdatePackParams,
    UpdateAllPacksParams,
    UpdateComfyUIParams,
    FixPackParams,
    UninstallPackParams,
    DisablePackParams,
    EnablePackParams,
    QueueStatus,
    ManagerMappings,
    ModelMetadata,
    NodePackageMetadata,
    SnapshotItem,
    Error,
    InstalledPacksResponse,
    HistoryResponse,
    HistoryListResponse,
    InstallType,
    OperationType,
    Result,
)

__all__ = [
    # Core Task Queue Models
    "QueueTaskItem",
    "TaskHistoryItem",
    "TaskStateMessage",
    "TaskExecutionStatus",

    # WebSocket Message Models
    "MessageTaskDone",
    "MessageTaskStarted",
    "MessageTaskFailed",
    "MessageUpdate",
    "ManagerMessageName",

    # State Management Models
    "BatchExecutionRecord",
    "ComfyUISystemState",
    "BatchOperation",
    "InstalledNodeInfo",
    "InstalledModelInfo",
    "ComfyUIVersionInfo",

    # Other models
    "Kind",
    "StatusStr",
    "ManagerPackInfo",
    "ManagerPackInstalled",
    "SelectedVersion",
    "ManagerChannel",
    "ManagerDatabaseSource",
    "ManagerPackState",
    "ManagerPackInstallType",
    "ManagerPack",
    "InstallPackParams",
    "UpdatePackParams",
    "UpdateAllPacksParams",
    "UpdateComfyUIParams",
    "FixPackParams",
    "UninstallPackParams",
    "DisablePackParams",
    "EnablePackParams",
    "QueueStatus",
    "ManagerMappings",
    "ModelMetadata",
    "NodePackageMetadata",
    "SnapshotItem",
    "Error",
    "InstalledPacksResponse",
    "HistoryResponse",
    "HistoryListResponse",
    "InstallType",
    "OperationType",
    "Result",
]
@@ -1,476 +0,0 @@
# generated by datamodel-codegen:
# filename: openapi.yaml
# timestamp: 2025-06-14T01:44:21+00:00

from __future__ import annotations

from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Union

from pydantic import BaseModel, Field, RootModel


class Kind(str, Enum):
    install = 'install'
    uninstall = 'uninstall'
    update = 'update'
    update_all = 'update-all'
    update_comfyui = 'update-comfyui'
    fix = 'fix'
    disable = 'disable'
    enable = 'enable'
    install_model = 'install-model'


class StatusStr(str, Enum):
    success = 'success'
    error = 'error'
    skip = 'skip'


class TaskExecutionStatus(BaseModel):
    status_str: StatusStr = Field(..., description='Overall task execution status')
    completed: bool = Field(..., description='Whether the task completed')
    messages: List[str] = Field(..., description='Additional status messages')


class ManagerMessageName(str, Enum):
    cm_task_completed = 'cm-task-completed'
    cm_task_started = 'cm-task-started'
    cm_queue_status = 'cm-queue-status'


class ManagerPackInfo(BaseModel):
    id: str = Field(
        ...,
        description='Either github-author/github-repo or name of pack from the registry',
    )
    version: str = Field(..., description='Semantic version or Git commit hash')
    ui_id: Optional[str] = Field(None, description='Task ID - generated internally')


class ManagerPackInstalled(BaseModel):
    ver: str = Field(
        ...,
        description='The version of the pack that is installed (Git commit hash or semantic version)',
    )
    cnr_id: Optional[str] = Field(
        None, description='The name of the pack if installed from the registry'
    )
    aux_id: Optional[str] = Field(
        None,
        description='The name of the pack if installed from github (author/repo-name format)',
    )
    enabled: bool = Field(..., description='Whether the pack is enabled')


class SelectedVersion(str, Enum):
    latest = 'latest'
    nightly = 'nightly'


class ManagerChannel(str, Enum):
    default = 'default'
    recent = 'recent'
    legacy = 'legacy'
    forked = 'forked'
    dev = 'dev'
    tutorial = 'tutorial'


class ManagerDatabaseSource(str, Enum):
    remote = 'remote'
    local = 'local'
    cache = 'cache'


class ManagerPackState(str, Enum):
    installed = 'installed'
    disabled = 'disabled'
    not_installed = 'not_installed'
    import_failed = 'import_failed'
    needs_update = 'needs_update'


class ManagerPackInstallType(str, Enum):
    git_clone = 'git-clone'
    copy = 'copy'
    cnr = 'cnr'


class UpdateState(Enum):
    false = 'false'
    true = 'true'


class ManagerPack(ManagerPackInfo):
    author: Optional[str] = Field(
        None, description="Pack author name or 'Unclaimed' if added via GitHub crawl"
    )
    files: Optional[List[str]] = Field(None, description='Files included in the pack')
    reference: Optional[str] = Field(
        None, description='The type of installation reference'
    )
    title: Optional[str] = Field(None, description='The display name of the pack')
    cnr_latest: Optional[SelectedVersion] = None
    repository: Optional[str] = Field(None, description='GitHub repository URL')
    state: Optional[ManagerPackState] = None
    update_state: Optional[UpdateState] = Field(
        None, alias='update-state', description='Update availability status'
    )
    stars: Optional[int] = Field(None, description='GitHub stars count')
    last_update: Optional[datetime] = Field(None, description='Last update timestamp')
    health: Optional[str] = Field(None, description='Health status of the pack')
    description: Optional[str] = Field(None, description='Pack description')
    trust: Optional[bool] = Field(None, description='Whether the pack is trusted')
    install_type: Optional[ManagerPackInstallType] = None


class InstallPackParams(ManagerPackInfo):
    selected_version: Union[str, SelectedVersion] = Field(
        ..., description='Semantic version, Git commit hash, latest, or nightly'
    )
    repository: Optional[str] = Field(
        None,
        description='GitHub repository URL (required if selected_version is nightly)',
    )
    pip: Optional[List[str]] = Field(None, description='PyPi dependency names')
    mode: ManagerDatabaseSource
    channel: ManagerChannel
    skip_post_install: Optional[bool] = Field(
        None, description='Whether to skip post-installation steps'
    )


class UpdateAllPacksParams(BaseModel):
    mode: Optional[ManagerDatabaseSource] = None
    ui_id: Optional[str] = Field(None, description='Task ID - generated internally')


class UpdatePackParams(BaseModel):
    node_name: str = Field(..., description='Name of the node package to update')
    node_ver: Optional[str] = Field(
        None, description='Current version of the node package'
    )


class UpdateComfyUIParams(BaseModel):
    is_stable: Optional[bool] = Field(
        True,
        description='Whether to update to stable version (true) or nightly (false)',
    )
    target_version: Optional[str] = Field(
        None,
        description='Specific version to switch to (for version switching operations)',
    )


class FixPackParams(BaseModel):
    node_name: str = Field(..., description='Name of the node package to fix')
    node_ver: str = Field(..., description='Version of the node package')


class UninstallPackParams(BaseModel):
    node_name: str = Field(..., description='Name of the node package to uninstall')
    is_unknown: Optional[bool] = Field(
        False, description='Whether this is an unknown/unregistered package'
    )


class DisablePackParams(BaseModel):
    node_name: str = Field(..., description='Name of the node package to disable')
    is_unknown: Optional[bool] = Field(
        False, description='Whether this is an unknown/unregistered package'
    )


class EnablePackParams(BaseModel):
    cnr_id: str = Field(
        ..., description='ComfyUI Node Registry ID of the package to enable'
    )


class QueueStatus(BaseModel):
    total_count: int = Field(
        ..., description='Total number of tasks (pending + running)'
    )
    done_count: int = Field(..., description='Number of completed tasks')
    in_progress_count: int = Field(..., description='Number of tasks currently running')
    pending_count: Optional[int] = Field(
        None, description='Number of tasks waiting to be executed'
    )
    is_processing: bool = Field(..., description='Whether the task worker is active')
    client_id: Optional[str] = Field(
        None, description='Client ID (when filtered by client)'
    )


class ManagerMappings1(BaseModel):
    title_aux: Optional[str] = Field(None, description='The display name of the pack')


class ManagerMappings(
    RootModel[Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]]]
):
    root: Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]] = Field(
        None, description='Tuple of [node_names, metadata]'
    )


class ModelMetadata(BaseModel):
    name: str = Field(..., description='Name of the model')
    type: str = Field(..., description='Type of model')
    base: Optional[str] = Field(None, description='Base model type')
    save_path: Optional[str] = Field(None, description='Path for saving the model')
    url: str = Field(..., description='Download URL')
    filename: str = Field(..., description='Target filename')
    ui_id: Optional[str] = Field(None, description='ID for UI reference')


class InstallType(str, Enum):
    git = 'git'
    copy = 'copy'
    pip = 'pip'


class NodePackageMetadata(BaseModel):
    title: Optional[str] = Field(None, description='Display name of the node package')
    name: Optional[str] = Field(None, description='Repository/package name')
    files: Optional[List[str]] = Field(None, description='Source URLs for the package')
    description: Optional[str] = Field(
        None, description='Description of the node package functionality'
    )
    install_type: Optional[InstallType] = Field(None, description='Installation method')
    version: Optional[str] = Field(None, description='Version identifier')
    id: Optional[str] = Field(
        None, description='Unique identifier for the node package'
    )
    ui_id: Optional[str] = Field(None, description='ID for UI reference')
    channel: Optional[str] = Field(None, description='Source channel')
    mode: Optional[str] = Field(None, description='Source mode')


class SnapshotItem(RootModel[str]):
    root: str = Field(..., description='Name of the snapshot')


class Error(BaseModel):
    error: str = Field(..., description='Error message')


class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]]):
    root: Optional[Dict[str, ManagerPackInstalled]] = None


class HistoryListResponse(BaseModel):
    ids: Optional[List[str]] = Field(
        None, description='List of available batch history IDs'
    )


class InstalledNodeInfo(BaseModel):
    name: str = Field(..., description='Node package name')
    version: str = Field(..., description='Installed version')
    repository_url: Optional[str] = Field(None, description='Git repository URL')
    install_method: str = Field(
        ..., description='Installation method (cnr, git, pip, etc.)'
    )
    enabled: Optional[bool] = Field(
        True, description='Whether the node is currently enabled'
    )
    install_date: Optional[datetime] = Field(
        None, description='ISO timestamp of installation'
    )


class InstalledModelInfo(BaseModel):
    name: str = Field(..., description='Model filename')
    path: str = Field(..., description='Full path to model file')
    type: str = Field(..., description='Model type (checkpoint, lora, vae, etc.)')
    size_bytes: Optional[int] = Field(None, description='File size in bytes', ge=0)
    hash: Optional[str] = Field(None, description='Model file hash for verification')
    install_date: Optional[datetime] = Field(
        None, description='ISO timestamp when added'
    )


class ComfyUIVersionInfo(BaseModel):
    version: str = Field(..., description='ComfyUI version string')
    commit_hash: Optional[str] = Field(None, description='Git commit hash')
    branch: Optional[str] = Field(None, description='Git branch name')
    is_stable: Optional[bool] = Field(
        False, description='Whether this is a stable release'
    )
    last_updated: Optional[datetime] = Field(
        None, description='ISO timestamp of last update'
    )


class OperationType(str, Enum):
    install = 'install'
    update = 'update'
    uninstall = 'uninstall'
    fix = 'fix'
    disable = 'disable'
    enable = 'enable'
    install_model = 'install-model'


class Result(str, Enum):
    success = 'success'
    failed = 'failed'
    skipped = 'skipped'


class BatchOperation(BaseModel):
    operation_id: str = Field(..., description='Unique operation identifier')
    operation_type: OperationType = Field(..., description='Type of operation')
    target: str = Field(
        ..., description='Target of the operation (node name, model name, etc.)'
    )
    target_version: Optional[str] = Field(
        None, description='Target version for the operation'
    )
    result: Result = Field(..., description='Operation result')
    error_message: Optional[str] = Field(
        None, description='Error message if operation failed'
    )
    start_time: datetime = Field(
        ..., description='ISO timestamp when operation started'
    )
    end_time: Optional[datetime] = Field(
        None, description='ISO timestamp when operation completed'
    )
    client_id: Optional[str] = Field(
        None, description='Client that initiated the operation'
    )


class ComfyUISystemState(BaseModel):
    snapshot_time: datetime = Field(
        ..., description='ISO timestamp when snapshot was taken'
    )
    comfyui_version: ComfyUIVersionInfo
    frontend_version: Optional[str] = Field(
        None, description='ComfyUI frontend version if available'
    )
    python_version: str = Field(..., description='Python interpreter version')
    platform_info: str = Field(
        ..., description='Operating system and platform information'
    )
    installed_nodes: Optional[Dict[str, InstalledNodeInfo]] = Field(
        None, description='Map of installed node packages by name'
    )
    installed_models: Optional[Dict[str, InstalledModelInfo]] = Field(
        None, description='Map of installed models by name'
    )
    manager_config: Optional[Dict[str, Any]] = Field(
        None, description='ComfyUI Manager configuration settings'
    )


class BatchExecutionRecord(BaseModel):
    batch_id: str = Field(..., description='Unique batch identifier')
    start_time: datetime = Field(..., description='ISO timestamp when batch started')
    end_time: Optional[datetime] = Field(
        None, description='ISO timestamp when batch completed'
    )
    state_before: ComfyUISystemState
    state_after: Optional[ComfyUISystemState] = Field(
        None, description='System state after batch execution'
    )
    operations: Optional[List[BatchOperation]] = Field(
        None, description='List of operations performed in this batch'
    )
    total_operations: Optional[int] = Field(
        0, description='Total number of operations in batch', ge=0
    )
    successful_operations: Optional[int] = Field(
        0, description='Number of successful operations', ge=0
    )
    failed_operations: Optional[int] = Field(
        0, description='Number of failed operations', ge=0
    )
    skipped_operations: Optional[int] = Field(
        0, description='Number of skipped operations', ge=0
    )


class QueueTaskItem(BaseModel):
    ui_id: str = Field(..., description='Unique identifier for the task')
    client_id: str = Field(..., description='Client identifier that initiated the task')
    kind: Kind = Field(..., description='Type of task being performed')
    params: Union[
        InstallPackParams,
        UpdatePackParams,
        UpdateAllPacksParams,
        UpdateComfyUIParams,
        FixPackParams,
        UninstallPackParams,
        DisablePackParams,
        EnablePackParams,
        ModelMetadata,
    ]


class TaskHistoryItem(BaseModel):
    ui_id: str = Field(..., description='Unique identifier for the task')
    client_id: str = Field(..., description='Client identifier that initiated the task')
    kind: str = Field(..., description='Type of task that was performed')
    timestamp: datetime = Field(..., description='ISO timestamp when task completed')
    result: str = Field(..., description='Task result message or details')
    status: Optional[TaskExecutionStatus] = None


class TaskStateMessage(BaseModel):
    history: Dict[str, TaskHistoryItem] = Field(
        ..., description='Map of task IDs to their history items'
    )
    running_queue: List[QueueTaskItem] = Field(
        ..., description='Currently executing tasks'
    )
    pending_queue: List[QueueTaskItem] = Field(
        ..., description='Tasks waiting to be executed'
    )
    installed_packs: Dict[str, ManagerPackInstalled] = Field(
        ..., description='Map of currently installed node packages by name'
    )


class MessageTaskDone(BaseModel):
    ui_id: str = Field(..., description='Task identifier')
    result: str = Field(..., description='Task result message')
    kind: str = Field(..., description='Type of task')
    status: Optional[TaskExecutionStatus] = None
    timestamp: datetime = Field(..., description='ISO timestamp when task completed')
    state: TaskStateMessage


class MessageTaskStarted(BaseModel):
    ui_id: str = Field(..., description='Task identifier')
    kind: str = Field(..., description='Type of task')
    timestamp: datetime = Field(..., description='ISO timestamp when task started')
    state: TaskStateMessage


class MessageTaskFailed(BaseModel):
    ui_id: str = Field(..., description='Task identifier')
    error: str = Field(..., description='Error message')
    kind: str = Field(..., description='Type of task')
    timestamp: datetime = Field(..., description='ISO timestamp when task failed')
    state: TaskStateMessage


class MessageUpdate(
    RootModel[Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed]]
):
    root: Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed] = Field(
        ..., description='Union type for all possible WebSocket message updates'
    )


class HistoryResponse(BaseModel):
    history: Optional[Dict[str, TaskHistoryItem]] = Field(
        None, description='Map of task IDs to their history items'
    )
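For illustration, a consumer of the WebSocket messages above could decode an incoming payload with the `MessageUpdate` root model and branch on the concrete message type. The payload below is made up for the example, and the import path is assumed:

```python
from comfyui_manager.data_models import MessageTaskStarted, MessageUpdate  # assumed path

payload = {  # hypothetical 'task started' message
    "ui_id": "task-123",
    "kind": "install",
    "timestamp": "2025-06-14T01:44:21+00:00",
    "state": {
        "history": {},
        "running_queue": [],
        "pending_queue": [],
        "installed_packs": {},
    },
}

msg = MessageUpdate.model_validate(payload)  # resolves the matching union member
if isinstance(msg.root, MessageTaskStarted):
    print(f"task {msg.root.ui_id} ({msg.root.kind}) started at {msg.root.timestamp}")
```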
@@ -1,10 +0,0 @@
- Anytime you make a change to the data being sent or received, you should follow this process:
  1. Adjust the openapi.yaml file first
  2. Verify the syntax of the openapi.yaml file using `yaml.safe_load`
  3. Regenerate the types following the instructions in the `data_models/README.md` file
  4. Verify the new data model is generated
  5. Verify the syntax of the generated types files
  6. Run formatting and linting on the generated types files
  7. Adjust the `__init__.py` files in the `data_models` directory to match/export the new data model
  8. Only then, make the changes to the rest of the codebase
  9. Run the CI tests to verify that the changes are working
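Steps 2 and 5 of this checklist can be scripted locally. A minimal sketch using only the standard library plus PyYAML; the paths mirror the data_models README, and the script itself is not part of the repository:

```python
import py_compile

import yaml

# Step 2: verify the OpenAPI spec parses.
with open("openapi.yaml", "r", encoding="utf-8") as f:
    yaml.safe_load(f)  # raises yaml.YAMLError on malformed YAML
print("openapi.yaml parses cleanly")

# Step 5: verify the regenerated models are syntactically valid Python.
py_compile.compile(
    "comfyui_manager/data_models/generated_models.py",
    doraise=True,  # raise PyCompileError instead of printing
)
print("generated_models.py compiles")
```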
@@ -1,39 +0,0 @@
from comfy.cli_args import args

SECURITY_MESSAGE_MIDDLE_OR_BELOW = "ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."


def is_loopback(address):
    import ipaddress

    try:
        return ipaddress.ip_address(address).is_loopback
    except ValueError:
        return False


is_local_mode = is_loopback(args.listen)


model_dir_name_map = {
    "checkpoints": "checkpoints",
    "checkpoint": "checkpoints",
    "unclip": "checkpoints",
    "text_encoders": "text_encoders",
    "clip": "text_encoders",
    "vae": "vae",
    "lora": "loras",
    "t2i-adapter": "controlnet",
    "t2i-style": "controlnet",
    "controlnet": "controlnet",
    "clip_vision": "clip_vision",
    "gligen": "gligen",
    "upscale": "upscale_models",
    "embedding": "embeddings",
    "embeddings": "embeddings",
    "unet": "diffusion_models",
    "diffusion_model": "diffusion_models",
}
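A short sketch of how the pieces above combine: `model_dir_name_map` normalizes a model's declared type into a target directory, and `is_loopback` decides whether the `--listen` address counts as local mode. The `model_type` value is a made-up example, and the names are assumed to be in scope from the module above:

```python
model_type = "lora"  # e.g. the 'type' field of a model entry
target_dir = model_dir_name_map.get(model_type, model_type)
print(target_dir)  # -> "loras"

# A loopback bind address enables local mode; a public bind address does not.
assert is_loopback("127.0.0.1") is True
assert is_loopback("0.0.0.0") is False
```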
File diff suppressed because it is too large
@@ -1,386 +0,0 @@
import mimetypes
from ..common import context
from . import manager_core as core

import os
from aiohttp import web
import aiohttp
import json
import hashlib

import folder_paths
from server import PromptServer
import logging


def extract_model_file_names(json_data):
    """Extract unique file names from the input JSON data."""
    file_names = set()
    model_filename_extensions = {'.safetensors', '.ckpt', '.pt', '.pth', '.bin'}

    # Recursively search for file names in the JSON data
    def recursive_search(data):
        if isinstance(data, dict):
            for value in data.values():
                recursive_search(value)
        elif isinstance(data, list):
            for item in data:
                recursive_search(item)
        elif isinstance(data, str) and '.' in data:
            file_names.add(os.path.basename(data))  # file_names.add(data)

    recursive_search(json_data)
    return [f for f in list(file_names) if os.path.splitext(f)[1] in model_filename_extensions]


def find_file_paths(base_dir, file_names):
    """Find the paths of the files in the base directory."""
    file_paths = {}

    for root, dirs, files in os.walk(base_dir):
        # Exclude certain directories
        dirs[:] = [d for d in dirs if d not in ['.git']]

        for file in files:
            if file in file_names:
                file_paths[file] = os.path.join(root, file)
    return file_paths


def compute_sha256_checksum(filepath):
    """Compute the SHA256 checksum of a file, in chunks"""
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            sha256.update(chunk)
    return sha256.hexdigest()


@PromptServer.instance.routes.get("/v2/manager/share_option")
async def share_option(request):
    if "value" in request.rel_url.query:
        core.get_config()['share_option'] = request.rel_url.query['value']
        core.write_config()
    else:
        return web.Response(text=core.get_config()['share_option'], status=200)

    return web.Response(status=200)


def get_openart_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
            openart_key = f.read().strip()
            return openart_key if openart_key else None
    except Exception:
        return None


def get_matrix_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
            matrix_auth = f.read()
            homeserver, username, password = matrix_auth.strip().split("\n")
            if not homeserver or not username or not password:
                return None
            return {
                "homeserver": homeserver,
                "username": username,
                "password": password,
            }
    except Exception:
        return None


def get_comfyworkflows_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
            share_key = f.read()
            if not share_key.strip():
                return None
            return share_key
    except Exception:
        return None


def get_youml_settings():
    if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
            youml_settings = f.read().strip()
            return youml_settings if youml_settings else None
    except Exception:
        return None


def set_youml_settings(settings):
    with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
        f.write(settings)


@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
async def api_get_openart_auth(request):
    # print("Getting stored Matrix credentials...")
    openart_key = get_openart_auth()
    if not openart_key:
        return web.Response(status=404)
    return web.json_response({"openart_key": openart_key})


@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
async def api_set_openart_auth(request):
    json_data = await request.json()
    openart_key = json_data['openart_key']
    with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
        f.write(openart_key)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
    # print("Getting stored Matrix credentials...")
    matrix_auth = get_matrix_auth()
    if not matrix_auth:
        return web.Response(status=404)
    return web.json_response(matrix_auth)


@PromptServer.instance.routes.get("/v2/manager/youml/settings")
async def api_get_youml_settings(request):
    youml_settings = get_youml_settings()
    if not youml_settings:
        return web.Response(status=404)
    return web.json_response(json.loads(youml_settings))


@PromptServer.instance.routes.post("/v2/manager/youml/settings")
async def api_set_youml_settings(request):
    json_data = await request.json()
    set_youml_settings(json.dumps(json_data))
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
    # Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
    # in the same directory as the ComfyUI base folder
    # print("Getting stored Comfyworkflows.com auth...")
    comfyworkflows_auth = get_comfyworkflows_auth()
    if not comfyworkflows_auth:
        return web.Response(status=404)
    return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})


@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
    json_data = await request.json()
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
        json.dump(json_data, file, indent=4)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
        data = json.load(file)
        return web.Response(status=200, text=json.dumps(data))


def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
    password = json_data['password']
    with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
        f.write("\n".join([homeserver, username, password]))


def set_comfyworkflows_auth(comfyworkflows_sharekey):
    with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
        f.write(comfyworkflows_sharekey)


def has_provided_matrix_auth(matrix_auth):
    return matrix_auth['homeserver'].strip() and matrix_auth['username'].strip() and matrix_auth['password'].strip()


def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
    return comfyworkflows_sharekey.strip()


@PromptServer.instance.routes.post("/v2/manager/share")
async def share_art(request):
    # get json data
    json_data = await request.json()

    matrix_auth = json_data['matrix_auth']
    comfyworkflows_sharekey = json_data['cw_auth']['cw_sharekey']

    set_matrix_auth(matrix_auth)
    set_comfyworkflows_auth(comfyworkflows_sharekey)

    share_destinations = json_data['share_destinations']
    credits = json_data['credits']
    title = json_data['title']
    description = json_data['description']
    is_nsfw = json_data['is_nsfw']
    prompt = json_data['prompt']
    potential_outputs = json_data['potential_outputs']
    selected_output_index = json_data['selected_output_index']

    try:
        output_to_share = potential_outputs[int(selected_output_index)]
    except Exception:
        # for now, pick the first output
        output_to_share = potential_outputs[0]

    assert output_to_share['type'] in ('image', 'output')
    output_dir = folder_paths.get_output_directory()

    if output_to_share['type'] == 'image':
        asset_filename = output_to_share['image']['filename']
        asset_subfolder = output_to_share['image']['subfolder']

        if output_to_share['image']['type'] == 'temp':
            output_dir = folder_paths.get_temp_directory()
    else:
        asset_filename = output_to_share['output']['filename']
        asset_subfolder = output_to_share['output']['subfolder']

    if asset_subfolder:
        asset_filepath = os.path.join(output_dir, asset_subfolder, asset_filename)
    else:
        asset_filepath = os.path.join(output_dir, asset_filename)

    # get the mime type of the asset
    assetFileType = mimetypes.guess_type(asset_filepath)[0]

    share_website_host = "UNKNOWN"
    if "comfyworkflows" in share_destinations:
        share_website_host = "https://comfyworkflows.com"
        share_endpoint = f"{share_website_host}/api"

        # get presigned urls
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.post(
                f"{share_endpoint}/get_presigned_urls",
                json={
                    "assetFileName": asset_filename,
                    "assetFileType": assetFileType,
                    "workflowJsonFileName": 'workflow.json',
                    "workflowJsonFileType": 'application/json',
                },
            ) as resp:
                assert resp.status == 200
                presigned_urls_json = await resp.json()
                assetFilePresignedUrl = presigned_urls_json["assetFilePresignedUrl"]
                assetFileKey = presigned_urls_json["assetFileKey"]
                workflowJsonFilePresignedUrl = presigned_urls_json["workflowJsonFilePresignedUrl"]
                workflowJsonFileKey = presigned_urls_json["workflowJsonFileKey"]

        # upload asset
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(assetFilePresignedUrl, data=open(asset_filepath, "rb")) as resp:
                assert resp.status == 200

        # upload workflow json
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(workflowJsonFilePresignedUrl, data=json.dumps(prompt['workflow']).encode('utf-8')) as resp:
                assert resp.status == 200

        model_filenames = extract_model_file_names(prompt['workflow'])
        model_file_paths = find_file_paths(folder_paths.base_path, model_filenames)

        models_info = {}
        for filename, filepath in model_file_paths.items():
            models_info[filename] = {
                "filename": filename,
                "sha256_checksum": compute_sha256_checksum(filepath),
                "relative_path": os.path.relpath(filepath, folder_paths.base_path),
            }

        # make a POST request to /api/upload_workflow with form data key values
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            form = aiohttp.FormData()
            if comfyworkflows_sharekey:
                form.add_field("shareKey", comfyworkflows_sharekey)
            form.add_field("source", "comfyui_manager")
            form.add_field("assetFileKey", assetFileKey)
            form.add_field("assetFileType", assetFileType)
            form.add_field("workflowJsonFileKey", workflowJsonFileKey)
            form.add_field("sharedWorkflowWorkflowJsonString", json.dumps(prompt['workflow']))
            form.add_field("sharedWorkflowPromptJsonString", json.dumps(prompt['output']))
            form.add_field("shareWorkflowCredits", credits)
            form.add_field("shareWorkflowTitle", title)
            form.add_field("shareWorkflowDescription", description)
            form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
            form.add_field("currentSnapshot", json.dumps(await core.get_current_snapshot()))
            form.add_field("modelsInfo", json.dumps(models_info))

            async with session.post(
                f"{share_endpoint}/upload_workflow",
                data=form,
            ) as resp:
                assert resp.status == 200
                upload_workflow_json = await resp.json()
                workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            from matrix_client.api import MatrixHttpApi
            from matrix_client.client import MatrixClient

            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
            homeserver = homeserver.replace("http://", "https://")
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = MatrixClient(homeserver)
            try:
                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
                if not token:
                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
            except Exception:
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            matrix = MatrixHttpApi(homeserver, token=token)
            with open(asset_filepath, 'rb') as f:
                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']

            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']

            text_content = ""
            if title:
                text_content += f"{title}\n"
            if description:
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            matrix.send_message(comfyui_share_room_id, text_content)
            matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
            matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
        except Exception:
            logging.exception("An error occurred")
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)

    return web.json_response({
        "comfyworkflows": {
            "url": None if "comfyworkflows" not in share_destinations else f"{share_website_host}/workflows/{workflowId}",
        },
        "matrix": {
            "success": None if "matrix" not in share_destinations else True
        }
    }, content_type='application/json', status=200)
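To make the model-hashing part of the share flow easier to follow in isolation, here is a standalone sketch that rebuilds the `models_info` structure the `/v2/manager/share` handler uploads, using the helpers defined above; the workflow dict and base path are placeholders:

```python
import os

workflow = {"4": {"inputs": {"ckpt_name": "example_model.safetensors"}}}  # placeholder
base_path = "/path/to/ComfyUI"  # placeholder; normally folder_paths.base_path

model_filenames = extract_model_file_names(workflow)
model_file_paths = find_file_paths(base_path, model_filenames)

models_info = {
    filename: {
        "filename": filename,
        "sha256_checksum": compute_sha256_checksum(filepath),
        "relative_path": os.path.relpath(filepath, base_path),
    }
    for filename, filepath in model_file_paths.items()
}
print(models_info)
```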
@@ -1,142 +0,0 @@
import os
import git
import logging
import traceback

from comfyui_manager.common import context
import folder_paths
from comfy.cli_args import args
import latent_preview

from comfyui_manager.glob import manager_core as core
from comfyui_manager.common import cm_global


comfy_ui_hash = "-"
comfyui_tag = None


def print_comfyui_version():
    global comfy_ui_hash
    global comfyui_tag

    is_detached = False
    try:
        repo = git.Repo(os.path.dirname(folder_paths.__file__))
        core.comfy_ui_revision = len(list(repo.iter_commits("HEAD")))

        comfy_ui_hash = repo.head.commit.hexsha
        cm_global.variables["comfyui.revision"] = core.comfy_ui_revision

        core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime
        cm_global.variables["comfyui.commit_datetime"] = core.comfy_ui_commit_datetime

        is_detached = repo.head.is_detached
        current_branch = repo.active_branch.name

        comfyui_tag = context.get_comfyui_tag()

        try:
            if (
                not os.environ.get("__COMFYUI_DESKTOP_VERSION__")
                and core.comfy_ui_commit_datetime.date()
                < core.comfy_ui_required_commit_datetime.date()
            ):
                logging.warning(
                    f"\n\n## [WARN] ComfyUI-Manager: Your ComfyUI version ({core.comfy_ui_revision})[{core.comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version. ##\n\n"
                )
        except Exception:
            pass

        # process on_revision_detected -->
        if "cm.on_revision_detected_handler" in cm_global.variables:
            for k, f in cm_global.variables["cm.on_revision_detected_handler"]:
                try:
                    f(core.comfy_ui_revision)
                except Exception:
                    logging.error(f"[ERROR] '{k}' on_revision_detected_handler")
                    traceback.print_exc()

            del cm_global.variables["cm.on_revision_detected_handler"]
        else:
            logging.warning(
                "[ComfyUI-Manager] Some features are restricted due to your ComfyUI being outdated."
            )
        # <--

        if current_branch == "master":
            if comfyui_tag:
                logging.info(
                    f"### ComfyUI Version: {comfyui_tag} | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
            else:
                logging.info(
                    f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
        else:
            if comfyui_tag:
                logging.info(
                    f"### ComfyUI Version: {comfyui_tag} on '{current_branch}' | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
            else:
                logging.info(
                    f"### ComfyUI Revision: {core.comfy_ui_revision} on '{current_branch}' [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
    except Exception:
        if is_detached:
            logging.info(
                f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] *DETACHED | Released on '{core.comfy_ui_commit_datetime.date()}'"
            )
        else:
            logging.info(
                "### ComfyUI Revision: UNKNOWN (The currently installed ComfyUI is not a Git repository)"
            )


def set_preview_method(method):
    if method == "auto":
        args.preview_method = latent_preview.LatentPreviewMethod.Auto
    elif method == "latent2rgb":
        args.preview_method = latent_preview.LatentPreviewMethod.Latent2RGB
    elif method == "taesd":
        args.preview_method = latent_preview.LatentPreviewMethod.TAESD
    else:
        args.preview_method = latent_preview.LatentPreviewMethod.NoPreviews

    core.get_config()["preview_method"] = method


def set_update_policy(mode):
    core.get_config()["update_policy"] = mode


def set_db_mode(mode):
    core.get_config()["db_mode"] = mode


def setup_environment():
    git_exe = core.get_config()["git_exe"]

    if git_exe != "":
        git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=git_exe)


def initialize_environment():
    context.comfy_path = os.path.dirname(folder_paths.__file__)
    core.js_path = os.path.join(context.comfy_path, "web", "extensions")

    # Legacy database paths - kept for potential future use
    # local_db_model = os.path.join(manager_util.comfyui_manager_path, "model-list.json")
    # local_db_alter = os.path.join(manager_util.comfyui_manager_path, "alter-list.json")
    # local_db_custom_node_list = os.path.join(
    #     manager_util.comfyui_manager_path, "custom-node-list.json"
    # )
    # local_db_extension_node_mappings = os.path.join(
    #     manager_util.comfyui_manager_path, "extension-node-map.json"
    # )

    set_preview_method(core.get_config()["preview_method"])
    print_comfyui_version()
    setup_environment()

    core.check_invalid_nodes()
@@ -1,60 +0,0 @@
import locale
import sys
import re


def handle_stream(stream, prefix):
    stream.reconfigure(encoding=locale.getpreferredencoding(), errors="replace")
    for msg in stream:
        if (
            prefix == "[!]"
            and ("it/s]" in msg or "s/it]" in msg)
            and ("%|" in msg or "it [" in msg)
        ):
            if msg.startswith("100%"):
                print("\r" + msg, end="", file=sys.stderr),
            else:
                print("\r" + msg[:-1], end="", file=sys.stderr),
        else:
            if prefix == "[!]":
                print(prefix, msg, end="", file=sys.stderr)
            else:
                print(prefix, msg, end="")


def convert_markdown_to_html(input_text):
    pattern_a = re.compile(r"\[a/([^]]+)]\(([^)]+)\)")
    pattern_w = re.compile(r"\[w/([^]]+)]")
    pattern_i = re.compile(r"\[i/([^]]+)]")
    pattern_bold = re.compile(r"\*\*([^*]+)\*\*")
    pattern_white = re.compile(r"%%([^*]+)%%")

    def replace_a(match):
        return f"<a href='{match.group(2)}' target='blank'>{match.group(1)}</a>"

    def replace_w(match):
        return f"<p class='cm-warn-note'>{match.group(1)}</p>"

    def replace_i(match):
        return f"<p class='cm-info-note'>{match.group(1)}</p>"

    def replace_bold(match):
        return f"<B>{match.group(1)}</B>"

    def replace_white(match):
        return f"<font color='white'>{match.group(1)}</font>"

    input_text = (
        input_text.replace("\\[", "[")
        .replace("\\]", "]")
        .replace("<", "&lt;")
        .replace(">", "&gt;")
    )

    result_text = re.sub(pattern_a, replace_a, input_text)
    result_text = re.sub(pattern_w, replace_w, result_text)
    result_text = re.sub(pattern_i, replace_i, result_text)
    result_text = re.sub(pattern_bold, replace_bold, result_text)
    result_text = re.sub(pattern_white, replace_white, result_text)

    return result_text.replace("\n", "<BR>")
@@ -1,74 +0,0 @@
import os
import logging
import folder_paths

from comfyui_manager.glob import manager_core as core
from comfyui_manager.glob.constants import model_dir_name_map


def get_model_dir(data, show_log=False):
    if "download_model_base" in folder_paths.folder_names_and_paths:
        models_base = folder_paths.folder_names_and_paths["download_model_base"][0][0]
    else:
        models_base = folder_paths.models_dir

    # NOTE: Validate to prevent path traversal.
    if any(char in data["filename"] for char in {"/", "\\", ":"}):
        return None

    def resolve_custom_node(save_path):
        save_path = save_path[13:]  # remove 'custom_nodes/'

        # NOTE: Validate to prevent path traversal.
        if save_path.startswith(os.path.sep) or ":" in save_path:
            return None

        repo_name = save_path.replace("\\", "/").split("/")[
            0
        ]  # get custom node repo name

        # NOTE: The creation of files within the custom node path should be removed in the future.
        repo_path = core.lookup_installed_custom_nodes_legacy(repo_name)
        if repo_path is not None and repo_path[0]:
            # Returns the retargeted path based on the actually installed repository
            return os.path.join(os.path.dirname(repo_path[1]), save_path)
        else:
            return None

    if data["save_path"] != "default":
        if ".." in data["save_path"] or data["save_path"].startswith("/"):
            if show_log:
                logging.info(
                    f"[WARN] '{data['save_path']}' is not allowed path. So it will be saved into 'models/etc'."
                )
            base_model = os.path.join(models_base, "etc")
        else:
            if data["save_path"].startswith("custom_nodes"):
                base_model = resolve_custom_node(data["save_path"])
                if base_model is None:
                    if show_log:
                        logging.info(
                            f"[ComfyUI-Manager] The target custom node for model download is not installed: {data['save_path']}"
                        )
                    return None
            else:
                base_model = os.path.join(models_base, data["save_path"])
    else:
        model_dir_name = model_dir_name_map.get(data["type"].lower())
        if model_dir_name is not None:
            base_model = folder_paths.folder_names_and_paths[model_dir_name][0][0]
        else:
            base_model = os.path.join(models_base, "etc")

    return base_model


def get_model_path(data, show_log=False):
    base_model = get_model_dir(data, show_log)

    if base_model is None:
        return None
    else:
        if data["filename"] == "<huggingface>":
            return os.path.join(base_model, os.path.basename(data["url"]))
        else:
            return os.path.join(base_model, data["filename"])
@@ -1,65 +0,0 @@
import concurrent.futures

from comfyui_manager.glob import manager_core as core


def check_state_of_git_node_pack(
    node_packs, do_fetch=False, do_update_check=True, do_update=False
):
    if do_fetch:
        print("Start fetching...", end="")
    elif do_update:
        print("Start updating...", end="")
    elif do_update_check:
        print("Start update check...", end="")

    def process_custom_node(item):
        core.check_state_of_git_node_pack_single(
            item, do_fetch, do_update_check, do_update
        )

    with concurrent.futures.ThreadPoolExecutor(4) as executor:
        for k, v in node_packs.items():
            if v.get("active_version") in ["unknown", "nightly"]:
                executor.submit(process_custom_node, v)

    if do_fetch:
        print("\x1b[2K\rFetching done.")
    elif do_update:
        update_exists = any(
            item.get("updatable", False) for item in node_packs.values()
        )
        if update_exists:
            print("\x1b[2K\rUpdate done.")
        else:
            print("\x1b[2K\rAll extensions are already up-to-date.")
    elif do_update_check:
        print("\x1b[2K\rUpdate check done.")


def nickname_filter(json_obj):
    preemptions_map = {}

    for k, x in json_obj.items():
        if "preemptions" in x[1]:
            for y in x[1]["preemptions"]:
                preemptions_map[y] = k
        elif k.endswith("/ComfyUI"):
            for y in x[0]:
                preemptions_map[y] = k

    updates = {}
    for k, x in json_obj.items():
        removes = set()
        for y in x[0]:
            k2 = preemptions_map.get(y)
            if k2 is not None and k != k2:
                removes.add(y)

        if len(removes) > 0:
            updates[k] = [y for y in x[0] if y not in removes]

    for k, v in updates.items():
        json_obj[k][0] = v

    return json_obj
@@ -1,53 +0,0 @@
from comfyui_manager.glob import manager_core as core
from comfy.cli_args import args


def is_loopback(address):
    import ipaddress
    try:
        return ipaddress.ip_address(address).is_loopback
    except ValueError:
        return False


def is_allowed_security_level(level):
    is_local_mode = is_loopback(args.listen)

    if level == "block":
        return False
    elif level == "high":
        if is_local_mode:
            return core.get_config()["security_level"] in ["weak", "normal-"]
        else:
            return core.get_config()["security_level"] == "weak"
    elif level == "middle":
        return core.get_config()["security_level"] in ["weak", "normal", "normal-"]
    else:
        return True


async def get_risky_level(files, pip_packages):
    json_data1 = await core.get_data_by_mode("local", "custom-node-list.json")
    json_data2 = await core.get_data_by_mode(
        "cache",
        "custom-node-list.json",
        channel_url="https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main",
    )

    all_urls = set()
    for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
        all_urls.update(x.get("files", []))

    for x in files:
        if x not in all_urls:
            return "high"

    all_pip_packages = set()
    for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
        all_pip_packages.update(x.get("pip", []))

    for p in pip_packages:
        if p not in all_pip_packages:
            return "block"

    return "middle"
11519	custom-node-list.json	Executable file → Normal file
File diff suppressed because it is too large
@@ -139,7 +139,7 @@ You can set whether to use ComfyUI-Manager solely via CLI.
 `restore-dependencies`

 * This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
-* It is useful when starting a new cloud instance, like colab, where dependencies need to be reinstalled and installation scripts re-executed.
+* It is useful when starting a new cloud instance, like Colab, where dependencies need to be reinstalled and installation scripts re-executed.
 * It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.

 ### 7. Clear
230	docs/en/v3.38-userdata-security-migration.md	Normal file
@@ -0,0 +1,230 @@
# ComfyUI-Manager V3.38: Userdata Security Migration Guide

## Introduction

ComfyUI-Manager V3.38 introduces a **security patch** that migrates Manager's configuration and data to a protected system path. This change leverages ComfyUI's new System User Protection API (PR #10966) to provide enhanced security isolation.

This guide explains what happens during the migration and how to handle various situations.

---

## What Changed

### Finding Your Paths

When ComfyUI starts, it displays the full paths in the terminal:

```
** User directory: /path/to/ComfyUI/user
** ComfyUI-Manager config path: /path/to/ComfyUI/user/__manager/config.ini
```

Look for these lines in your startup log to find the exact location on your system. In this guide, paths are shown relative to the `user` directory.
### Path Migration

| Data | Legacy Path | New Path |
|------|-------------|----------|
| Configuration | `user/default/ComfyUI-Manager/` | `user/__manager/` |
| Snapshots | `user/default/ComfyUI-Manager/snapshots/` | `user/__manager/snapshots/` |

### Why This Change

In older ComfyUI versions, the `default/` directory was **unprotected** and accessible via web APIs. If you ran ComfyUI with `--listen 0.0.0.0` or similar options to allow external connections, this data **may have been tampered with** by malicious actors.

**Note:** If you only used ComfyUI locally (without `--listen` or with `--listen 127.0.0.1`), your data was not exposed to this vulnerability.

The new `__manager` path uses ComfyUI's protected system directory, which:
- **Cannot be accessed** from outside (protected by ComfyUI)
- Isolates system settings from user data
- Enables stricter security for remote access

**This is why only `config.ini` is automatically migrated** - other files (snapshots) may have been compromised and should be manually verified before copying.
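To see which layout your install currently uses, a small check like the one below can help. This is only a sketch: the `user_dir` value is a placeholder you must adjust to your own install, and the relative paths simply mirror the table above.

```python
import os

# Assumption: point this at your ComfyUI 'user' directory (see the startup log).
user_dir = "/path/to/ComfyUI/user"

legacy_config = os.path.join(user_dir, "default", "ComfyUI-Manager", "config.ini")
new_config = os.path.join(user_dir, "__manager", "config.ini")

print("legacy config present:", os.path.exists(legacy_config))   # pre-migration layout
print("migrated config present:", os.path.exists(new_config))    # post-migration layout
```

If only the legacy path exists, the migration has not run yet; if only the new path exists, you are already on the protected layout.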
---

## Automatic Migration

When you start ComfyUI with the new System User Protection API, Manager automatically handles the migration:

### Step 1: Configuration Migration

Only `config.ini` is migrated automatically.

**Important**: Snapshots are **NOT** automatically migrated. You must copy them manually if needed.

### Step 2: Security Level Check

During migration, if your security level is below `normal` (i.e., `weak` or `normal-`), it will be automatically raised to `normal`. This is a safety measure because the security level setting itself may have been tampered with in the old version.

```
======================================================================
[ComfyUI-Manager] WARNING: Security level adjusted
 - Previous: 'weak' → New: 'normal'
 - Raised to prevent unauthorized remote access.
======================================================================
```

If you need a lower security level, you can manually edit the config after migration.
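The rule described above amounts to a one-line clamp. The sketch below is only an illustration of that policy, not Manager's actual migration code:

```python
def clamp_security_level(level: str) -> str:
    # Levels below 'normal' ('weak' and 'normal-') are raised to 'normal' during migration.
    return "normal" if level in ("weak", "normal-") else level

assert clamp_security_level("weak") == "normal"
assert clamp_security_level("normal-") == "normal"
assert clamp_security_level("strong") == "strong"
```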
### Step 3: Legacy Backup

Your entire legacy directory is moved to a backup location:
```
user/__manager/.legacy-manager-backup/
```

This backup is preserved until you manually delete it.

---

## Persistent Backup Notification

As long as the backup exists, Manager will remind you on **every startup**:

```
----------------------------------------------------------------------
[ComfyUI-Manager] NOTICE: Legacy backup exists
 - Your old Manager data was backed up to:
   /path/to/ComfyUI/user/__manager/.legacy-manager-backup
 - Please verify and remove it when no longer needed.
----------------------------------------------------------------------
```

**To stop this notification**: Delete the `.legacy-manager-backup` folder inside `user/__manager/` after confirming you don't need any data from it.

---

## Recovering Old Data

### Snapshots

If you need your old snapshots, copy the contents of `.legacy-manager-backup/snapshots/` to `user/__manager/snapshots/`.
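On most systems this is a plain recursive copy. A minimal Python sketch, assuming the paths from this guide (adjust `user_dir` to your install, and verify the snapshots first as noted above):

```python
import os
import shutil

user_dir = "/path/to/ComfyUI/user"  # assumption: your ComfyUI 'user' directory
backup_snapshots = os.path.join(user_dir, "__manager", ".legacy-manager-backup", "snapshots")
new_snapshots = os.path.join(user_dir, "__manager", "snapshots")

# Merge the backed-up snapshots into the new protected snapshots directory.
if os.path.isdir(backup_snapshots):
    shutil.copytree(backup_snapshots, new_snapshots, dirs_exist_ok=True)
```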
---

## Outdated ComfyUI Warning

If you're running an older version of ComfyUI without the System User Protection API, Manager will:

1. **Force security level to `strong`** - All installations are blocked
2. **Display warning message**:

```
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
[ComfyUI-Manager] ERROR: ComfyUI version is outdated!
 - Most operations are blocked for security.
 - ComfyUI update is still allowed.
 - Please update ComfyUI to use Manager normally.
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
```

**Solution**: Update ComfyUI to v0.3.76 or later.

---

## Security Levels

| Level | What's Allowed |
|-------|----------------|
| `strong` | ComfyUI update only. All other installations blocked. |
| `normal` | Install/update/remove registered custom nodes and models. |
| `normal-` | Above + Install via Git URL or pip (localhost only). |
| `weak` | All operations allowed, including from remote connections. |

**Notes:**
- `strong` is forced on outdated ComfyUI versions.
- `normal` is the default and recommended for most users.
- `normal-` is for developers who need to install unregistered nodes locally.
- `weak` should only be used in isolated development environments.

### Changing Security Level

Edit `user/__manager/config.ini`:
```ini
[default]
security_level = normal
```
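If you prefer to change it programmatically rather than in an editor, the file is a standard INI file, so Python's `configparser` works. This is a sketch; the config path is a placeholder that must match your startup log.

```python
import configparser

config_path = "/path/to/ComfyUI/user/__manager/config.ini"  # assumption: see your startup log

config = configparser.ConfigParser()
config.read(config_path)
print("current level:", config["default"].get("security_level"))

# Only lower the level if you accept the trade-offs described in the table above.
config["default"]["security_level"] = "normal"
with open(config_path, "w") as f:
    config.write(f)
```

Restart ComfyUI afterwards so the new value is picked up.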
---

## Error Messages

### "comfyui_outdated" (HTTP 403)

This error appears when:
- Your ComfyUI doesn't have the System User Protection API
- All installations are blocked until you update ComfyUI

**Solution**: Update ComfyUI to the latest version.

### "security_level" (HTTP 403)

This error appears when:
- Your security level blocks the requested operation
- For example, `strong` level blocks all installations

**Solution**: Lower your security level in config.ini if appropriate for your use case.
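For scripts that call Manager's HTTP endpoints directly, both cases surface as a 403 response, so they can be told apart by inspecting the body. The sketch below is only an assumption about how a client might react; the endpoint URL and the exact response body format are hypothetical, not part of this guide.

```python
import requests

# Hypothetical Manager endpoint; substitute the actual route your script calls.
resp = requests.get("http://127.0.0.1:8188/api/manager/example")
if resp.status_code == 403:
    if "comfyui_outdated" in resp.text:
        print("Blocked: update ComfyUI to v0.3.76 or later.")
    elif "security_level" in resp.text:
        print("Blocked by security_level: adjust user/__manager/config.ini if appropriate.")
```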
---

## Security Warning: Suspicious Path

If you see this error on an **older** ComfyUI:

```
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
[ComfyUI-Manager] ERROR: Suspicious path detected!
 - '__manager' exists with low security level: 'weak'
 - Please verify manually:
   /path/to/ComfyUI/user/__manager/config.ini
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
```

On older ComfyUI versions, the `__manager` directory is not normally created. If this directory exists, it may have been created externally. For safety, manually verify the contents of this directory before updating ComfyUI.

---

## Troubleshooting

### All my installations are blocked

**Check 1**: Is your ComfyUI updated?
- Old ComfyUI forces `security_level = strong`
- Update ComfyUI to resolve

**Check 2**: What's your security level?
- Check `user/__manager/config.ini`
- `security_level = strong` blocks all installations

### My snapshots are missing

Snapshots are not automatically migrated. You need to manually copy the `snapshots` folder from inside `.legacy-manager-backup` to the `user/__manager/` directory.

### I keep seeing the backup notification

Delete the `.legacy-manager-backup` folder inside `user/__manager/` after confirming you don't need any data from it.

### Snapshot restore is blocked

On old ComfyUI (without System User API), snapshot restore is blocked because security is forced to `strong`. Update ComfyUI to enable snapshot restore.

---

## File Structure Reference

```
user/
└── __manager/
    ├── config.ini               # Manager configuration
    ├── channels.list            # Custom node channels
    ├── snapshots/               # Environment snapshots
    └── .legacy-manager-backup/  # Backup of old Manager data (temporary)
```

---

## Requirements

- **ComfyUI**: v0.3.76 or later (with System User Protection API)
- **ComfyUI-Manager**: V3.38 or later
@@ -23,13 +23,13 @@ OPTIONS:
 ## How To Use?
 * `python cm-cli.py` 를 통해서 실행 시킬 수 있습니다.
 * 예를 들어 custom node를 모두 업데이트 하고 싶다면
-* ComfyUI-Manager경로 에서 `python cm-cli.py update all` 를 command를 실행할 수 있습니다.
+* ComfyUI-Manager 경로에서 `python cm-cli.py update all` 명령을 실행할 수 있습니다.
 * ComfyUI 경로에서 실행한다면, `python custom_nodes/ComfyUI-Manager/cm-cli.py update all` 와 같이 cm-cli.py 의 경로를 지정할 수도 있습니다.

 ## Prerequisite
 * ComfyUI 를 실행하는 python과 동일한 python 환경에서 실행해야 합니다.
 * venv를 사용할 경우 해당 venv를 activate 한 상태에서 실행해야 합니다.
-* portable 버전을 사용할 경우 run_nvidia_gpu.bat 파일이 있는 경로인 경우, 다음과 같은 방식으로 코맨드를 실행해야 합니다.
+* portable 버전을 사용할 경우 run_nvidia_gpu.bat 파일이 있는 경로인 경우, 다음과 같은 방식으로 명령을 실행해야 합니다.
 `.\python_embeded\python.exe ComfyUI\custom_nodes\ComfyUI-Manager\cm-cli.py update all`
 * ComfyUI 의 경로는 COMFYUI_PATH 환경 변수로 설정할 수 있습니다. 만약 생략할 경우 다음과 같은 경고 메시지가 나타나며, ComfyUI-Manager가 설치된 경로를 기준으로 상대 경로로 설정됩니다.
 ```
@@ -40,8 +40,8 @@ OPTIONS:

 ### 1. --channel, --mode
 * 정보 보기 기능과 커스텀 노드 관리 기능의 경우는 --channel과 --mode를 통해 정보 DB를 설정할 수 있습니다.
-* 예들 들어 `python cm-cli.py update all --channel recent --mode remote`와 같은 command를 실행할 경우, 현재 ComfyUI-Manager repo에 내장된 로컬의 정보가 아닌 remote의 최신 정보를 기준으로 동작하며, recent channel에 있는 목록을 대상으로만 동작합니다.
+* 예를 들어 `python cm-cli.py update all --channel recent --mode remote`와 같은 명령을 실행할 경우, 현재 ComfyUI-Manager repo에 내장된 로컬의 정보가 아닌 remote의 최신 정보를 기준으로 동작하며, recent channel에 있는 목록을 대상으로만 동작합니다.
-* --channel, --mode 는 `simple-show, show, install, uninstall, update, disable, enable, fix` command에서만 사용 가능합니다.
+* --channel, --mode 는 `simple-show, show, install, uninstall, update, disable, enable, fix` 명령에서만 사용 가능합니다.

 ### 2. 관리 정보 보기

@@ -51,7 +51,7 @@ OPTIONS:
 * `[show|simple-show]` - `show`는 상세하게 정보를 보여주며, `simple-show`는 간단하게 정보를 보여줍니다.


-`python cm-cli.py show installed` 와 같은 코맨드를 실행하면 설치된 커스텀 노드의 정보를 상세하게 보여줍니다.
+`python cm-cli.py show installed` 와 같은 명령을 실행하면 설치된 커스텀 노드의 정보를 상세하게 보여줍니다.
 ```
 -= ComfyUI-Manager CLI (V2.24) =-

@@ -67,7 +67,7 @@ FETCH DATA from: https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main
 [ DISABLED ] ComfyUI-Loopchain (author: Fannovel16)
 ```

-`python cm-cli.py simple-show installed` 와 같은 코맨드를 이용해서 설치된 커스텀 노드의 정보를 간단하게 보여줍니다.
+`python cm-cli.py simple-show installed` 와 같은 명령을 이용해서 설치된 커스텀 노드의 정보를 간단하게 보여줍니다.

 ```
 -= ComfyUI-Manager CLI (V2.24) =-
@@ -89,7 +89,7 @@ ComfyUI-Loopchain
 * `installed`: enable, disable 여부와 상관없이 설치된 모든 노드를 보여줍니다
 * `not-installed`: 설치되지 않은 커스텀 노드의 목록을 보여줍니다.
 * `all`: 모든 커스텀 노드의 목록을 보여줍니다.
-* `snapshot`: 현재 설치된 커스텀 노드의 snapshot 정보를 보여줍니다. `show`롤 통해서 볼 경우는 json 출력 형태로 보여주며, `simple-show`를 통해서 볼 경우는 간단하게, 커밋 해시와 함께 보여줍니다.
+* `snapshot`: 현재 설치된 커스텀 노드의 snapshot 정보를 보여줍니다. `show`를 통해서 볼 경우는 json 출력 형태로 보여주며, `simple-show`를 통해서 볼 경우는 간단하게, 커밋 해시와 함께 보여줍니다.
 * `snapshot-list`: ComfyUI-Manager/snapshots 에 저장된 snapshot 파일의 목록을 보여줍니다.

 ### 3. 커스텀 노드 관리 하기
@@ -98,7 +98,7 @@ ComfyUI-Loopchain

 * `python cm-cli.py install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack ComfyUI_experiments` 와 같이 커스텀 노드의 이름을 나열해서 관리 기능을 적용할 수 있습니다.
 * 커스텀 노드의 이름은 `show`를 했을 때 보여주는 이름이며, git repository의 이름입니다.
-(추후 nickname 을 사용가능하돌고 업데이트 할 예정입니다.)
+(추후 nickname을 사용 가능하도록 업데이트할 예정입니다.)

 `[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]`

@@ -124,7 +124,7 @@ ComfyUI-Loopchain
 * `--pip-non-local-url`: web URL에 등록된 pip 패키지들에 대해서 복구를 수행
 * `--pip-local-url`: local 경로를 지정하고 있는 pip 패키지들에 대해서 복구를 수행
 * `--user-directory`: 사용자 디렉토리 설정
-* `--restore-to`: 복구될 커스텀 노드가 설치될 경로. (이 옵션을 적용할 경우 오직 대상 경로에 설치된 custom nodes 만 설치된 것으로 인식함.)
+* `--restore-to`: 복구될 커스텀 노드가 설치될 경로. (이 옵션을 적용할 경우 오직 대상 경로에 설치된 custom nodes만 설치된 것으로 인식함.)

 ### 5. CLI only mode

@@ -133,7 +133,7 @@ ComfyUI-Manager를 CLI로만 사용할 것인지를 설정할 수 있습니다.
 `cli-only-mode [enable|disable]`

 * security 혹은 policy 의 이유로 GUI 를 통한 ComfyUI-Manager 사용을 제한하고 싶은 경우 이 모드를 사용할 수 있습니다.
-* CLI only mode를 적용할 경우 ComfyUI-Manager 가 매우 제한된 상태로 로드되어, 내부적으로 제공하는 web API가 비활성화 되며, 메인 메뉴에서도 Manager 버튼이 표시되지 않습니다.
+* CLI only mode를 적용할 경우 ComfyUI-Manager 가 매우 제한된 상태로 로드되어, 내부적으로 제공하는 web API가 비활성화되며, 메인 메뉴에서도 Manager 버튼이 표시되지 않습니다.


 ### 6. 의존성 설치
@@ -141,10 +141,10 @@ ComfyUI-Manager를 CLI로만 사용할 것인지를 설정할 수 있습니다.
 `restore-dependencies`

 * `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
-* colab 과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행 되어야 하는 경우 사용합니다.
+* Colab과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행되어야 하는 경우 사용합니다.
-* ComfyUI을 재설치할 경우, custom_nodes 경로만 백업했다가 재설치 할 경우 활용 가능합니다.
+* ComfyUI를 재설치할 경우, custom_nodes 경로만 백업했다가 재설치할 경우 활용 가능합니다.


 ### 7. clear

-GUI에서 install, update를 하거나 snapshot 을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.
+GUI에서 install, update를 하거나 snapshot을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.
17428	extension-node-map.json
File diff suppressed because it is too large
@@ -15,12 +15,9 @@ comfy_path = os.environ.get('COMFYUI_PATH')
 git_exe_path = os.environ.get('GIT_EXE_PATH')

 if comfy_path is None:
-    print("git_helper: environment variable 'COMFYUI_PATH' is not specified.")
-    exit(-1)
-
-if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):
-    print("git_helper: '{comfy_path}' is not a valid 'COMFYUI_PATH' location.")
-    exit(-1)
+    print("\nWARN: The `COMFYUI_PATH` environment variable is not set. Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.", file=sys.stderr)
+    comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))

 def download_url(url, dest_folder, filename=None):
     # Ensure the destination folder exists
@@ -156,27 +153,27 @@ def switch_to_default_branch(repo):
|
|||||||
default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
|
default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
|
||||||
repo.git.checkout(default_branch)
|
repo.git.checkout(default_branch)
|
||||||
return True
|
return True
|
||||||
except Exception:
|
except:
|
||||||
# try checkout master
|
# try checkout master
|
||||||
# try checkout main if failed
|
# try checkout main if failed
|
||||||
try:
|
try:
|
||||||
repo.git.checkout(repo.heads.master)
|
repo.git.checkout(repo.heads.master)
|
||||||
return True
|
return True
|
||||||
except Exception:
|
except:
|
||||||
try:
|
try:
|
||||||
if remote_name is not None:
|
if remote_name is not None:
|
||||||
repo.git.checkout('-b', 'master', f'{remote_name}/master')
|
repo.git.checkout('-b', 'master', f'{remote_name}/master')
|
||||||
return True
|
return True
|
||||||
except Exception:
|
except:
|
||||||
try:
|
try:
|
||||||
repo.git.checkout(repo.heads.main)
|
repo.git.checkout(repo.heads.main)
|
||||||
return True
|
return True
|
||||||
except Exception:
|
except:
|
||||||
try:
|
try:
|
||||||
if remote_name is not None:
|
if remote_name is not None:
|
||||||
repo.git.checkout('-b', 'main', f'{remote_name}/main')
|
repo.git.checkout('-b', 'main', f'{remote_name}/main')
|
||||||
return True
|
return True
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
print("[ComfyUI Manager] Failed to switch to the default branch")
|
print("[ComfyUI Manager] Failed to switch to the default branch")
|
||||||
@@ -447,7 +444,7 @@ def restore_pip_snapshot(pips, options):
|
|||||||
res = 1
|
res = 1
|
||||||
try:
|
try:
|
||||||
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install'] + non_url)
|
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install'] + non_url)
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# fallback
|
# fallback
|
||||||
@@ -456,7 +453,7 @@ def restore_pip_snapshot(pips, options):
|
|||||||
res = 1
|
res = 1
|
||||||
try:
|
try:
|
||||||
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
|
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if res != 0:
|
if res != 0:
|
||||||
@@ -467,7 +464,7 @@ def restore_pip_snapshot(pips, options):
|
|||||||
res = 1
|
res = 1
|
||||||
try:
|
try:
|
||||||
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
|
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if res != 0:
|
if res != 0:
|
||||||
@@ -478,7 +475,7 @@ def restore_pip_snapshot(pips, options):
|
|||||||
res = 1
|
res = 1
|
||||||
try:
|
try:
|
||||||
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
|
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if res != 0:
|
if res != 0:
|
||||||
15161	github-stats.json
File diff suppressed because it is too large
@@ -2,16 +2,20 @@
 This directory contains the Python backend modules that power ComfyUI-Manager, handling the core functionality of node management, downloading, security, and server operations.

-## Directory Structure
-- **glob/** - code for new cacheless ComfyUI-Manager
-- **legacy/** - code for legacy ComfyUI-Manager
-
 ## Core Modules

 - **manager_core.py**: The central implementation of management functions, handling configuration, installation, updates, and node management.
 - **manager_server.py**: Implements server functionality and API endpoints for the web interface to interact with the backend.
+- **manager_downloader.py**: Handles downloading operations for models, extensions, and other resources.
+- **manager_util.py**: Provides utility functions used throughout the system.

 ## Specialized Modules

+- **cm_global.py**: Maintains global variables and state management across the system.
+- **cnr_utils.py**: Helper utilities for interacting with the custom node registry (CNR).
+- **git_utils.py**: Git-specific utilities for repository operations.
+- **node_package.py**: Handles the packaging and installation of node extensions.
+- **security_check.py**: Implements the multi-level security system for installation safety.
 - **share_3rdparty.py**: Manages integration with third-party sharing platforms.

 ## Architecture
@@ -6,9 +6,8 @@ import time
|
|||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
from . import context
|
import manager_core
|
||||||
from . import manager_util
|
import manager_util
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
import toml
|
import toml
|
||||||
|
|
||||||
@@ -48,9 +47,9 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
# Get ComfyUI version tag
|
# Get ComfyUI version tag
|
||||||
if is_desktop:
|
if is_desktop:
|
||||||
# extract version from pyproject.toml instead of git tag
|
# extract version from pyproject.toml instead of git tag
|
||||||
comfyui_ver = context.get_current_comfyui_ver() or 'unknown'
|
comfyui_ver = manager_core.get_current_comfyui_ver() or 'unknown'
|
||||||
else:
|
else:
|
||||||
comfyui_ver = context.get_comfyui_tag() or 'unknown'
|
comfyui_ver = manager_core.get_comfyui_tag() or 'unknown'
|
||||||
|
|
||||||
if is_desktop:
|
if is_desktop:
|
||||||
if is_windows:
|
if is_windows:
|
||||||
@@ -112,7 +111,7 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
json_obj = await fetch_all()
|
json_obj = await fetch_all()
|
||||||
manager_util.save_to_cache(uri, json_obj)
|
manager_util.save_to_cache(uri, json_obj)
|
||||||
return json_obj['nodes']
|
return json_obj['nodes']
|
||||||
except Exception:
|
except:
|
||||||
res = {}
|
res = {}
|
||||||
print("Cannot connect to comfyregistry.")
|
print("Cannot connect to comfyregistry.")
|
||||||
finally:
|
finally:
|
||||||
@@ -180,7 +179,7 @@ def install_node(node_id, version=None):
|
|||||||
else:
|
else:
|
||||||
url = f"{base_url}/nodes/{node_id}/install?version={version}"
|
url = f"{base_url}/nodes/{node_id}/install?version={version}"
|
||||||
|
|
||||||
response = requests.get(url)
|
response = requests.get(url, verify=not manager_util.bypass_ssl)
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
# Convert the API response to a NodeVersion object
|
# Convert the API response to a NodeVersion object
|
||||||
return map_node_version(response.json())
|
return map_node_version(response.json())
|
||||||
@@ -191,7 +190,7 @@ def install_node(node_id, version=None):
|
|||||||
def all_versions_of_node(node_id):
|
def all_versions_of_node(node_id):
|
||||||
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
|
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
|
||||||
|
|
||||||
response = requests.get(url)
|
response = requests.get(url, verify=not manager_util.bypass_ssl)
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
return response.json()
|
return response.json()
|
||||||
else:
|
else:
|
||||||
@@ -237,7 +236,7 @@ def generate_cnr_id(fullpath, cnr_id):
|
|||||||
if not os.path.exists(cnr_id_path):
|
if not os.path.exists(cnr_id_path):
|
||||||
with open(cnr_id_path, "w") as f:
|
with open(cnr_id_path, "w") as f:
|
||||||
return f.write(cnr_id)
|
return f.write(cnr_id)
|
||||||
except Exception:
|
except:
|
||||||
print(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
|
print(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
|
||||||
|
|
||||||
|
|
||||||
@@ -247,7 +246,7 @@ def read_cnr_id(fullpath):
|
|||||||
if os.path.exists(cnr_id_path):
|
if os.path.exists(cnr_id_path):
|
||||||
with open(cnr_id_path) as f:
|
with open(cnr_id_path) as f:
|
||||||
return f.read().strip()
|
return f.read().strip()
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return None
|
return None
|
||||||
@@ -23,6 +23,7 @@ import yaml
|
|||||||
import zipfile
|
import zipfile
|
||||||
import traceback
|
import traceback
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
import toml
|
||||||
|
|
||||||
orig_print = print
|
orig_print = print
|
||||||
|
|
||||||
@@ -31,17 +32,19 @@ from packaging import version
|
|||||||
|
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from ..common import cm_global
|
glob_path = os.path.join(os.path.dirname(__file__)) # ComfyUI-Manager/glob
|
||||||
from ..common import cnr_utils
|
sys.path.append(glob_path)
|
||||||
from ..common import manager_util
|
|
||||||
from ..common import git_utils
|
import cm_global
|
||||||
from ..common import manager_downloader
|
import cnr_utils
|
||||||
from ..common.node_package import InstalledNodePackage
|
import manager_util
|
||||||
from ..common.enums import NetworkMode, SecurityLevel, DBMode
|
import git_utils
|
||||||
from ..common import context
|
import manager_downloader
|
||||||
|
import manager_migration
|
||||||
|
from node_package import InstalledNodePackage
|
||||||
|
|
||||||
|
|
||||||
version_code = [4, 0]
|
version_code = [3, 38]
|
||||||
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
|
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
|
||||||
|
|
||||||
|
|
||||||
@@ -56,14 +59,13 @@ class InvalidChannel(Exception):
|
|||||||
self.channel = channel
|
self.channel = channel
|
||||||
super().__init__(channel)
|
super().__init__(channel)
|
||||||
|
|
||||||
|
|
||||||
def get_default_custom_nodes_path():
|
def get_default_custom_nodes_path():
|
||||||
global default_custom_nodes_path
|
global default_custom_nodes_path
|
||||||
if default_custom_nodes_path is None:
|
if default_custom_nodes_path is None:
|
||||||
try:
|
try:
|
||||||
import folder_paths
|
import folder_paths
|
||||||
default_custom_nodes_path = folder_paths.get_folder_paths("custom_nodes")[0]
|
default_custom_nodes_path = folder_paths.get_folder_paths("custom_nodes")[0]
|
||||||
except Exception:
|
except:
|
||||||
default_custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
default_custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
||||||
|
|
||||||
return default_custom_nodes_path
|
return default_custom_nodes_path
|
||||||
@@ -73,11 +75,37 @@ def get_custom_nodes_paths():
|
|||||||
try:
|
try:
|
||||||
import folder_paths
|
import folder_paths
|
||||||
return folder_paths.get_folder_paths("custom_nodes")
|
return folder_paths.get_folder_paths("custom_nodes")
|
||||||
except Exception:
|
except:
|
||||||
custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
||||||
return [custom_nodes_path]
|
return [custom_nodes_path]
|
||||||
|
|
||||||
|
|
||||||
|
def get_comfyui_tag():
|
||||||
|
try:
|
||||||
|
repo = git.Repo(comfy_path)
|
||||||
|
return repo.git.describe('--tags')
|
||||||
|
except:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_comfyui_ver():
|
||||||
|
"""
|
||||||
|
Extract version from pyproject.toml
|
||||||
|
"""
|
||||||
|
toml_path = os.path.join(comfy_path, 'pyproject.toml')
|
||||||
|
if not os.path.exists(toml_path):
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
with open(toml_path, "r", encoding="utf-8") as f:
|
||||||
|
data = toml.load(f)
|
||||||
|
|
||||||
|
project = data.get('project', {})
|
||||||
|
return project.get('version')
|
||||||
|
except:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_script_env():
|
def get_script_env():
|
||||||
new_env = os.environ.copy()
|
new_env = os.environ.copy()
|
||||||
git_exe = get_config().get('git_exe')
|
git_exe = get_config().get('git_exe')
|
||||||
@@ -85,10 +113,10 @@ def get_script_env():
|
|||||||
new_env['GIT_EXE_PATH'] = git_exe
|
new_env['GIT_EXE_PATH'] = git_exe
|
||||||
|
|
||||||
if 'COMFYUI_PATH' not in new_env:
|
if 'COMFYUI_PATH' not in new_env:
|
||||||
new_env['COMFYUI_PATH'] = context.comfy_path
|
new_env['COMFYUI_PATH'] = comfy_path
|
||||||
|
|
||||||
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
|
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
|
||||||
new_env['COMFYUI_FOLDERS_BASE_PATH'] = context.comfy_path
|
new_env['COMFYUI_FOLDERS_BASE_PATH'] = comfy_path
|
||||||
|
|
||||||
return new_env
|
return new_env
|
||||||
|
|
||||||
@@ -110,12 +138,12 @@ def check_invalid_nodes():
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
import folder_paths
|
import folder_paths
|
||||||
except Exception:
|
except:
|
||||||
try:
|
try:
|
||||||
sys.path.append(context.comfy_path)
|
sys.path.append(comfy_path)
|
||||||
import folder_paths
|
import folder_paths
|
||||||
except Exception:
|
except:
|
||||||
raise Exception(f"Invalid COMFYUI_FOLDERS_BASE_PATH: {context.comfy_path}")
|
raise Exception(f"Invalid COMFYUI_FOLDERS_BASE_PATH: {comfy_path}")
|
||||||
|
|
||||||
def check(root):
|
def check(root):
|
||||||
global invalid_nodes
|
global invalid_nodes
|
||||||
@@ -150,6 +178,76 @@ def check_invalid_nodes():
|
|||||||
print("\n---------------------------------------------------------------------------\n")
|
print("\n---------------------------------------------------------------------------\n")
|
||||||
|
|
||||||
|
|
||||||
|
# read env vars
|
||||||
|
comfy_path: str = os.environ.get('COMFYUI_PATH')
|
||||||
|
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
|
||||||
|
|
||||||
|
if comfy_path is None:
|
||||||
|
try:
|
||||||
|
import folder_paths
|
||||||
|
comfy_path = os.path.join(os.path.dirname(folder_paths.__file__))
|
||||||
|
except:
|
||||||
|
comfy_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..', '..'))
|
||||||
|
|
||||||
|
if comfy_base_path is None:
|
||||||
|
comfy_base_path = comfy_path
|
||||||
|
|
||||||
|
|
||||||
|
channel_list_template_path = os.path.join(manager_util.comfyui_manager_path, 'channels.list.template')
|
||||||
|
git_script_path = os.path.join(manager_util.comfyui_manager_path, "git_helper.py")
|
||||||
|
|
||||||
|
manager_files_path = None
|
||||||
|
manager_config_path = None
|
||||||
|
manager_channel_list_path = None
|
||||||
|
manager_startup_script_path:str = None
|
||||||
|
manager_snapshot_path = None
|
||||||
|
manager_pip_overrides_path = None
|
||||||
|
manager_pip_blacklist_path = None
|
||||||
|
manager_components_path = None
|
||||||
|
|
||||||
|
def update_user_directory(user_dir):
|
||||||
|
global manager_files_path
|
||||||
|
global manager_config_path
|
||||||
|
global manager_channel_list_path
|
||||||
|
global manager_startup_script_path
|
||||||
|
global manager_snapshot_path
|
||||||
|
global manager_pip_overrides_path
|
||||||
|
global manager_pip_blacklist_path
|
||||||
|
global manager_components_path
|
||||||
|
|
||||||
|
manager_files_path = manager_migration.get_manager_path(user_dir)
|
||||||
|
if not os.path.exists(manager_files_path):
|
||||||
|
os.makedirs(manager_files_path)
|
||||||
|
manager_migration.run_migration_checks(user_dir, manager_files_path)
|
||||||
|
|
||||||
|
manager_snapshot_path = os.path.join(manager_files_path, "snapshots")
|
||||||
|
if not os.path.exists(manager_snapshot_path):
|
||||||
|
os.makedirs(manager_snapshot_path)
|
||||||
|
|
||||||
|
manager_startup_script_path = os.path.join(manager_files_path, "startup-scripts")
|
||||||
|
if not os.path.exists(manager_startup_script_path):
|
||||||
|
os.makedirs(manager_startup_script_path)
|
||||||
|
|
||||||
|
manager_config_path = os.path.join(manager_files_path, 'config.ini')
|
||||||
|
manager_channel_list_path = os.path.join(manager_files_path, 'channels.list')
|
||||||
|
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
|
||||||
|
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
|
||||||
|
manager_components_path = os.path.join(manager_files_path, "components")
|
||||||
|
manager_util.cache_dir = os.path.join(manager_files_path, "cache")
|
||||||
|
|
||||||
|
if not os.path.exists(manager_util.cache_dir):
|
||||||
|
os.makedirs(manager_util.cache_dir)
|
||||||
|
|
||||||
|
try:
|
||||||
|
import folder_paths
|
||||||
|
update_user_directory(folder_paths.get_user_directory())
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
# fallback:
|
||||||
|
# This case is only possible when running with cm-cli, and in practice, this case is not actually used.
|
||||||
|
update_user_directory(os.path.abspath(manager_util.comfyui_manager_path))
|
||||||
|
|
||||||
|
|
||||||
cached_config = None
|
cached_config = None
|
||||||
js_path = None
|
js_path = None
|
||||||
|
|
||||||
@@ -304,18 +402,86 @@ class ManagedResult:
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class NormalizedKeyDict:
|
||||||
|
def __init__(self):
|
||||||
|
self._store = {}
|
||||||
|
self._key_map = {}
|
||||||
|
|
||||||
|
def _normalize_key(self, key):
|
||||||
|
if isinstance(key, str):
|
||||||
|
return key.strip().lower()
|
||||||
|
return key
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
self._key_map[norm_key] = key
|
||||||
|
self._store[key] = value
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
original_key = self._key_map[norm_key]
|
||||||
|
return self._store[original_key]
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
norm_key = self._normalize_key(key)
|
||||||
|
original_key = self._key_map.pop(norm_key)
|
||||||
|
del self._store[original_key]
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return self._normalize_key(key) in self._key_map
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return self[key] if key in self else default
|
||||||
|
|
||||||
|
def setdefault(self, key, default=None):
|
||||||
|
if key in self:
|
||||||
|
return self[key]
|
||||||
|
self[key] = default
|
||||||
|
return default
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
if key in self:
|
||||||
|
val = self[key]
|
||||||
|
del self[key]
|
||||||
|
return val
|
||||||
|
if default is not None:
|
||||||
|
return default
|
||||||
|
raise KeyError(key)
|
||||||
|
|
||||||
|
def keys(self):
|
||||||
|
return self._store.keys()
|
||||||
|
|
||||||
|
def values(self):
|
||||||
|
return self._store.values()
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
return self._store.items()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(self._store)
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._store)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return repr(self._store)
|
||||||
|
|
||||||
|
def to_dict(self):
|
||||||
|
return dict(self._store)
|
||||||
|
|
||||||
|
|
||||||
class UnifiedManager:
|
class UnifiedManager:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.installed_node_packages: dict[str, InstalledNodePackage] = {}
|
self.installed_node_packages: dict[str, InstalledNodePackage] = {}
|
||||||
|
|
||||||
self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
|
self.cnr_inactive_nodes = NormalizedKeyDict() # node_id -> node_version -> fullpath
|
||||||
self.nightly_inactive_nodes = {} # node_id -> fullpath
|
self.nightly_inactive_nodes = NormalizedKeyDict() # node_id -> fullpath
|
||||||
self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.active_nodes = {} # node_id -> node_version * fullpath
|
self.active_nodes = NormalizedKeyDict() # node_id -> node_version * fullpath
|
||||||
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.cnr_map = {} # node_id -> cnr info
|
self.cnr_map = NormalizedKeyDict() # node_id -> cnr info
|
||||||
self.repo_cnr_map = {} # repo_url -> cnr info
|
self.repo_cnr_map = {} # repo_url -> cnr info
|
||||||
self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
|
self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
|
||||||
self.processed_install = set()
|
self.processed_install = set()
|
||||||
|
|
||||||
def get_module_name(self, x):
|
def get_module_name(self, x):
|
||||||
@@ -457,7 +623,7 @@ class UnifiedManager:
|
|||||||
ver = str(manager_util.StrictVersion(info['version']))
|
ver = str(manager_util.StrictVersion(info['version']))
|
||||||
return {'id': cnr['id'], 'cnr': cnr, 'ver': ver}
|
return {'id': cnr['id'], 'cnr': cnr, 'ver': ver}
|
||||||
else:
|
else:
|
||||||
return {'id': info['id'], 'ver': info['version']}
|
return None
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -633,9 +799,7 @@ class UnifiedManager:
|
|||||||
|
|
||||||
return latest
|
return latest
|
||||||
|
|
||||||
async def reload(self, cache_mode, dont_wait=True, update_cnr_map=True):
|
async def reload(self, cache_mode, dont_wait=True):
|
||||||
import folder_paths
|
|
||||||
|
|
||||||
self.custom_node_map_cache = {}
|
self.custom_node_map_cache = {}
|
||||||
self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
|
self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
|
||||||
self.nightly_inactive_nodes = {} # node_id -> fullpath
|
self.nightly_inactive_nodes = {} # node_id -> fullpath
|
||||||
@@ -643,18 +807,17 @@ class UnifiedManager:
|
|||||||
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
self.unknown_active_nodes = {} # node_id -> repo url * fullpath
|
||||||
self.active_nodes = {} # node_id -> node_version * fullpath
|
self.active_nodes = {} # node_id -> node_version * fullpath
|
||||||
|
|
||||||
if get_config()['network_mode'] != 'public' or manager_util.is_manager_pip_package():
|
if get_config()['network_mode'] != 'public':
|
||||||
dont_wait = True
|
dont_wait = True
|
||||||
|
|
||||||
if update_cnr_map:
|
# reload 'cnr_map' and 'repo_cnr_map'
|
||||||
# reload 'cnr_map' and 'repo_cnr_map'
|
cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode=='cache', dont_wait=dont_wait)
|
||||||
cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode=='cache', dont_wait=dont_wait)
|
|
||||||
|
|
||||||
for x in cnrs:
|
for x in cnrs:
|
||||||
self.cnr_map[x['id']] = x
|
self.cnr_map[x['id']] = x
|
||||||
if 'repository' in x:
|
if 'repository' in x:
|
||||||
normalized_url = git_utils.normalize_url(x['repository'])
|
normalized_url = git_utils.normalize_url(x['repository'])
|
||||||
self.repo_cnr_map[normalized_url] = x
|
self.repo_cnr_map[normalized_url] = x
|
||||||
|
|
||||||
# reload node status info from custom_nodes/*
|
# reload node status info from custom_nodes/*
|
||||||
for custom_nodes_path in folder_paths.get_folder_paths('custom_nodes'):
|
for custom_nodes_path in folder_paths.get_folder_paths('custom_nodes'):
|
||||||
@@ -702,7 +865,7 @@ class UnifiedManager:
if 'id' in x:
if x['id'] not in res:
res[x['id']] = (x, True)
-except Exception:
+except:
logging.error(f"[ComfyUI-Manager] broken item:{x}")

return res

@@ -721,7 +884,7 @@ class UnifiedManager:
channel = normalize_channel(channel)
nodes = await self.load_nightly(channel, mode)

-res = {}
+res = NormalizedKeyDict()
added_cnr = set()
for v in nodes.values():
v = v[0]

@@ -755,7 +918,7 @@ class UnifiedManager:
def safe_version(ver_str):
try:
return version.parse(ver_str)
-except Exception:
+except:
return version.parse("0.0.0")

def execute_install_script(self, url, repo_path, instant_execution=False, lazy_mode=False, no_deps=False):
@@ -769,7 +932,7 @@ class UnifiedManager:
else:
if os.path.exists(requirements_path) and not no_deps:
print("Install: pip packages")
-pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
+pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
lines = manager_util.robust_readlines(requirements_path)
for line in lines:
package_name = remap_pip_package(line.strip())

@@ -791,7 +954,7 @@ class UnifiedManager:
return res

def reserve_cnr_switch(self, target, zip_url, from_path, to_path, no_deps):
-script_path = os.path.join(context.manager_startup_script_path, "install-scripts.txt")
+script_path = os.path.join(manager_startup_script_path, "install-scripts.txt")
with open(script_path, "a") as file:
obj = [target, "#LAZY-CNR-SWITCH-SCRIPT", zip_url, from_path, to_path, no_deps, get_default_custom_nodes_path(), sys.executable]
file.write(f"{obj}\n")

@@ -1197,7 +1360,7 @@ class UnifiedManager:
print(f"Download: git clone '{clone_url}'")

if not instant_execution and platform.system() == 'Windows':
-res = manager_funcs.run_script([sys.executable, context.git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
+res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
if res != 0:
return result.fail(f"Failed to clone repo: {clone_url}")
else:
@@ -1323,6 +1486,7 @@ class UnifiedManager:
return ManagedResult('skip')
elif self.is_disabled(node_id):
return self.unified_enable(node_id)

else:
version_spec = self.resolve_unspecified_version(node_id)

@@ -1349,20 +1513,12 @@ class UnifiedManager:
return self.unified_enable(node_id, version_spec)

elif version_spec == 'unknown' or version_spec == 'nightly':
-to_path = os.path.abspath(os.path.join(get_default_custom_nodes_path(), node_id))
-
if version_spec == 'nightly':
# disable cnr nodes
if self.is_enabled(node_id, 'cnr'):
self.unified_disable(node_id, False)

-# use `repo name` as a dir name instead of `cnr id` if system added nodepack (i.e. publisher is null)
-cnr = self.cnr_map.get(node_id)
-
-if cnr is not None and cnr.get('publisher') is None:
-repo_name = os.path.basename(git_utils.normalize_url(repo_url))
-to_path = os.path.abspath(os.path.join(get_default_custom_nodes_path(), repo_name))
-
+to_path = os.path.abspath(os.path.join(get_default_custom_nodes_path(), node_id))
res = self.repo_install(repo_url, to_path, instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall)
if res.result:
if version_spec == 'unknown':
@@ -1423,7 +1579,7 @@ def identify_node_pack_from_path(fullpath):
if github_id is None:
try:
github_id = os.path.basename(repo_url)
-except Exception:
+except:
logging.warning(f"[ComfyUI-Manager] unexpected repo url: {repo_url}")
github_id = module_name

@@ -1478,10 +1634,10 @@ def get_channel_dict():
if channel_dict is None:
channel_dict = {}

-if not os.path.exists(context.manager_channel_list_path):
-shutil.copy(context.channel_list_template_path, context.manager_channel_list_path)
+if not os.path.exists(manager_channel_list_path):
+shutil.copy(channel_list_template_path, manager_channel_list_path)

-with open(context.manager_channel_list_path, 'r') as file:
+with open(manager_channel_list_path, 'r') as file:
channels = file.read()
for x in channels.split('\n'):
channel_info = x.split("::")
@@ -1545,25 +1701,27 @@ def write_config():
'db_mode': get_config()['db_mode'],
}

-directory = os.path.dirname(context.manager_config_path)
+directory = os.path.dirname(manager_config_path)
if not os.path.exists(directory):
os.makedirs(directory)

-with open(context.manager_config_path, 'w') as configfile:
+with open(manager_config_path, 'w') as configfile:
config.write(configfile)


def read_config():
try:
config = configparser.ConfigParser(strict=False)
-config.read(context.manager_config_path)
+config.read(manager_config_path)
default_conf = config['default']
-manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False

def get_bool(key, default_value):
return default_conf[key].lower() == 'true' if key in default_conf else False

-return {
+manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
+manager_util.bypass_ssl = get_bool('bypass_ssl', False)
+
+result = {
'http_channel_enabled': get_bool('http_channel_enabled', False),
'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
'git_exe': default_conf.get('git_exe', ''),
@@ -1579,22 +1737,28 @@ def read_config():
'model_download_by_agent': get_bool('model_download_by_agent', False),
'downgrade_blacklist': default_conf.get('downgrade_blacklist', '').lower(),
'always_lazy_install': get_bool('always_lazy_install', False),
-'network_mode': default_conf.get('network_mode', NetworkMode.PUBLIC.value).lower(),
-'security_level': default_conf.get('security_level', SecurityLevel.NORMAL.value).lower(),
-'db_mode': default_conf.get('db_mode', DBMode.CACHE.value).lower(),
+'network_mode': default_conf.get('network_mode', 'public').lower(),
+'security_level': default_conf.get('security_level', 'normal').lower(),
+'db_mode': default_conf.get('db_mode', 'cache').lower(),
}
+manager_migration.force_security_level_if_needed(result)
+return result

except Exception:
-manager_util.use_uv = False
-return {
+import importlib.util
+# temporary disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
+manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
+manager_util.bypass_ssl = False
+
+result = {
'http_channel_enabled': False,
'preview_method': manager_funcs.get_current_preview_method(),
'git_exe': '',
-'use_uv': False,
+'use_uv': manager_util.use_uv,
'channel_url': DEFAULT_CHANNEL,
'default_cache_as_channel_url': False,
'share_option': 'all',
-'bypass_ssl': False,
+'bypass_ssl': manager_util.bypass_ssl,
'file_logging': True,
'component_policy': 'workflow',
'update_policy': 'stable-comfyui',

@@ -1602,10 +1766,12 @@ def read_config():
'model_download_by_agent': False,
'downgrade_blacklist': '',
'always_lazy_install': False,
-'network_mode': NetworkMode.PUBLIC.value,
-'security_level': SecurityLevel.NORMAL.value,
-'db_mode': DBMode.CACHE.value,
+'network_mode': 'public', # public | private | offline
+'security_level': 'normal', # strong | normal | normal- | weak
+'db_mode': 'cache', # local | cache | remote
}
+manager_migration.force_security_level_if_needed(result)
+return result


def get_config():
@@ -1648,27 +1814,27 @@ def switch_to_default_branch(repo):
default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
repo.git.checkout(default_branch)
return True
-except Exception:
+except:
# try checkout master
# try checkout main if failed
try:
repo.git.checkout(repo.heads.master)
return True
-except Exception:
+except:
try:
if remote_name is not None:
repo.git.checkout('-b', 'master', f'{remote_name}/master')
return True
-except Exception:
+except:
try:
repo.git.checkout(repo.heads.main)
return True
-except Exception:
+except:
try:
if remote_name is not None:
repo.git.checkout('-b', 'main', f'{remote_name}/main')
return True
-except Exception:
+except:
pass

print("[ComfyUI Manager] Failed to switch to the default branch")

@@ -1676,10 +1842,10 @@ def switch_to_default_branch(repo):


def reserve_script(repo_path, install_cmds):
-if not os.path.exists(context.manager_startup_script_path):
-os.makedirs(context.manager_startup_script_path)
+if not os.path.exists(manager_startup_script_path):
+os.makedirs(manager_startup_script_path)

-script_path = os.path.join(context.manager_startup_script_path, "install-scripts.txt")
+script_path = os.path.join(manager_startup_script_path, "install-scripts.txt")
with open(script_path, "a") as file:
obj = [repo_path] + install_cmds
file.write(f"{obj}\n")
@@ -1719,7 +1885,7 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
print(f"[WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version.")
print("[WARN] The extension installation feature may not work properly in the current installed ComfyUI version on Windows environment.")
print("###################################################################\n\n")
-except Exception:
+except:
pass

if code != 0:

@@ -1734,11 +1900,11 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
# use subprocess to avoid file system lock by git (Windows)
def __win_check_git_update(path, do_fetch=False, do_update=False):
if do_fetch:
-command = [sys.executable, context.git_script_path, "--fetch", path]
+command = [sys.executable, git_script_path, "--fetch", path]
elif do_update:
-command = [sys.executable, context.git_script_path, "--pull", path]
+command = [sys.executable, git_script_path, "--pull", path]
else:
-command = [sys.executable, context.git_script_path, "--check", path]
+command = [sys.executable, git_script_path, "--check", path]

new_env = get_script_env()
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=get_default_custom_nodes_path(), env=new_env)

@@ -1792,7 +1958,7 @@ def __win_check_git_update(path, do_fetch=False, do_update=False):


def __win_check_git_pull(path):
-command = [sys.executable, context.git_script_path, "--pull", path]
+command = [sys.executable, git_script_path, "--pull", path]
process = subprocess.Popen(command, env=get_script_env(), cwd=get_default_custom_nodes_path())
process.wait()

@@ -1808,7 +1974,7 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
else:
if os.path.exists(requirements_path) and not no_deps:
print("Install: pip packages")
-pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
+pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
with open(requirements_path, "r") as requirements_file:
for line in requirements_file:
#handle comments
@@ -2044,7 +2210,7 @@ async def gitclone_install(url, instant_execution=False, msg_prefix='', no_deps=
clone_url = git_utils.get_url_for_clone(url)

if not instant_execution and platform.system() == 'Windows':
-res = manager_funcs.run_script([sys.executable, context.git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
+res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
if res != 0:
return result.fail(f"Failed to clone '{clone_url}' into '{repo_path}'")
else:

@@ -2115,7 +2281,7 @@ async def get_data_by_mode(mode, filename, channel_url=None):
cache_uri = str(manager_util.simple_hash(uri))+'_'+filename
cache_uri = os.path.join(manager_util.cache_dir, cache_uri)

-if get_config()['network_mode'] == 'offline' or manager_util.is_manager_pip_package():
+if get_config()['network_mode'] == 'offline':
# offline network mode
if os.path.exists(cache_uri):
json_obj = await manager_util.get_data(cache_uri)

@@ -2135,7 +2301,7 @@ async def get_data_by_mode(mode, filename, channel_url=None):
with open(cache_uri, "w", encoding='utf-8') as file:
json.dump(json_obj, file, indent=4, sort_keys=True)
except Exception as e:
-print(f"[ComfyUI-Manager] Due to a network error, switching to local mode.\n=> {filename} @ {channel_url}/{mode}\n=> {e}")
+print(f"[ComfyUI-Manager] Due to a network error, switching to local mode.\n=> {filename}\n=> {e}")
uri = os.path.join(manager_util.comfyui_manager_path, filename)
json_obj = await manager_util.get_data(uri)
@@ -2206,7 +2372,7 @@ def gitclone_uninstall(files):
url = url[:-1]
try:
for custom_nodes_dir in get_custom_nodes_paths():
-dir_name:str = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
+dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
dir_path = os.path.join(custom_nodes_dir, dir_name)

# safety check

@@ -2254,7 +2420,7 @@ def gitclone_set_active(files, is_disable):
url = url[:-1]
try:
for custom_nodes_dir in get_custom_nodes_paths():
-dir_name:str = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
+dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
dir_path = os.path.join(custom_nodes_dir, dir_name)

# safety check

@@ -2351,7 +2517,7 @@ def update_to_stable_comfyui(repo_path):
repo = git.Repo(repo_path)
try:
repo.git.checkout(repo.heads.master)
-except Exception:
+except:
logging.error(f"[ComfyUI-Manager] Failed to checkout 'master' branch.\nrepo_path={repo_path}\nAvailable branches:")
for branch in repo.branches:
logging.error('\t'+branch.name)
@@ -2373,8 +2539,9 @@ def update_to_stable_comfyui(repo_path):
else:
logging.info(f"[ComfyUI-Manager] Updating ComfyUI: {current_tag} -> {latest_tag}")
repo.git.checkout(latest_tag)
+execute_install_script("ComfyUI", repo_path, instant_execution=False, no_deps=False)
return 'updated', latest_tag
-except Exception:
+except:
traceback.print_exc()
return "fail", None

@@ -2504,9 +2671,13 @@ def check_state_of_git_node_pack_single(item, do_fetch=False, do_update_check=Tr


def get_installed_pip_packages():
+try:
# extract pip package infos
cmd = manager_util.make_pip_cmd(['freeze'])
pips = subprocess.check_output(cmd, text=True).split('\n')
+except Exception as e:
+logging.warning("[ComfyUI-Manager] Could not enumerate pip packages for snapshot: %s", e)
+return {}

res = {}
for x in pips:
@@ -2527,7 +2698,7 @@ async def get_current_snapshot(custom_nodes_only = False):
await unified_manager.get_custom_nodes('default', 'cache')

# Get ComfyUI hash
-repo_path = context.comfy_path
+repo_path = comfy_path

comfyui_commit_hash = None
if not custom_nodes_only:

@@ -2572,7 +2743,7 @@
commit_hash = git_utils.get_commit_hash(fullpath)
url = git_utils.git_url(fullpath)
git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled)
-except Exception:
+except:
print(f"Failed to extract snapshots for the custom node '{path}'.")

elif path.endswith('.py'):

@@ -2603,7 +2774,7 @@ async def save_snapshot_with_postfix(postfix, path=None, custom_nodes_only = Fal
date_time_format = now.strftime("%Y-%m-%d_%H-%M-%S")
file_name = f"{date_time_format}_{postfix}"

-path = os.path.join(context.manager_snapshot_path, f"{file_name}.json")
+path = os.path.join(manager_snapshot_path, f"{file_name}.json")
else:
file_name = path.replace('\\', '/').split('/')[-1]
file_name = file_name.split('.')[-2]
@@ -2630,7 +2801,7 @@ async def extract_nodes_from_workflow(filepath, mode='local', channel_url='defau
with open(filepath, "r", encoding="UTF-8", errors="ignore") as json_file:
try:
workflow = json.load(json_file)
-except Exception:
+except:
print(f"Invalid workflow file: {filepath}")
exit(-1)

@@ -2643,7 +2814,7 @@ async def extract_nodes_from_workflow(filepath, mode='local', channel_url='defau
else:
try:
workflow = json.loads(img.info['workflow'])
-except Exception:
+except:
print(f"This is not a valid .png file containing a ComfyUI workflow: {filepath}")
exit(-1)

@@ -2791,7 +2962,7 @@ async def get_unified_total_nodes(channel, mode, regsitry_cache_mode='cache'):

if cnr_id is not None:
# cnr or nightly version
-cnr_ids.remove(cnr_id)
+cnr_ids.discard(cnr_id)
updatable = False
cnr = unified_manager.cnr_map[cnr_id]
@@ -2914,7 +3085,7 @@ def populate_github_stats(node_packs, json_obj_github):
v['stars'] = -1
v['last_update'] = -1
v['trust'] = False
-except Exception:
+except:
logging.error(f"[ComfyUI-Manager] DB item is broken:\n{v}")


@@ -2955,6 +3126,11 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
info = info['custom_nodes']

+if 'pips' in info and info['pips']:
+pips = info['pips']
+else:
+pips = {}
+
# for cnr restore
cnr_info = info.get('cnr_custom_nodes')
if cnr_info is not None:

@@ -3161,6 +3337,8 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
cloned_repos.append(repo_name)

+manager_util.restore_pip_snapshot(pips, git_helper_extras)
+
# print summary
for x in cloned_repos:
print(f"[ INSTALLED ] {x}")
@@ -3185,12 +3363,12 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):

def get_comfyui_versions(repo=None):
if repo is None:
-repo = git.Repo(context.comfy_path)
+repo = git.Repo(comfy_path)

try:
remote = get_remote_name(repo)
repo.remotes[remote].fetch()
-except Exception:
+except:
logging.error("[ComfyUI-Manager] Failed to fetch ComfyUI")

versions = [x.name for x in repo.tags if x.name.startswith('v')]

@@ -3219,7 +3397,7 @@ def get_comfyui_versions(repo=None):


def switch_comfyui(tag):
-repo = git.Repo(context.comfy_path)
+repo = git.Repo(comfy_path)

if tag == 'nightly':
repo.git.checkout('master')

@@ -3259,5 +3437,5 @@ def repo_switch_commit(repo_path, commit_hash):

repo.git.checkout(commit_hash)
return True
-except Exception:
+except:
return None
@@ -55,7 +55,11 @@ def download_url(model_url: str, model_dir: str, filename: str):
return aria2_download_url(model_url, model_dir, filename)
else:
from torchvision.datasets.utils import download_url as torchvision_download_url
+try:
return torchvision_download_url(model_url, model_dir, filename)
+except Exception as e:
+logging.error(f"[ComfyUI-Manager] Failed to download: {model_url} / {repr(e)}")
+raise


def aria2_find_task(dir: str, filename: str):
356 glob/manager_migration.py Normal file
@@ -0,0 +1,356 @@
"""
ComfyUI-Manager migration module.
Handles migration from legacy paths to new __manager path structure.
"""

import os
import sys
import subprocess
import configparser

# Startup notices for notice board
startup_notices = []  # List of (message, level) tuples


def add_startup_notice(message, level='warning'):
    """Add a notice to be displayed on Manager notice board.

    Args:
        message: HTML-formatted message string
        level: 'warning', 'error', 'info'
    """
    global startup_notices
    startup_notices.append((message, level))


# Cache for API check (computed once per session)
_cached_has_system_user_api = None


def has_system_user_api():
    """Check if ComfyUI has the System User Protection API (PR #10966).

    Result is cached for performance.
    """
    global _cached_has_system_user_api
    if _cached_has_system_user_api is None:
        try:
            import folder_paths
            _cached_has_system_user_api = hasattr(folder_paths, 'get_system_user_directory')
        except Exception:
            _cached_has_system_user_api = False
    return _cached_has_system_user_api


def get_manager_path(user_dir):
    """Get the appropriate manager files path based on ComfyUI version.

    Returns:
        str: manager_files_path
    """
    if has_system_user_api():
        return os.path.abspath(os.path.join(user_dir, '__manager'))
    else:
        return os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))


def run_migration_checks(user_dir, manager_files_path):
    """Run all migration and security checks.

    Call this after get_manager_path() to handle:
    - Legacy config migration (new ComfyUI)
    - Legacy backup notification (every startup)
    - Suspicious directory detection (old ComfyUI)
    - Outdated ComfyUI warning (old ComfyUI)
    """
    if has_system_user_api():
        migrated = migrate_legacy_config(user_dir, manager_files_path)
        # Only check for legacy backup if migration didn't just happen
        # (migration already shows backup location in its message)
        if not migrated:
            check_legacy_backup(manager_files_path)
    else:
        check_suspicious_manager(user_dir)
        warn_outdated_comfyui()


def check_legacy_backup(manager_files_path):
    """Check for legacy backup and notify user to verify and remove it.

    This runs on every startup to remind users about pending legacy backup.
    """
    backup_dir = os.path.join(manager_files_path, '.legacy-manager-backup')
    if not os.path.exists(backup_dir):
        return

    # Terminal output
    print("\n" + "-"*70)
    print("[ComfyUI-Manager] NOTICE: Legacy backup exists")
    print(" - Your old Manager data was backed up to:")
    print(f" {backup_dir}")
    print(" - Please verify and remove it when no longer needed.")
    print("-"*70 + "\n")

    # Notice board output
    add_startup_notice(
        "Legacy ComfyUI-Manager data backup exists. Please verify and remove when no longer needed.",
        level='info'
    )


def check_suspicious_manager(user_dir):
    """Check for suspicious __manager directory on old ComfyUI.

    On old ComfyUI without System User API, if __manager exists with low security,
    warn the user to verify manually.

    Returns:
        bool: True if suspicious setup detected
    """
    if has_system_user_api():
        return False  # Not suspicious on new ComfyUI

    suspicious_path = os.path.abspath(os.path.join(user_dir, '__manager'))
    if not os.path.exists(suspicious_path):
        return False

    config_path = os.path.join(suspicious_path, 'config.ini')
    if not os.path.exists(config_path):
        return False

    config = configparser.ConfigParser()
    config.read(config_path)
    sec_level = config.get('default', 'security_level', fallback='normal').lower()

    if sec_level in ['weak', 'normal-']:
        # Terminal output
        print("\n" + "!"*70)
        print("[ComfyUI-Manager] ERROR: Suspicious path detected!")
        print(f" - '__manager' exists with low security level: '{sec_level}'")
        print(" - Please verify manually:")
        print(f" {config_path}")
        print("!"*70 + "\n")

        # Notice board output
        add_startup_notice(
            "[Security Alert] Suspicious path detected. See terminal log for details.",
            level='error'
        )
        return True

    return False


def warn_outdated_comfyui():
    """Warn user about outdated ComfyUI without System User API."""
    if has_system_user_api():
        return

    # Terminal output
    print("\n" + "!"*70)
    print("[ComfyUI-Manager] ERROR: ComfyUI version is outdated!")
    print(" - Most operations are blocked for security.")
    print(" - ComfyUI update is still allowed.")
    print(" - Please update ComfyUI to use Manager normally.")
    print("!"*70 + "\n")

    # Notice board output
    add_startup_notice(
        "[Security Alert] ComfyUI outdated. Installations blocked (update allowed).<BR>"
        "Update ComfyUI for normal operation.",
        level='error'
    )


def migrate_legacy_config(user_dir, manager_files_path):
    """Migrate ONLY config.ini to new __manager path if needed.

    IMPORTANT: Only config.ini is migrated. Other files (snapshots, cache, etc.)
    are NOT migrated - users must recreate them.

    Scenarios:
    1. Legacy exists, New doesn't exist → Migrate config.ini
    2. Legacy exists, New exists → First update after upgrade
       - Run ComfyUI dependency installation
       - Rename legacy to .backup
    3. Legacy doesn't exist → No migration needed

    Returns:
        bool: True if migration was performed
    """
    if not has_system_user_api():
        return False

    legacy_dir = os.path.join(user_dir, 'default', 'ComfyUI-Manager')
    legacy_config = os.path.join(legacy_dir, 'config.ini')
    new_config = os.path.join(manager_files_path, 'config.ini')

    if not os.path.exists(legacy_dir):
        return False  # No legacy directory, nothing to migrate

    # IMPORTANT: Check for config.ini existence, not just directory
    # (because makedirs() creates __manager before this function is called)

    # Case: Both configs exist (first update after ComfyUI upgrade)
    # This means user ran new ComfyUI at least once, creating __manager/config.ini
    if os.path.exists(legacy_config) and os.path.exists(new_config):
        _handle_first_update_migration(user_dir, legacy_dir, manager_files_path)
        return True

    # Case: Legacy config exists but new config doesn't (normal migration)
    # This is the first run after ComfyUI upgrade
    if os.path.exists(legacy_config) and not os.path.exists(new_config):
        pass  # Continue with normal migration below
    else:
        return False

    # Terminal output
    print("\n" + "-"*70)
    print("[ComfyUI-Manager] NOTICE: Legacy config.ini detected")
    print(f" - Old: {legacy_config}")
    print(f" - New: {new_config}")
    print(" - Migrating config.ini only (other files are NOT migrated).")
    print(" - Security level below 'normal' will be raised.")
    print("-"*70 + "\n")

    _migrate_config_with_security_check(legacy_config, new_config)

    # Move legacy directory to backup
    _move_legacy_to_backup(legacy_dir, manager_files_path)

    return True


def _handle_first_update_migration(user_dir, legacy_dir, manager_files_path):
    """Handle first ComfyUI update when both legacy and new directories exist.

    This scenario happens when:
    - User was on old ComfyUI (using default/ComfyUI-Manager)
    - ComfyUI was updated (now has System User API)
    - Manager already created __manager on first new run
    - But legacy directory still exists

    Actions:
    1. Run ComfyUI dependency installation
    2. Move legacy to __manager/.legacy-manager-backup
    """
    # Terminal output
    print("\n" + "-"*70)
    print("[ComfyUI-Manager] NOTICE: First update after ComfyUI upgrade detected")
    print(" - Both legacy and new directories exist.")
    print(" - Running ComfyUI dependency installation...")
    print("-"*70 + "\n")

    # Run ComfyUI dependency installation
    # Path: glob/manager_migration.py → glob → comfyui-manager → custom_nodes → ComfyUI
    try:
        comfyui_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
        requirements_path = os.path.join(comfyui_path, 'requirements.txt')
        if os.path.exists(requirements_path):
            subprocess.run([sys.executable, '-m', 'pip', 'install', '-r', requirements_path],
                           capture_output=True, check=False)
            print("[ComfyUI-Manager] ComfyUI dependencies installation completed.")
    except Exception as e:
        print(f"[ComfyUI-Manager] WARNING: Failed to install ComfyUI dependencies: {e}")

    # Move legacy to backup inside __manager
    _move_legacy_to_backup(legacy_dir, manager_files_path)


def _move_legacy_to_backup(legacy_dir, manager_files_path):
    """Move legacy directory to backup inside __manager.

    Returns:
        str: Path to backup directory if successful, None if failed
    """
    import shutil

    backup_dir = os.path.join(manager_files_path, '.legacy-manager-backup')

    try:
        if os.path.exists(backup_dir):
            shutil.rmtree(backup_dir)  # Remove old backup if exists
        shutil.move(legacy_dir, backup_dir)

        # Terminal output (full paths shown here only)
        print("\n" + "-"*70)
        print("[ComfyUI-Manager] NOTICE: Legacy settings migrated")
        print(f" - Old location: {legacy_dir}")
        print(f" - Backed up to: {backup_dir}")
        print(" - Please verify and remove the backup when no longer needed.")
        print("-"*70 + "\n")

        # Notice board output (no full paths for security)
        add_startup_notice(
            "Legacy ComfyUI-Manager data migrated. See terminal for details.",
            level='info'
        )
        return backup_dir
    except Exception as e:
        print(f"[ComfyUI-Manager] WARNING: Failed to backup legacy directory: {e}")
        add_startup_notice(
            f"[MIGRATION] Failed to backup legacy directory: {e}",
            level='warning'
        )
        return None


def _migrate_config_with_security_check(legacy_path, new_path):
    """Migrate legacy config, raising security level only if below default."""
    config = configparser.ConfigParser()
    try:
        config.read(legacy_path)
    except Exception as e:
        print(f"[ComfyUI-Manager] WARNING: Failed to parse config.ini: {e}")
        print(" - Creating fresh config with default settings.")
        add_startup_notice(
            "[MIGRATION] Failed to parse legacy config. Using defaults.",
            level='warning'
        )
        return  # Skip migration, let Manager create fresh config

    # Security level hierarchy: strong > normal > normal- > weak
    # Default is 'normal', only raise if below default
    if 'default' in config:
        current_level = config['default'].get('security_level', 'normal').lower()
        below_default_levels = ['weak', 'normal-']

        if current_level in below_default_levels:
            config['default']['security_level'] = 'normal'

            # Terminal output
            print("\n" + "="*70)
            print("[ComfyUI-Manager] WARNING: Security level adjusted")
            print(f" - Previous: '{current_level}' → New: 'normal'")
            print(" - Raised to prevent unauthorized remote access.")
            print("="*70 + "\n")

            # Notice board output
            add_startup_notice(
                f"[MIGRATION] Security level raised: '{current_level}' → 'normal'.<BR>"
                "To prevent unauthorized remote access.",
                level='warning'
            )
        else:
            print(f" - Security level: '{current_level}' (no change needed)")

    # Ensure directory exists
    os.makedirs(os.path.dirname(new_path), exist_ok=True)

    with open(new_path, 'w') as f:
        config.write(f)


def force_security_level_if_needed(config_dict):
    """Force security level to 'strong' if on old ComfyUI.

    Args:
        config_dict: Configuration dictionary to modify in-place

    Returns:
        bool: True if security level was forced
    """
    if not has_system_user_api():
        config_dict['security_level'] = 'strong'
        return True
    return False
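The docstrings above imply a fixed call order at startup: resolve the manager path first, then run the migration and security checks, then surface any collected notices. The sketch below follows that order; it is illustrative only, not code from this diff, and the flat import and the user_dir value are assumptions.

# Illustrative wiring of the migration helpers, per the run_migration_checks() docstring.
# The import path and user_dir are placeholders.
import manager_migration

user_dir = '/path/to/ComfyUI/user'  # hypothetical user directory
manager_files_path = manager_migration.get_manager_path(user_dir)
manager_migration.run_migration_checks(user_dir, manager_files_path)

# Surface any notices collected for the Manager notice board.
for message, level in manager_migration.startup_notices:
    print(f"[{level}] {message}")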
File diff suppressed because it is too large
@@ -15,19 +15,16 @@ import re
import logging
import platform
import shlex
-from . import cm_global
+from functools import lru_cache


cache_lock = threading.Lock()
-session_lock = threading.Lock()

comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.

use_uv = False
+bypass_ssl = False
-def is_manager_pip_package():
-return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))

def add_python_path_to_env():
if platform.system() != "Windows":
@@ -38,23 +35,69 @@ def add_python_path_to_env():
os.environ['PATH'] = os.path.dirname(sys.executable)+sep+os.environ['PATH']


+@lru_cache(maxsize=2)
+def get_pip_cmd(force_uv=False):
+"""
+Get the base pip command, with automatic fallback to uv if pip is unavailable.
+
+Args:
+force_uv (bool): If True, use uv directly without trying pip
+
+Returns:
+list: Base command for pip operations
+"""
+embedded = 'python_embeded' in sys.executable
+
+# Try pip first (unless forcing uv)
+if not force_uv:
+try:
+test_cmd = [sys.executable] + (['-s'] if embedded else []) + ['-m', 'pip', '--version']
+subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
+return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'pip']
+except Exception:
+logging.warning("[ComfyUI-Manager] python -m pip not available. Falling back to uv.")
+
+# Try uv (either forced or pip failed)
+import shutil
+
+# Try uv as Python module
+try:
+test_cmd = [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv', '--version']
+subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
+logging.info("[ComfyUI-Manager] Using uv as Python module for pip operations.")
+return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv', 'pip']
+except Exception:
+pass
+
+# Try standalone uv
+if shutil.which('uv'):
+logging.info("[ComfyUI-Manager] Using standalone uv for pip operations.")
+return ['uv', 'pip']
+
+# Nothing worked
+logging.error("[ComfyUI-Manager] Neither python -m pip nor uv are available. Cannot proceed with package operations.")
+raise Exception("Neither pip nor uv are available for package management")
+
+
def make_pip_cmd(cmd):
-if 'python_embeded' in sys.executable:
-if use_uv:
-return [sys.executable, '-s', '-m', 'uv', 'pip'] + cmd
-else:
-return [sys.executable, '-s', '-m', 'pip'] + cmd
-else:
-# FIXED: https://github.com/ltdrdata/ComfyUI-Manager/issues/1667
-if use_uv:
-return [sys.executable, '-m', 'uv', 'pip'] + cmd
-else:
-return [sys.executable, '-m', 'pip'] + cmd
+"""
+Create a pip command by combining the cached base pip command with the given arguments.
+
+Args:
+cmd (list): List of pip command arguments (e.g., ['install', 'package'])
+
+Returns:
+list: Complete command list ready for subprocess execution
+"""
+global use_uv
+base_cmd = get_pip_cmd(force_uv=use_uv)
+return base_cmd + cmd


# DON'T USE StrictVersion - cannot handle pre_release version
# try:
# from distutils.version import StrictVersion
-# except Exception:
+# except:
# print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
class StrictVersion:
def __init__(self, version_string):
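Per the docstrings above, make_pip_cmd() only appends the caller's arguments to the base command that get_pip_cmd() resolves once (pip if available, otherwise uv). A hedged usage sketch, with a flat import and an arbitrary package name chosen purely for illustration:

# Sketch only: the package name and flat import are illustrative.
import subprocess
import manager_util

cmd = manager_util.make_pip_cmd(['install', '--no-deps', 'GitPython'])
print(cmd)  # e.g. [<python executable>, '-m', 'pip', 'install', '--no-deps', 'GitPython']
subprocess.check_output(cmd, universal_newlines=True)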
@@ -140,7 +183,7 @@ async def get_data(uri, silent=False):
print(f"FETCH DATA from: {uri}", end="")

if uri.startswith("http"):
-async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=not bypass_ssl)) as session:
headers = {
'Cache-Control': 'no-cache',
'Pragma': 'no-cache',

@@ -330,6 +373,32 @@ torch_torchvision_torchaudio_version_map = {
}


+def torch_rollback(prev):
+spec = prev.split('+')
+if len(spec) > 1:
+platform = spec[1]
+else:
+cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
+subprocess.check_output(cmd, universal_newlines=True)
+logging.error(cmd)
+return
+
+torch_ver = StrictVersion(spec[0])
+torch_ver = f"{torch_ver.major}.{torch_ver.minor}.{torch_ver.patch}"
+torch_torchvision_torchaudio_ver = torch_torchvision_torchaudio_version_map.get(torch_ver)
+
+if torch_torchvision_torchaudio_ver is None:
+cmd = make_pip_cmd(['install', '--pre', 'torch', 'torchvision', 'torchaudio',
+'--index-url', f"https://download.pytorch.org/whl/nightly/{platform}"])
+logging.info("[ComfyUI-Manager] restore PyTorch to nightly version")
+else:
+torchvision_ver, torchaudio_ver = torch_torchvision_torchaudio_ver
+cmd = make_pip_cmd(['install', f'torch=={torch_ver}', f'torchvision=={torchvision_ver}', f"torchaudio=={torchaudio_ver}",
+'--index-url', f"https://download.pytorch.org/whl/{platform}"])
+logging.info(f"[ComfyUI-Manager] restore PyTorch to {torch_ver}+{platform}")
+
+subprocess.check_output(cmd, universal_newlines=True)


class PIPFixer:
def __init__(self, prev_pip_versions, comfyui_path, manager_files_path):
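As a worked example of the split logic in torch_rollback() above: the recorded torch version carries the wheel-index platform after a '+', and that suffix selects the PyTorch download URL. The version string below is made up for illustration and is not taken from this diff.

# Illustrative only: "2.1.2+cu121" is a hypothetical recorded version.
prev = "2.1.2+cu121"
spec = prev.split('+')    # ['2.1.2', 'cu121']
torch_ver = spec[0]       # looked up in torch_torchvision_torchaudio_version_map
index_platform = spec[1]  # becomes .../whl/cu121 (or .../whl/nightly/cu121)
print(torch_ver, index_platform)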
@@ -337,32 +406,6 @@ class PIPFixer:
self.comfyui_path = comfyui_path
self.manager_files_path = manager_files_path

-def torch_rollback(self):
-spec = self.prev_pip_versions['torch'].split('+')
-if len(spec) > 0:
-platform = spec[1]
-else:
-cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
-subprocess.check_output(cmd, universal_newlines=True)
-logging.error(cmd)
-return
-
-torch_ver = StrictVersion(spec[0])
-torch_ver = f"{torch_ver.major}.{torch_ver.minor}.{torch_ver.patch}"
-torch_torchvision_torchaudio_ver = torch_torchvision_torchaudio_version_map.get(torch_ver)
-
-if torch_torchvision_torchaudio_ver is None:
-cmd = make_pip_cmd(['install', '--pre', 'torch', 'torchvision', 'torchaudio',
-'--index-url', f"https://download.pytorch.org/whl/nightly/{platform}"])
-logging.info("[ComfyUI-Manager] restore PyTorch to nightly version")
-else:
-torchvision_ver, torchaudio_ver = torch_torchvision_torchaudio_ver
-cmd = make_pip_cmd(['install', f'torch=={torch_ver}', f'torchvision=={torchvision_ver}', f"torchaudio=={torchaudio_ver}",
-'--index-url', f"https://download.pytorch.org/whl/{platform}"])
-logging.info(f"[ComfyUI-Manager] restore PyTorch to {torch_ver}+{platform}")
-
-subprocess.check_output(cmd, universal_newlines=True)
-
def fix_broken(self):
new_pip_versions = get_installed_packages(True)

@@ -384,7 +427,7 @@ class PIPFixer:
elif self.prev_pip_versions['torch'] != new_pip_versions['torch'] \
or self.prev_pip_versions['torchvision'] != new_pip_versions['torchvision'] \
or self.prev_pip_versions['torchaudio'] != new_pip_versions['torchaudio']:
-self.torch_rollback()
+torch_rollback(self.prev_pip_versions['torch'])
except Exception as e:
logging.error("[ComfyUI-Manager] Failed to restore PyTorch")
logging.error(e)
@@ -415,32 +458,14 @@ class PIPFixer:

if len(targets) > 0:
for x in targets:
-if sys.version_info < (3, 13):
-cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}", "numpy<2"])
-subprocess.check_output(cmd, universal_newlines=True)
+cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}"])
+subprocess.check_output(cmd, universal_newlines=True)

logging.info(f"[ComfyUI-Manager] 'opencv' dependencies were fixed: {targets}")
except Exception as e:
logging.error("[ComfyUI-Manager] Failed to restore opencv")
logging.error(e)

-# fix numpy
-if sys.version_info >= (3, 13):
-logging.info("[ComfyUI-Manager] In Python 3.13 and above, PIP Fixer does not downgrade `numpy` below version 2.0. If you need to force a downgrade of `numpy`, please use `pip_auto_fix.list`.")
-else:
-try:
-np = new_pip_versions.get('numpy')
-if cm_global.pip_overrides.get('numpy') == 'numpy<2':
-if np is not None:
-if StrictVersion(np) >= StrictVersion('2'):
-cmd = make_pip_cmd(['install', "numpy<2"])
-subprocess.check_output(cmd , universal_newlines=True)
-
-logging.info("[ComfyUI-Manager] 'numpy' dependency were fixed")
-except Exception as e:
-logging.error("[ComfyUI-Manager] Failed to restore numpy")
-logging.error(e)

# fix missing frontend
try:
# NOTE: package name in requirements is 'comfyui-frontend-package'

@@ -527,7 +552,7 @@ def robust_readlines(fullpath):
try:
with open(fullpath, "r") as f:
return f.readlines()
-except Exception:
+except:
encoding = None
with open(fullpath, "rb") as f:
raw_data = f.read()
@@ -540,3 +565,69 @@ def robust_readlines(fullpath):

print(f"[ComfyUI-Manager] Failed to recognize encoding for: {fullpath}")
return []


+def restore_pip_snapshot(pips, options):
+non_url = []
+local_url = []
+non_local_url = []

+for k, v in pips.items():
+# NOTE: skip torch related packages
+if k.startswith("torch==") or k.startswith("torchvision==") or k.startswith("torchaudio==") or k.startswith("nvidia-"):
+continue

+if v == "":
+non_url.append(k)
+else:
+if v.startswith('file:'):
+local_url.append(v)
+else:
+non_local_url.append(v)


+# restore other pips
+failed = []
+if '--pip-non-url' in options:
+# try all at once
+res = 1
+try:
+res = subprocess.check_output(make_pip_cmd(['install'] + non_url))
+except Exception:
+pass

+# fallback
+if res != 0:
+for x in non_url:
+res = 1
+try:
+res = subprocess.check_output(make_pip_cmd(['install', '--no-deps', x]))
+except Exception:
+pass

+if res != 0:
+failed.append(x)

+if '--pip-non-local-url' in options:
+for x in non_local_url:
+res = 1
+try:
+res = subprocess.check_output(make_pip_cmd(['install', '--no-deps', x]))
+except Exception:
+pass

+if res != 0:
+failed.append(x)

+if '--pip-local-url' in options:
+for x in local_url:
+res = 1
+try:
+res = subprocess.check_output(make_pip_cmd(['install', '--no-deps', x]))
+except Exception:
+pass

+if res != 0:
+failed.append(x)

+print(f"Installation failed for pip packages: {failed}")
@@ -3,7 +3,7 @@ from __future__ import annotations
from dataclasses import dataclass
import os

-from .git_utils import get_commit_hash
+from git_utils import get_commit_hash


@dataclass

@@ -2,6 +2,8 @@ import sys
import subprocess
import os

+import manager_util


def security_check():
print("[START] Security scan")
@@ -66,18 +68,23 @@ https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situati
"lolMiner": [os.path.join(comfyui_path, 'lolMiner')]
}

-installed_pips = subprocess.check_output([sys.executable, '-m', "pip", "freeze"], text=True)
+installed_pips = subprocess.check_output(manager_util.make_pip_cmd(["freeze"]), text=True)

detected = set()
try:
-anthropic_info = subprocess.check_output([sys.executable, '-m', "pip", "show", "anthropic"], text=True, stderr=subprocess.DEVNULL)
-anthropic_reqs = [x for x in anthropic_info.split('\n') if x.startswith("Requires")][0].split(': ')[1]
-if "pycrypto" in anthropic_reqs:
-location = [x for x in anthropic_info.split('\n') if x.startswith("Location")][0].split(': ')[1]
-for fi in os.listdir(location):
-if fi.startswith("anthropic"):
-guide["ComfyUI_LLMVISION"] = f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"]
-detected.add("ComfyUI_LLMVISION")
+anthropic_info = subprocess.check_output(manager_util.make_pip_cmd(["show", "anthropic"]), text=True, stderr=subprocess.DEVNULL)
+requires_lines = [x for x in anthropic_info.split('\n') if x.startswith("Requires")]
+if requires_lines:
+anthropic_reqs = requires_lines[0].split(": ", 1)[1]
+if "pycrypto" in anthropic_reqs:
+location_lines = [x for x in anthropic_info.split('\n') if x.startswith("Location")]
+if location_lines:
+location = location_lines[0].split(": ", 1)[1]
+for fi in os.listdir(location):
+if fi.startswith("anthropic"):
+guide["ComfyUI_LLMVISION"] = (f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"])
+detected.add("ComfyUI_LLMVISION")

except subprocess.CalledProcessError:
pass

@@ -1,7 +1,5 @@
import mimetypes
-from ..common import context
-from . import manager_core as core
+import manager_core as core

import os
from aiohttp import web
import aiohttp

@@ -55,7 +53,7 @@ def compute_sha256_checksum(filepath):
return sha256.hexdigest()


-@PromptServer.instance.routes.get("/v2/manager/share_option")
+@PromptServer.instance.routes.get("/manager/share_option")
async def share_option(request):
if "value" in request.rel_url.query:
core.get_config()['share_option'] = request.rel_url.query['value']
@@ -67,21 +65,21 @@ async def share_option(request):


def get_openart_auth():
-if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
+if not os.path.exists(os.path.join(core.manager_files_path, ".openart_key")):
return None
try:
-with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
+with open(os.path.join(core.manager_files_path, ".openart_key"), "r") as f:
openart_key = f.read().strip()
return openart_key if openart_key else None
-except Exception:
+except:
return None


def get_matrix_auth():
-if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
+if not os.path.exists(os.path.join(core.manager_files_path, "matrix_auth")):
return None
try:
-with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
+with open(os.path.join(core.manager_files_path, "matrix_auth"), "r") as f:
matrix_auth = f.read()
homeserver, username, password = matrix_auth.strip().split("\n")
if not homeserver or not username or not password:
@@ -91,40 +89,40 @@ def get_matrix_auth():
"username": username,
"password": password,
}
-except Exception:
+except:
return None


def get_comfyworkflows_auth():
-if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
+if not os.path.exists(os.path.join(core.manager_files_path, "comfyworkflows_sharekey")):
return None
try:
-with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
+with open(os.path.join(core.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
share_key = f.read()
if not share_key.strip():
return None
return share_key
-except Exception:
+except:
return None


def get_youml_settings():
-if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
+if not os.path.exists(os.path.join(core.manager_files_path, ".youml")):
return None
try:
-with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
+with open(os.path.join(core.manager_files_path, ".youml"), "r") as f:
youml_settings = f.read().strip()
return youml_settings if youml_settings else None
-except Exception:
+except:
return None


def set_youml_settings(settings):
-with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
+with open(os.path.join(core.manager_files_path, ".youml"), "w") as f:
f.write(settings)


-@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
+@PromptServer.instance.routes.get("/manager/get_openart_auth")
async def api_get_openart_auth(request):
# print("Getting stored Matrix credentials...")
openart_key = get_openart_auth()
@@ -133,16 +131,16 @@ async def api_get_openart_auth(request):
return web.json_response({"openart_key": openart_key})


-@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
+@PromptServer.instance.routes.post("/manager/set_openart_auth")
async def api_set_openart_auth(request):
json_data = await request.json()
openart_key = json_data['openart_key']
-with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
+with open(os.path.join(core.manager_files_path, ".openart_key"), "w") as f:
f.write(openart_key)
return web.Response(status=200)


-@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
+@PromptServer.instance.routes.get("/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
# print("Getting stored Matrix credentials...")
matrix_auth = get_matrix_auth()

@@ -151,7 +149,7 @@ async def api_get_matrix_auth(request):
return web.json_response(matrix_auth)


-@PromptServer.instance.routes.get("/v2/manager/youml/settings")
+@PromptServer.instance.routes.get("/manager/youml/settings")
async def api_get_youml_settings(request):
youml_settings = get_youml_settings()
if not youml_settings:

@@ -159,14 +157,14 @@ async def api_get_youml_settings(request):
return web.json_response(json.loads(youml_settings))


-@PromptServer.instance.routes.post("/v2/manager/youml/settings")
+@PromptServer.instance.routes.post("/manager/youml/settings")
async def api_set_youml_settings(request):
json_data = await request.json()
set_youml_settings(json.dumps(json_data))
return web.Response(status=200)


-@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
+@PromptServer.instance.routes.get("/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
# Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
# in the same directory as the ComfyUI base folder
@@ -177,17 +175,17 @@ async def api_get_comfyworkflows_auth(request):
return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})


-@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
+@PromptServer.instance.routes.post("/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
json_data = await request.json()
-with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
+with open(os.path.join(core.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
json.dump(json_data, file, indent=4)
return web.Response(status=200)


-@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
+@PromptServer.instance.routes.get("/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
-with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
+with open(os.path.join(core.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
data = json.load(file)
return web.Response(status=200, text=json.dumps(data))

@@ -196,12 +194,12 @@ def set_matrix_auth(json_data):
homeserver = json_data['homeserver']
username = json_data['username']
password = json_data['password']
-with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
+with open(os.path.join(core.manager_files_path, "matrix_auth"), "w") as f:
f.write("\n".join([homeserver, username, password]))


def set_comfyworkflows_auth(comfyworkflows_sharekey):
-with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
+with open(os.path.join(core.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
f.write(comfyworkflows_sharekey)


@@ -213,7 +211,7 @@ def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
return comfyworkflows_sharekey.strip()


-@PromptServer.instance.routes.post("/v2/manager/share")
+@PromptServer.instance.routes.post("/manager/share")
async def share_art(request):
# get json data
json_data = await request.json()
@@ -235,7 +233,7 @@ async def share_art(request):

try:
output_to_share = potential_outputs[int(selected_output_index)]
-except Exception:
+except:
# for now, pick the first output
output_to_share = potential_outputs[0]

@@ -337,8 +335,7 @@ async def share_art(request):
content_type = assetFileType

try:
-from matrix_client.api import MatrixHttpApi
-from matrix_client.client import MatrixClient
+from nio import AsyncClient, LoginResponse, UploadResponse

homeserver = 'matrix.org'
if matrix_auth:
@@ -347,20 +344,35 @@ async def share_art(request):
if not homeserver.startswith("https://"):
homeserver = "https://" + homeserver

-client = MatrixClient(homeserver)
-try:
-token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
-if not token:
-return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
-except Exception:
+client = AsyncClient(homeserver, matrix_auth['username'])
+# Login
+login_resp = await client.login(matrix_auth['password'])
+if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
+await client.close()
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

-matrix = MatrixHttpApi(homeserver, token=token)
+# Upload asset
with open(asset_filepath, 'rb') as f:
-mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']
+upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
+asset_data = f.seek(0) or f.read() # get size for info below
+if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
+await client.close()
+return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
+mxc_url = upload_resp.content_uri

-workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']
+# Upload workflow JSON
+import io
+workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
+workflow_io = io.BytesIO(workflow_json_bytes)
+upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
+workflow_io.seek(0)
+if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
+await client.close()
+return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
+workflow_json_mxc_url = upload_workflow_resp.content_uri

+# Send text message
text_content = ""
if title:
text_content += f"{title}\n"
@@ -368,10 +380,45 @@ async def share_art(request):
text_content += f"{description}\n"
if credits:
text_content += f"\ncredits: {credits}\n"
-matrix.send_message(comfyui_share_room_id, text_content)
-matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
-matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
-except Exception:
+await client.room_send(
+room_id=comfyui_share_room_id,
+message_type="m.room.message",
+content={"msgtype": "m.text", "body": text_content}
+)

+# Send image
+await client.room_send(
+room_id=comfyui_share_room_id,
+message_type="m.room.message",
+content={
+"msgtype": "m.image",
+"body": filename,
+"url": mxc_url,
+"info": {
+"mimetype": content_type,
+"size": len(asset_data)
+}
+}
+)

+# Send workflow JSON file
+await client.room_send(
+room_id=comfyui_share_room_id,
+message_type="m.room.message",
+content={
+"msgtype": "m.file",
+"body": "workflow.json",
+"url": workflow_json_mxc_url,
+"info": {
+"mimetype": "application/json",
+"size": len(workflow_json_bytes)
+}
+}
+)

+await client.close()

+except:
import traceback
traceback.print_exc()
return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)
@@ -13,7 +13,7 @@ This directory contains the JavaScript frontend implementation for ComfyUI-Manag
## Sharing Components

- **comfyui-share-common.js**: Base functionality for workflow sharing features.
-- **comfyui-share-copus.js**: Integration with the ComfyUI Opus sharing platform.
+- **comfyui-share-copus.js**: Integration with the ComfyUI Copus sharing platform.
- **comfyui-share-openart.js**: Integration with the OpenArt sharing platform.
- **comfyui-share-youml.js**: Integration with the YouML sharing platform.

@@ -1,6 +1,6 @@
import { api } from "../../scripts/api.js";
import { app } from "../../scripts/app.js";
-import { sleep, customConfirm, customAlert } from "./common.js";
+import { sleep, customConfirm, customAlert, handle403Response, show_message } from "./common.js";

async function tryInstallCustomNode(event) {
let msg = '-= [ComfyUI Manager] extension installation request =-\n\n';
@@ -25,7 +25,7 @@ async function tryInstallCustomNode(event) {
const res = await customConfirm(msg);
if(res) {
if(event.detail.target.installed == 'Disabled') {
-const response = await api.fetchApi(`/v2/customnode/toggle_active`, {
+const response = await api.fetchApi(`/customnode/toggle_active`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(event.detail.target)

@@ -35,14 +35,14 @@ async function tryInstallCustomNode(event) {
await sleep(300);
app.ui.dialog.show(`Installing... '${event.detail.target.title}'`);

-const response = await api.fetchApi(`/v2/customnode/install`, {
+const response = await api.fetchApi(`/customnode/install`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(event.detail.target)
});

if(response.status == 403) {
-show_message('This action is not allowed with this security level configuration.');
+await handle403Response(response);
return false;
}
else if(response.status == 400) {

@@ -52,9 +52,9 @@ async function tryInstallCustomNode(event) {
}
}

-let response = await api.fetchApi("/v2/manager/reboot");
+let response = await api.fetchApi("/manager/reboot");
if(response.status == 403) {
-show_message('This action is not allowed with this security level configuration.');
+await handle403Response(response);
return false;
}

@@ -14,9 +14,9 @@ import { OpenArtShareDialog } from "./comfyui-share-openart.js";
import {
free_models, install_pip, install_via_git_url, manager_instance,
rebootAPI, setManagerInstance, show_message, customAlert, customPrompt,
-infoToast, showTerminal, setNeedRestart, generateUUID
+infoToast, showTerminal, setNeedRestart, handle403Response
} from "./common.js";
-import { ComponentBuilderDialog, load_components, set_component_policy } from "./components-manager.js";
+import { ComponentBuilderDialog, getPureName, load_components, set_component_policy } from "./components-manager.js";
import { CustomNodesManager } from "./custom-nodes-manager.js";
import { ModelManager } from "./model-manager.js";
import { SnapshotManager } from "./snapshot.js";

@@ -189,7 +189,8 @@ docStyle.innerHTML = `
}
`;

-function isBeforeFrontendVersion(compareVersion) {
+function is_legacy_front() {
+let compareVersion = '1.2.49';
try {
const frontendVersion = window['__COMFYUI_FRONTEND_VERSION__'];
if (typeof frontendVersion !== 'string') {

@@ -222,9 +223,6 @@ function isBeforeFrontendVersion(compareVersion) {
}
}

-const is_legacy_front = () => isBeforeFrontendVersion('1.2.49');
-const isNotNewManagerUI = () => isBeforeFrontendVersion('1.16.4');

document.head.appendChild(docStyle);

var update_comfyui_button = null;
@@ -234,7 +232,7 @@ var restart_stop_button = null;
var update_policy_combo = null;

let share_option = 'all';
-var batch_id = null;
+var is_updating = false;


// copied style from https://github.com/pythongosssss/ComfyUI-Custom-Scripts

@@ -417,7 +415,7 @@ const style = `
`;

async function init_share_option() {
-api.fetchApi('/v2/manager/share_option')
+api.fetchApi('/manager/share_option')
.then(response => response.text())
.then(data => {
share_option = data || 'all';

@@ -425,7 +423,7 @@ async function init_share_option() {
}

async function init_notice(notice) {
-api.fetchApi('/v2/manager/notice')
+api.fetchApi('/manager/notice')
.then(response => response.text())
.then(data => {
notice.innerHTML = data;
@@ -476,19 +474,14 @@ async function updateComfyUI() {
let prev_text = update_comfyui_button.innerText;
update_comfyui_button.innerText = "Updating ComfyUI...";

-// set_inprogress_mode();
+set_inprogress_mode();

+const response = await api.fetchApi('/manager/queue/update_comfyui');

showTerminal();

-batch_id = generateUUID();
-let batch = {};
-batch['batch_id'] = batch_id;
-batch['update_comfyui'] = true;
-
-const res = await api.fetchApi(`/v2/manager/queue/batch`, {
-method: 'POST',
-body: JSON.stringify(batch)
-});
+is_updating = true;
+await api.fetchApi('/manager/queue/start');
}

function showVersionSelectorDialog(versions, current, onSelect) {
@@ -619,7 +612,7 @@ async function switchComfyUI() {
switch_comfyui_button.disabled = true;
switch_comfyui_button.style.backgroundColor = "gray";

-let res = await api.fetchApi(`/v2/comfyui_manager/comfyui_versions`, { cache: "no-store" });
+let res = await api.fetchApi(`/comfyui_manager/comfyui_versions`, { cache: "no-store" });

switch_comfyui_button.disabled = false;
switch_comfyui_button.style.backgroundColor = "";

@@ -638,14 +631,14 @@ async function switchComfyUI() {
showVersionSelectorDialog(versions, obj.current, async (selected_version) => {
if(selected_version == 'nightly') {
update_policy_combo.value = 'nightly-comfyui';
-api.fetchApi('/v2/manager/policy/update?value=nightly-comfyui');
+api.fetchApi('/manager/policy/update?value=nightly-comfyui');
}
else {
update_policy_combo.value = 'stable-comfyui';
-api.fetchApi('/v2/manager/policy/update?value=stable-comfyui');
+api.fetchApi('/manager/policy/update?value=stable-comfyui');
}

-let response = await api.fetchApi(`/v2/comfyui_manager/comfyui_switch_version?ver=${selected_version}`, { cache: "no-store" });
+let response = await api.fetchApi(`/comfyui_manager/comfyui_switch_version?ver=${selected_version}`, { cache: "no-store" });
if (response.status == 200) {
infoToast(`ComfyUI version is switched to ${selected_version}`);
}
@@ -663,17 +656,18 @@ async function onQueueStatus(event) {
const isElectron = 'electronAPI' in window;

if(event.detail.status == 'in_progress') {
-// set_inprogress_mode();
+set_inprogress_mode();
update_all_button.innerText = `in progress.. (${event.detail.done_count}/${event.detail.total_count})`;
}
-else if(event.detail.status == 'all-done') {
+else if(event.detail.status == 'done') {
reset_action_buttons();
-}
-else if(event.detail.status == 'batch-done') {
-if(batch_id != event.detail.batch_id) {
+if(!is_updating) {
return;
}

+is_updating = false;

let success_list = [];
let failed_list = [];
let comfyui_state = null;

@@ -759,9 +753,9 @@ async function onQueueStatus(event) {

const rebootButton = document.getElementById('cm-reboot-button5');
rebootButton?.addEventListener("click",
-function() {
-if(rebootAPI()) {
-manager_dialog.close();
+async function() {
+if(await rebootAPI()) {
+manager_instance.close();
}
});
}
@@ -773,28 +767,46 @@ api.addEventListener("cm-queue-status", onQueueStatus);
async function updateAll(update_comfyui) {
update_all_button.innerText = "Updating...";

-// set_inprogress_mode();
+set_inprogress_mode();

var mode = manager_instance.datasrc_combo.value;

showTerminal();

-batch_id = generateUUID();

-let batch = {};
if(update_comfyui) {
update_all_button.innerText = "Updating ComfyUI...";
-batch['update_comfyui'] = true;
+await api.fetchApi('/manager/queue/update_comfyui');
}

-batch['update_all'] = mode;
+const response = await api.fetchApi(`/manager/queue/update_all?mode=${mode}`);

-const res = await api.fetchApi(`/v2/manager/queue/batch`, {
-method: 'POST',
-body: JSON.stringify(batch)
-});
+if (response.status == 403) {
+await handle403Response(response);
+reset_action_buttons();
+}
+else if (response.status == 401) {
+customAlert('Another task is already in progress. Please stop the ongoing task first.');
+reset_action_buttons();
+}
+else if(response.status == 200) {
+is_updating = true;
+await api.fetchApi('/manager/queue/start');
+}
}

+function newDOMTokenList(initialTokens) {
+const tmp = document.createElement(`div`);

+const classList = tmp.classList;
+if (initialTokens) {
+initialTokens.forEach(token => {
+classList.add(token);
+});
+}

+return classList;
+}

/**
* Check whether the node is a potential output node (img, gif or video output)
*/
@@ -807,7 +819,7 @@ function restartOrStop() {
rebootAPI();
}
else {
-api.fetchApi('/v2/manager/queue/reset');
+api.fetchApi('/manager/queue/reset');
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
}
}

@@ -955,12 +967,12 @@ class ManagerMenuDialog extends ComfyDialog {
this.datasrc_combo.appendChild($el('option', { value: 'local', text: 'DB: Local' }, []));
this.datasrc_combo.appendChild($el('option', { value: 'remote', text: 'DB: Channel (remote)' }, []));

-api.fetchApi('/v2/manager/db_mode')
+api.fetchApi('/manager/db_mode')
.then(response => response.text())
.then(data => { this.datasrc_combo.value = data; });

this.datasrc_combo.addEventListener('change', function (event) {
-api.fetchApi(`/v2/manager/db_mode?value=${event.target.value}`);
+api.fetchApi(`/manager/db_mode?value=${event.target.value}`);
});

// preview method
@@ -972,19 +984,19 @@ class ManagerMenuDialog extends ComfyDialog {
preview_combo.appendChild($el('option', { value: 'latent2rgb', text: 'Preview method: Latent2RGB (fast)' }, []));
preview_combo.appendChild($el('option', { value: 'none', text: 'Preview method: None (very fast)' }, []));

-api.fetchApi('/v2/manager/preview_method')
+api.fetchApi('/manager/preview_method')
.then(response => response.text())
.then(data => { preview_combo.value = data; });

preview_combo.addEventListener('change', function (event) {
-api.fetchApi(`/v2/manager/preview_method?value=${event.target.value}`);
+api.fetchApi(`/manager/preview_method?value=${event.target.value}`);
});

// channel
let channel_combo = document.createElement("select");
channel_combo.setAttribute("title", "Configure the channel for retrieving data from the Custom Node list (including missing nodes) or the Model list.");
channel_combo.className = "cm-menu-combo";
-api.fetchApi('/v2/manager/channel_url_list')
+api.fetchApi('/manager/channel_url_list')
.then(response => response.json())
.then(async data => {
try {

@@ -997,7 +1009,7 @@ class ManagerMenuDialog extends ComfyDialog {
}

channel_combo.addEventListener('change', function (event) {
-api.fetchApi(`/v2/manager/channel_url_list?value=${event.target.value}`);
+api.fetchApi(`/manager/channel_url_list?value=${event.target.value}`);
});

channel_combo.value = data.selected;

@@ -1025,7 +1037,7 @@ class ManagerMenuDialog extends ComfyDialog {
share_combo.appendChild($el('option', { value: option[0], text: `Share: ${option[1]}` }, []));
}

-api.fetchApi('/v2/manager/share_option')
+api.fetchApi('/manager/share_option')
.then(response => response.text())
.then(data => {
share_combo.value = data || 'all';
@@ -1035,7 +1047,7 @@ class ManagerMenuDialog extends ComfyDialog {
share_combo.addEventListener('change', function (event) {
const value = event.target.value;
share_option = value;
-api.fetchApi(`/v2/manager/share_option?value=${value}`);
+api.fetchApi(`/manager/share_option?value=${value}`);
const shareButton = document.getElementById("shareButton");
if (value === 'none') {
shareButton.style.display = "none";

@@ -1050,7 +1062,7 @@ class ManagerMenuDialog extends ComfyDialog {
component_policy_combo.appendChild($el('option', { value: 'workflow', text: 'Component: Use workflow version' }, []));
component_policy_combo.appendChild($el('option', { value: 'higher', text: 'Component: Use higher version' }, []));
component_policy_combo.appendChild($el('option', { value: 'mine', text: 'Component: Use my version' }, []));
-api.fetchApi('/v2/manager/policy/component')
+api.fetchApi('/manager/policy/component')
.then(response => response.text())
.then(data => {
component_policy_combo.value = data;

@@ -1058,7 +1070,7 @@ class ManagerMenuDialog extends ComfyDialog {
});

component_policy_combo.addEventListener('change', function (event) {
-api.fetchApi(`/v2/manager/policy/component?value=${event.target.value}`);
+api.fetchApi(`/manager/policy/component?value=${event.target.value}`);
set_component_policy(event.target.value);
});

@@ -1071,14 +1083,14 @@ class ManagerMenuDialog extends ComfyDialog {
update_policy_combo.className = "cm-menu-combo";
update_policy_combo.appendChild($el('option', { value: 'stable-comfyui', text: 'Update: ComfyUI Stable Version' }, []));
update_policy_combo.appendChild($el('option', { value: 'nightly-comfyui', text: 'Update: ComfyUI Nightly Version' }, []));
-api.fetchApi('/v2/manager/policy/update')
+api.fetchApi('/manager/policy/update')
.then(response => response.text())
.then(data => {
update_policy_combo.value = data;
});

update_policy_combo.addEventListener('change', function (event) {
-api.fetchApi(`/v2/manager/policy/update?value=${event.target.value}`);
+api.fetchApi(`/manager/policy/update?value=${event.target.value}`);
});

return [
@@ -1381,12 +1393,12 @@ class ManagerMenuDialog extends ComfyDialog {
}

async function getVersion() {
-let version = await api.fetchApi(`/v2/manager/version`);
+let version = await api.fetchApi(`/manager/version`);
return await version.text();
}

app.registerExtension({
-name: "Comfy.Legacy.ManagerMenu",
+name: "Comfy.ManagerMenu",

aboutPageBadges: [
{
@@ -1446,6 +1458,31 @@ app.registerExtension({

load_components();

+// Fetch and show startup alerts (critical errors like outdated ComfyUI)
+// Poll until extensionManager.toast is ready (set in Vue onMounted)
+const showStartupAlerts = async () => {
+let toastWaitCount = 0;
+const waitForToast = () => {
+if (window['app']?.extensionManager?.toast) {
+fetch('/manager/startup_alerts')
+.then(response => response.ok ? response.json() : [])
+.then(alerts => {
+for (const alert of alerts) {
+customAlert(alert.message);
+}
+})
+.catch(e => console.warn('[ComfyUI-Manager] Failed to fetch startup alerts:', e));
+} else if (toastWaitCount < 300) { // Max 30 seconds (300 * 100ms)
+toastWaitCount++;
+setTimeout(waitForToast, 100);
+} else {
+console.warn('[ComfyUI-Manager] Timeout waiting for toast. Startup alerts skipped.');
+}
+};
+waitForToast();
+};
+showStartupAlerts();

const menu = document.querySelector(".comfy-menu");
const separator = document.createElement("hr");

@@ -1518,10 +1555,7 @@ app.registerExtension({
}).element
);

-const shouldShowLegacyMenuItems = isNotNewManagerUI();
-if (shouldShowLegacyMenuItems) {
-app.menu?.settingsGroup.element.before(cmGroup.element);
-}
+app.menu?.settingsGroup.element.before(cmGroup.element);
}
catch(exception) {
console.log('ComfyUI is outdated. New style menu based features are disabled.');

@@ -172,7 +172,7 @@ export const shareToEsheep= () => {
const nodes = app.graph._nodes
const { potential_outputs, potential_output_nodes } = getPotentialOutputsAndOutputNodes(nodes);
const workflow = prompt['workflow']
-api.fetchApi(`/v2/manager/set_esheep_workflow_and_images`, {
+api.fetchApi(`/manager/set_esheep_workflow_and_images`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({

@@ -812,7 +812,7 @@ export class ShareDialog extends ComfyDialog {
// get the user's existing matrix auth and share key
ShareDialog.matrix_auth = { homeserver: "matrix.org", username: "", password: "" };
try {
-api.fetchApi(`/v2/manager/get_matrix_auth`)
+api.fetchApi(`/manager/get_matrix_auth`)
.then(response => response.json())
.then(data => {
ShareDialog.matrix_auth = data;
@@ -831,7 +831,7 @@ export class ShareDialog extends ComfyDialog {
ShareDialog.cw_sharekey = "";
try {
// console.log("Fetching comfyworkflows share key")
-api.fetchApi(`/v2/manager/get_comfyworkflows_auth`)
+api.fetchApi(`/manager/get_comfyworkflows_auth`)
.then(response => response.json())
.then(data => {
ShareDialog.cw_sharekey = data.comfyworkflows_sharekey;

@@ -891,7 +891,7 @@ export class ShareDialog extends ComfyDialog {
// Change the text of the share button to "Sharing..." to indicate that the share process has started
this.share_button.textContent = "Sharing...";

-const response = await api.fetchApi(`/v2/manager/share`, {
+const response = await api.fetchApi(`/manager/share`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({

@@ -201,13 +201,15 @@ export class CopusShareDialog extends ComfyDialog {
});
this.LockInput = $el("input", {
type: "text",
-placeholder: "",
+placeholder: "0",
style: {
width: "100px",
padding: "7px",
+paddingLeft: "30px",
borderRadius: "4px",
border: "1px solid #ddd",
boxSizing: "border-box",
+position: "relative",
},
oninput: (event) => {
let input = event.target.value;
@@ -342,15 +344,11 @@ export class CopusShareDialog extends ComfyDialog {
["0/70"]
);
// Additional Inputs Section
-const additionalInputsSection = $el(
-"div",
-{ style: { ...sectionStyle, } },
-[
-$el("label", { style: labelStyle }, ["3️⃣ Title "]),
-this.TitleInput,
-titleNumDom,
-]
-);
+const additionalInputsSection = $el("div", { style: { ...sectionStyle } }, [
+$el("label", { style: labelStyle }, ["3️⃣ Title "]),
+this.TitleInput,
+titleNumDom,
+]);
const SubtitleSection = $el("div", { style: sectionStyle }, [
$el("label", { style: labelStyle }, ["4️⃣ Subtitle "]),
this.SubTitleInput,

@@ -379,7 +377,7 @@ export class CopusShareDialog extends ComfyDialog {
});

const blockChainSection_lock = $el("div", { style: sectionStyle }, [
-$el("label", { style: labelStyle }, ["6️⃣ Pay to download"]),
+$el("label", { style: labelStyle }, ["6️⃣ Download threshold"]),
$el(
"label",
{
@@ -392,11 +390,42 @@ export class CopusShareDialog extends ComfyDialog {
},
[
this.radioButtonsCheck_lock,
-$el("div", { style: { marginLeft: "5px" ,display:'flex',alignItems:'center'} }, [
-$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
-$el("span", { style: { marginLeft: "20px",marginRight:'10px' ,color:'#fff'} }, ["Price US$"]),
-this.LockInput
-]),
+$el(
+"div",
+{
+style: {
+marginLeft: "5px",
+display: "flex",
+alignItems: "center",
+position: "relative",
+},
+},
+[
+$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
+$el(
+"span",
+{
+style: {
+marginLeft: "20px",
+marginRight: "10px",
+color: "#fff",
+},
+},
+["Unlock with"]
+),
+$el("img", {
+style: {
+width: "16px",
+height: "16px",
+position: "absolute",
+right: "75px",
+zIndex: "100",
+},
+src: "https://static.copus.io/images/admin/202507/prod/e2919a1d8f3c2d99d3b8fe27ff94b841.png",
+}),
+this.LockInput,
+]
+),
]
),
$el(

@@ -404,14 +433,25 @@ export class CopusShareDialog extends ComfyDialog {
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
[
this.radioButtonsCheckOff_lock,
-$el("span", { style: { marginLeft: "5px" } }, ["OFF"]),
+$el(
+"div",
+{
+style: {
+marginLeft: "5px",
+display: "flex",
+alignItems: "center",
+},
+},
+[$el("span", { style: { marginLeft: "5px" } }, ["OFF"])]
+),
]
),

$el(
"p",
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
-["Get paid from your workflow. You can change the price and withdraw your earnings on Copus."]
+[
+]
),
]);

@@ -432,7 +472,7 @@ export class CopusShareDialog extends ComfyDialog {
});

const blockChainSection = $el("div", { style: sectionStyle }, [
-$el("label", { style: labelStyle }, ["7️⃣ Store on blockchain "]),
+$el("label", { style: labelStyle }, ["8️⃣ Store on blockchain "]),
$el(
"label",
{
@@ -463,6 +503,139 @@ export class CopusShareDialog extends ComfyDialog {
),
]);

+this.ratingRadioButtonsCheck0 = $el("input", {
+type: "radio",
+name: "content_rating",
+value: "0",
+id: "content_rating0",
+});
+this.ratingRadioButtonsCheck1 = $el("input", {
+type: "radio",
+name: "content_rating",
+value: "1",
+id: "content_rating1",
+});
+this.ratingRadioButtonsCheck2 = $el("input", {
+type: "radio",
+name: "content_rating",
+value: "2",
+id: "content_rating2",
+});
+this.ratingRadioButtonsCheck_1 = $el("input", {
+type: "radio",
+name: "content_rating",
+value: "-1",
+id: "content_rating_1",
+checked: true,
+});

+// content rating
+const contentRatingSection = $el("div", { style: sectionStyle }, [
+$el("label", { style: labelStyle }, ["7️⃣ Content rating "]),
+$el(
+"label",
+{
+style: {
+marginTop: "10px",
+display: "flex",
+alignItems: "center",
+cursor: "pointer",
+},
+},
+[
+this.ratingRadioButtonsCheck0,
+$el("img", {
+style: {
+width: "12px",
+height: "12px",
+marginLeft: "5px",
+},
+src: "https://static.copus.io/images/client/202507/test/b9f17da83b054d53cd0cb4508c2c30dc.png",
+}),
+$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
+"All ages",
+]),
+]
+),
+$el(
+"p",
+{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
+["Safe for all viewers; no profanity, violence, or mature themes."]
+),
+$el(
+"label",
+{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
+[
+this.ratingRadioButtonsCheck1,
+$el("img", {
+style: {
+width: "12px",
+height: "12px",
+marginLeft: "5px",
+},
+src: "https://static.copus.io/images/client/202507/test/7848bc0d3690671df21c7cf00c4cfc81.png",
+}),
+$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
+"13+ (Teen)",
+]),
+]
+),
+$el(
+"p",
+{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
+[
+"Mild language, light themes, or cartoon violence; no explicit content. ",
+]
+),
+$el(
+"label",
+{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
+[
+this.ratingRadioButtonsCheck2,
+$el("img", {
+style: {
+width: "12px",
+height: "12px",
+marginLeft: "5px",
+},
+src: "https://static.copus.io/images/client/202507/test/bc51839c208d68d91173e43c23bff039.png",
+}),
+$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
+"18+ (Explicit)",
+]),
+]
+),
+$el(
+"p",
+{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
+[
+"Explicit content, including sexual content, strong violence, or intense themes. ",
+]
+),
+$el(
+"label",
+{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
+[
+this.ratingRadioButtonsCheck_1,
+$el("img", {
+style: {
+width: "12px",
+height: "12px",
+marginLeft: "5px",
+},
+src: "https://static.copus.io/images/client/202507/test/5c802fdcaaea4e7bbed37393eec0d5ba.png",
+}),
+$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
+"Not Rated",
+]),
|
]
|
||||||
|
),
|
||||||
|
$el(
|
||||||
|
"p",
|
||||||
|
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||||
|
["No age rating provided."]
|
||||||
|
),
|
||||||
|
]);
|
||||||
|
|
||||||
// Message Section
|
// Message Section
|
||||||
this.message = $el(
|
this.message = $el(
|
||||||
@@ -526,6 +699,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
DescriptionSection,
|
DescriptionSection,
|
||||||
// contestSection,
|
// contestSection,
|
||||||
blockChainSection_lock,
|
blockChainSection_lock,
|
||||||
|
contentRatingSection,
|
||||||
blockChainSection,
|
blockChainSection,
|
||||||
this.message,
|
this.message,
|
||||||
buttonsSection,
|
buttonsSection,
|
||||||
@@ -587,7 +761,9 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
url: data,
|
url: data,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
throw new Error("make sure your API key is correct and try again later");
|
throw new Error(
|
||||||
|
"make sure your API key is correct and try again later"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e?.response?.status === 413) {
|
if (e?.response?.status === 413) {
|
||||||
@@ -628,8 +804,15 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
subTitle: this.SubTitleInput.value,
|
subTitle: this.SubTitleInput.value,
|
||||||
content: this.descriptionInput.value,
|
content: this.descriptionInput.value,
|
||||||
storeOnChain: this.radioButtonsCheck.checked ? true : false,
|
storeOnChain: this.radioButtonsCheck.checked ? true : false,
|
||||||
lockState:this.radioButtonsCheck_lock.checked ? 2 : 0,
|
lockState: this.radioButtonsCheck_lock.checked ? 2 : 0,
|
||||||
unlockPrice:this.LockInput.value,
|
unlockPrice: this.LockInput.value,
|
||||||
|
rating: this.ratingRadioButtonsCheck0.checked
|
||||||
|
? 0
|
||||||
|
: this.ratingRadioButtonsCheck1.checked
|
||||||
|
? 1
|
||||||
|
: this.ratingRadioButtonsCheck2.checked
|
||||||
|
? 2
|
||||||
|
: -1,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!this.keyInput.value) {
|
if (!this.keyInput.value) {
|
||||||
@@ -644,8 +827,8 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
throw new Error("Title is required");
|
throw new Error("Title is required");
|
||||||
}
|
}
|
||||||
|
|
||||||
if(this.radioButtonsCheck_lock.checked){
|
if (this.radioButtonsCheck_lock.checked) {
|
||||||
if (!this.LockInput.value){
|
if (!this.LockInput.value) {
|
||||||
throw new Error("Price is required");
|
throw new Error("Price is required");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -695,23 +878,23 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
"Uploading workflow..."
|
"Uploading workflow..."
|
||||||
);
|
);
|
||||||
|
|
||||||
if (res.status && res.data.status && res.data) {
|
if (res.status && res.data.status && res.data) {
|
||||||
localStorage.setItem("copus_token",this.keyInput.value);
|
localStorage.setItem("copus_token", this.keyInput.value);
|
||||||
const { data } = res.data;
|
const { data } = res.data;
|
||||||
if (data) {
|
if (data) {
|
||||||
const url = `${DEFAULT_HOMEPAGE_URL}/work/${data}`;
|
const url = `${DEFAULT_HOMEPAGE_URL}/work/${data}`;
|
||||||
this.message.innerHTML = `Workflow has been shared successfully. <a href="${url}" target="_blank">Click here to view it.</a>`;
|
this.message.innerHTML = `Workflow has been shared successfully. <a href="${url}" target="_blank">Click here to view it.</a>`;
|
||||||
this.previewImage.src = "";
|
this.previewImage.src = "";
|
||||||
this.previewImage.style.display = "none";
|
this.previewImage.style.display = "none";
|
||||||
this.uploadedImages = [];
|
this.uploadedImages = [];
|
||||||
this.allFilesImages = [];
|
this.allFilesImages = [];
|
||||||
this.allFiles = [];
|
this.allFiles = [];
|
||||||
this.TitleInput.value = "";
|
this.TitleInput.value = "";
|
||||||
this.SubTitleInput.value = "";
|
this.SubTitleInput.value = "";
|
||||||
this.descriptionInput.value = "";
|
this.descriptionInput.value = "";
|
||||||
this.selectedFile = null;
|
this.selectedFile = null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
throw new Error("Error sharing workflow: " + e.message);
|
throw new Error("Error sharing workflow: " + e.message);
|
||||||
}
|
}
|
||||||
@@ -757,7 +940,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
this.element.style.display = "block";
|
this.element.style.display = "block";
|
||||||
this.previewImage.src = "";
|
this.previewImage.src = "";
|
||||||
this.previewImage.style.display = "none";
|
this.previewImage.style.display = "none";
|
||||||
this.keyInput.value = apiToken!=null?apiToken:"";
|
this.keyInput.value = apiToken != null ? apiToken : "";
|
||||||
this.uploadedImages = [];
|
this.uploadedImages = [];
|
||||||
this.allFilesImages = [];
|
this.allFilesImages = [];
|
||||||
this.allFiles = [];
|
this.allFiles = [];
|
||||||
@@ -67,7 +67,7 @@ export class OpenArtShareDialog extends ComfyDialog {
|
|||||||
async readKey() {
|
async readKey() {
|
||||||
let key = ""
|
let key = ""
|
||||||
try {
|
try {
|
||||||
key = await api.fetchApi(`/v2/manager/get_openart_auth`)
|
key = await api.fetchApi(`/manager/get_openart_auth`)
|
||||||
.then(response => response.json())
|
.then(response => response.json())
|
||||||
.then(data => {
|
.then(data => {
|
||||||
return data.openart_key;
|
return data.openart_key;
|
||||||
@@ -82,7 +82,7 @@ export class OpenArtShareDialog extends ComfyDialog {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async saveKey(value) {
|
async saveKey(value) {
|
||||||
await api.fetchApi(`/v2/manager/set_openart_auth`, {
|
await api.fetchApi(`/manager/set_openart_auth`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {'Content-Type': 'application/json'},
|
headers: {'Content-Type': 'application/json'},
|
||||||
body: JSON.stringify({
|
body: JSON.stringify({
|
||||||
@@ -399,7 +399,7 @@ export class OpenArtShareDialog extends ComfyDialog {
|
|||||||
form.append("file", uploadFile);
|
form.append("file", uploadFile);
|
||||||
try {
|
try {
|
||||||
const res = await this.fetchApi(
|
const res = await this.fetchApi(
|
||||||
`/v2/workflows/upload_thumbnail`,
|
`/workflows/upload_thumbnail`,
|
||||||
{
|
{
|
||||||
method: "POST",
|
method: "POST",
|
||||||
body: form,
|
body: form,
|
||||||
@@ -459,7 +459,7 @@ export class OpenArtShareDialog extends ComfyDialog {
|
|||||||
throw new Error("Title is required");
|
throw new Error("Title is required");
|
||||||
}
|
}
|
||||||
|
|
||||||
const current_snapshot = await api.fetchApi(`/v2/snapshot/get_current`)
|
const current_snapshot = await api.fetchApi(`/snapshot/get_current`)
|
||||||
.then(response => response.json())
|
.then(response => response.json())
|
||||||
.catch(error => {
|
.catch(error => {
|
||||||
// console.log(error);
|
// console.log(error);
|
||||||
@@ -489,7 +489,7 @@ export class OpenArtShareDialog extends ComfyDialog {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await this.fetchApi(
|
const response = await this.fetchApi(
|
||||||
"/v2/workflows/publish",
|
"/workflows/publish",
|
||||||
{
|
{
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {"Content-Type": "application/json"},
|
headers: {"Content-Type": "application/json"},
|
||||||
@@ -179,7 +179,7 @@ export class YouMLShareDialog extends ComfyDialog {
|
|||||||
async loadToken() {
|
async loadToken() {
|
||||||
let key = ""
|
let key = ""
|
||||||
try {
|
try {
|
||||||
const response = await api.fetchApi(`/v2/manager/youml/settings`)
|
const response = await api.fetchApi(`/manager/youml/settings`)
|
||||||
const settings = await response.json()
|
const settings = await response.json()
|
||||||
return settings.token
|
return settings.token
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -188,7 +188,7 @@ export class YouMLShareDialog extends ComfyDialog {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async saveToken(value) {
|
async saveToken(value) {
|
||||||
await api.fetchApi(`/v2/manager/youml/settings`, {
|
await api.fetchApi(`/manager/youml/settings`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {'Content-Type': 'application/json'},
|
headers: {'Content-Type': 'application/json'},
|
||||||
body: JSON.stringify({
|
body: JSON.stringify({
|
||||||
@@ -380,7 +380,7 @@ export class YouMLShareDialog extends ComfyDialog {
|
|||||||
try {
|
try {
|
||||||
let snapshotData = null;
|
let snapshotData = null;
|
||||||
try {
|
try {
|
||||||
const snapshot = await api.fetchApi(`/v2/snapshot/get_current`)
|
const snapshot = await api.fetchApi(`/snapshot/get_current`)
|
||||||
snapshotData = await snapshot.json()
|
snapshotData = await snapshot.json()
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error("Failed to get snapshot", e)
|
console.error("Failed to get snapshot", e)
|
||||||
@@ -100,6 +100,19 @@ export function show_message(msg) {
|
|||||||
app.ui.dialog.element.style.zIndex = 1100;
|
app.ui.dialog.element.style.zIndex = 1100;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function handle403Response(res, defaultMessage) {
|
||||||
|
try {
|
||||||
|
const data = await res.json();
|
||||||
|
if(data.error === 'comfyui_outdated') {
|
||||||
|
show_message('ComfyUI version is outdated.<BR>Please update ComfyUI to use Manager normally.');
|
||||||
|
} else {
|
||||||
|
show_message(defaultMessage || 'This action is not allowed with this security level configuration.');
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
show_message(defaultMessage || 'This action is not allowed with this security level configuration.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export async function sleep(ms) {
|
export async function sleep(ms) {
|
||||||
return new Promise(resolve => setTimeout(resolve, ms));
|
return new Promise(resolve => setTimeout(resolve, ms));
|
||||||
}
|
}
|
||||||
@@ -163,20 +176,23 @@ export async function customPrompt(title, message) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
export function rebootAPI() {
|
export async function rebootAPI() {
|
||||||
if ('electronAPI' in window) {
|
if ('electronAPI' in window) {
|
||||||
window.electronAPI.restartApp();
|
window.electronAPI.restartApp();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
customConfirm("Are you sure you'd like to reboot the server?").then((isConfirmed) => {
|
const isConfirmed = await customConfirm("Are you sure you'd like to reboot the server?");
|
||||||
if (isConfirmed) {
|
if (isConfirmed) {
|
||||||
try {
|
try {
|
||||||
api.fetchApi("/v2/manager/reboot");
|
const response = await api.fetchApi("/manager/reboot");
|
||||||
|
if (response.status == 403) {
|
||||||
|
await handle403Response(response);
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
catch(exception) {}
|
|
||||||
}
|
}
|
||||||
});
|
catch(exception) {}
|
||||||
|
}
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@@ -210,13 +226,13 @@ export async function install_pip(packages) {
|
|||||||
if(packages.includes('&'))
|
if(packages.includes('&'))
|
||||||
app.ui.dialog.show(`Invalid PIP package enumeration: '${packages}'`);
|
app.ui.dialog.show(`Invalid PIP package enumeration: '${packages}'`);
|
||||||
|
|
||||||
const res = await api.fetchApi("/v2/customnode/install/pip", {
|
const res = await api.fetchApi("/customnode/install/pip", {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
body: packages,
|
body: packages,
|
||||||
});
|
});
|
||||||
|
|
||||||
if(res.status == 403) {
|
if(res.status == 403) {
|
||||||
show_message('This action is not allowed with this security level configuration.');
|
await handle403Response(res);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -245,13 +261,13 @@ export async function install_via_git_url(url, manager_dialog) {
|
|||||||
|
|
||||||
show_message(`Wait...<BR><BR>Installing '${url}'`);
|
show_message(`Wait...<BR><BR>Installing '${url}'`);
|
||||||
|
|
||||||
const res = await api.fetchApi("/v2/customnode/install/git_url", {
|
const res = await api.fetchApi("/customnode/install/git_url", {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
body: url,
|
body: url,
|
||||||
});
|
});
|
||||||
|
|
||||||
if(res.status == 403) {
|
if(res.status == 403) {
|
||||||
show_message('This action is not allowed with this security level configuration.');
|
await handle403Response(res);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -262,9 +278,9 @@ export async function install_via_git_url(url, manager_dialog) {
|
|||||||
const self = this;
|
const self = this;
|
||||||
|
|
||||||
rebootButton.addEventListener("click",
|
rebootButton.addEventListener("click",
|
||||||
function() {
|
async function() {
|
||||||
if(rebootAPI()) {
|
if(await rebootAPI()) {
|
||||||
manager_dialog.close();
|
manager_instance.close();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -630,14 +646,6 @@ export function showTooltip(target, text, className = 'cn-tooltip', styleMap = {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export function generateUUID() {
|
|
||||||
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
|
|
||||||
const r = Math.random() * 16 | 0;
|
|
||||||
const v = c === 'x' ? r : (r & 0x3 | 0x8);
|
|
||||||
return v.toString(16);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function initTooltip () {
|
function initTooltip () {
|
||||||
const mouseenterHandler = (e) => {
|
const mouseenterHandler = (e) => {
|
||||||
const target = e.target;
|
const target = e.target;
|
||||||
@@ -64,7 +64,7 @@ function storeGroupNode(name, data, register=true) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export async function load_components() {
|
export async function load_components() {
|
||||||
let data = await api.fetchApi('/v2/manager/component/loads', {method: "POST"});
|
let data = await api.fetchApi('/manager/component/loads', {method: "POST"});
|
||||||
let components = await data.json();
|
let components = await data.json();
|
||||||
|
|
||||||
let start_time = Date.now();
|
let start_time = Date.now();
|
||||||
@@ -222,7 +222,7 @@ async function save_as_component(node, version, author, prefix, nodename, packna
|
|||||||
pack_map[packname] = component_name;
|
pack_map[packname] = component_name;
|
||||||
rpack_map[component_name] = subgraph;
|
rpack_map[component_name] = subgraph;
|
||||||
|
|
||||||
const res = await api.fetchApi('/v2/manager/component/save', {
|
const res = await api.fetchApi('/manager/component/save', {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
@@ -259,7 +259,7 @@ async function import_component(component_name, component, mode) {
|
|||||||
workflow: component
|
workflow: component
|
||||||
};
|
};
|
||||||
|
|
||||||
const res = await api.fetchApi('/v2/manager/component/save', {
|
const res = await api.fetchApi('/manager/component/save', {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: { "Content-Type": "application/json", },
|
headers: { "Content-Type": "application/json", },
|
||||||
body: JSON.stringify(body)
|
body: JSON.stringify(body)
|
||||||
@@ -709,7 +709,7 @@ app.handleFile = handleFile;
|
|||||||
|
|
||||||
let current_component_policy = 'workflow';
|
let current_component_policy = 'workflow';
|
||||||
try {
|
try {
|
||||||
api.fetchApi('/v2/manager/policy/component')
|
api.fetchApi('/manager/policy/component')
|
||||||
.then(response => response.text())
|
.then(response => response.text())
|
||||||
.then(data => { current_component_policy = data; });
|
.then(data => { current_component_policy = data; });
|
||||||
}
|
}
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
.cn-manager {
|
.cn-manager {
|
||||||
--grid-font: -apple-system, BlinkMacSystemFont, "Segue UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
|
--grid-font: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
|
||||||
z-index: 1099;
|
z-index: 1099;
|
||||||
width: 80%;
|
width: 80%;
|
||||||
height: 80%;
|
height: 80%;
|
||||||
@@ -7,7 +7,7 @@ import {
|
|||||||
fetchData, md5, icons, show_message, customConfirm, customAlert, customPrompt,
|
fetchData, md5, icons, show_message, customConfirm, customAlert, customPrompt,
|
||||||
sanitizeHTML, infoToast, showTerminal, setNeedRestart,
|
sanitizeHTML, infoToast, showTerminal, setNeedRestart,
|
||||||
storeColumnWidth, restoreColumnWidth, getTimeAgo, copyText, loadCss,
|
storeColumnWidth, restoreColumnWidth, getTimeAgo, copyText, loadCss,
|
||||||
showPopover, hidePopover, generateUUID
|
showPopover, hidePopover, handle403Response
|
||||||
} from "./common.js";
|
} from "./common.js";
|
||||||
|
|
||||||
// https://cenfun.github.io/turbogrid/api.html
|
// https://cenfun.github.io/turbogrid/api.html
|
||||||
@@ -66,7 +66,7 @@ export class CustomNodesManager {
|
|||||||
this.id = "cn-manager";
|
this.id = "cn-manager";
|
||||||
|
|
||||||
app.registerExtension({
|
app.registerExtension({
|
||||||
name: "Comfy.Legacy.CustomNodesManager",
|
name: "Comfy.CustomNodesManager",
|
||||||
afterConfigureGraph: (missingNodeTypes) => {
|
afterConfigureGraph: (missingNodeTypes) => {
|
||||||
const item = this.getFilterItem(ShowMode.MISSING);
|
const item = this.getFilterItem(ShowMode.MISSING);
|
||||||
if (item) {
|
if (item) {
|
||||||
@@ -459,7 +459,7 @@ export class CustomNodesManager {
|
|||||||
|
|
||||||
".cn-manager-stop": {
|
".cn-manager-stop": {
|
||||||
click: () => {
|
click: () => {
|
||||||
api.fetchApi('/v2/manager/queue/reset');
|
api.fetchApi('/manager/queue/reset');
|
||||||
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
|
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -635,7 +635,7 @@ export class CustomNodesManager {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await api.fetchApi(`/v2/customnode/import_fail_info`, {
|
const response = await api.fetchApi(`/customnode/import_fail_info`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
body: JSON.stringify(info)
|
body: JSON.stringify(info)
|
||||||
@@ -714,6 +714,7 @@ export class CustomNodesManager {
|
|||||||
link.href = rowItem.reference;
|
link.href = rowItem.reference;
|
||||||
link.target = '_blank';
|
link.target = '_blank';
|
||||||
link.innerHTML = `<b>${title}</b>`;
|
link.innerHTML = `<b>${title}</b>`;
|
||||||
|
link.title = rowItem.originalData.id;
|
||||||
container.appendChild(link);
|
container.appendChild(link);
|
||||||
|
|
||||||
return container;
|
return container;
|
||||||
@@ -1243,7 +1244,7 @@ export class CustomNodesManager {
|
|||||||
async loadNodes(node_packs) {
|
async loadNodes(node_packs) {
|
||||||
const mode = manager_instance.datasrc_combo.value;
|
const mode = manager_instance.datasrc_combo.value;
|
||||||
this.showStatus(`Loading node mappings (${mode}) ...`);
|
this.showStatus(`Loading node mappings (${mode}) ...`);
|
||||||
const res = await fetchData(`/v2/customnode/getmappings?mode=${mode}`);
|
const res = await fetchData(`/customnode/getmappings?mode=${mode}`);
|
||||||
if (res.error) {
|
if (res.error) {
|
||||||
console.log(res.error);
|
console.log(res.error);
|
||||||
return;
|
return;
|
||||||
@@ -1395,10 +1396,10 @@ export class CustomNodesManager {
|
|||||||
this.showLoading();
|
this.showLoading();
|
||||||
let res;
|
let res;
|
||||||
if(is_enable) {
|
if(is_enable) {
|
||||||
res = await api.fetchApi(`/v2/customnode/disabled_versions/${node_id}`, { cache: "no-store" });
|
res = await api.fetchApi(`/customnode/disabled_versions/${node_id}`, { cache: "no-store" });
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
res = await api.fetchApi(`/v2/customnode/versions/${node_id}`, { cache: "no-store" });
|
res = await api.fetchApi(`/customnode/versions/${node_id}`, { cache: "no-store" });
|
||||||
}
|
}
|
||||||
this.hideLoading();
|
this.hideLoading();
|
||||||
|
|
||||||
@@ -1440,6 +1441,13 @@ export class CustomNodesManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async installNodes(list, btn, title, selected_version) {
|
async installNodes(list, btn, title, selected_version) {
|
||||||
|
let stats = await api.fetchApi('/manager/queue/status');
|
||||||
|
stats = await stats.json();
|
||||||
|
if(stats.is_processing) {
|
||||||
|
customAlert(`[ComfyUI-Manager] There are already tasks in progress. Please try again after it is completed. (${stats.done_count}/${stats.total_count})`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const { target, label, mode} = btn;
|
const { target, label, mode} = btn;
|
||||||
|
|
||||||
if(mode === "uninstall") {
|
if(mode === "uninstall") {
|
||||||
@@ -1466,9 +1474,9 @@ export class CustomNodesManager {
|
|||||||
let needRestart = false;
|
let needRestart = false;
|
||||||
let errorMsg = "";
|
let errorMsg = "";
|
||||||
|
|
||||||
let target_items = [];
|
await api.fetchApi('/manager/queue/reset');
|
||||||
|
|
||||||
let batch = {};
|
let target_items = [];
|
||||||
|
|
||||||
for (const hash of list) {
|
for (const hash of list) {
|
||||||
const item = this.grid.getRowItemBy("hash", hash);
|
const item = this.grid.getRowItemBy("hash", hash);
|
||||||
@@ -1511,11 +1519,32 @@ export class CustomNodesManager {
|
|||||||
api_mode = 'reinstall';
|
api_mode = 'reinstall';
|
||||||
}
|
}
|
||||||
|
|
||||||
if(batch[api_mode]) {
|
const res = await api.fetchApi(`/manager/queue/${api_mode}`, {
|
||||||
batch[api_mode].push(data);
|
method: 'POST',
|
||||||
}
|
body: JSON.stringify(data)
|
||||||
else {
|
});
|
||||||
batch[api_mode] = [data];
|
|
||||||
|
if (res.status != 200) {
|
||||||
|
errorMsg = `'${item.title}': `;
|
||||||
|
|
||||||
|
if(res.status == 403) {
|
||||||
|
try {
|
||||||
|
const data = await res.json();
|
||||||
|
if(data.error === 'comfyui_outdated') {
|
||||||
|
errorMsg += `ComfyUI version is outdated. Please update ComfyUI to use Manager normally.\n`;
|
||||||
|
} else {
|
||||||
|
errorMsg += `This action is not allowed with this security level configuration.\n`;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
errorMsg += `This action is not allowed with this security level configuration.\n`;
|
||||||
|
}
|
||||||
|
} else if(res.status == 404) {
|
||||||
|
errorMsg += `With the current security level configuration, only custom nodes from the <B>"default channel"</B> can be installed.\n`;
|
||||||
|
} else {
|
||||||
|
errorMsg += await res.text() + '\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1532,24 +1561,7 @@ export class CustomNodesManager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
this.batch_id = generateUUID();
|
await api.fetchApi('/manager/queue/start');
|
||||||
batch['batch_id'] = this.batch_id;
|
|
||||||
|
|
||||||
const res = await api.fetchApi(`/v2/manager/queue/batch`, {
|
|
||||||
method: 'POST',
|
|
||||||
body: JSON.stringify(batch)
|
|
||||||
});
|
|
||||||
|
|
||||||
let failed = await res.json();
|
|
||||||
|
|
||||||
if(failed.length > 0) {
|
|
||||||
for(let k in failed) {
|
|
||||||
let hash = failed[k];
|
|
||||||
const item = this.grid.getRowItemBy("hash", hash);
|
|
||||||
errorMsg = `[FAIL] ${item.title}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.showStop();
|
this.showStop();
|
||||||
showTerminal();
|
showTerminal();
|
||||||
}
|
}
|
||||||
@@ -1557,9 +1569,6 @@ export class CustomNodesManager {
|
|||||||
|
|
||||||
async onQueueStatus(event) {
|
async onQueueStatus(event) {
|
||||||
let self = CustomNodesManager.instance;
|
let self = CustomNodesManager.instance;
|
||||||
// If legacy manager front is not open, return early (using new manager front)
|
|
||||||
if (self.element?.style.display === 'none') return
|
|
||||||
|
|
||||||
if(event.detail.status == 'in_progress' && event.detail.ui_target == 'nodepack_manager') {
|
if(event.detail.status == 'in_progress' && event.detail.ui_target == 'nodepack_manager') {
|
||||||
const hash = event.detail.target;
|
const hash = event.detail.target;
|
||||||
|
|
||||||
@@ -1570,7 +1579,7 @@ export class CustomNodesManager {
|
|||||||
self.grid.updateCell(item, "action");
|
self.grid.updateCell(item, "action");
|
||||||
self.grid.setRowSelected(item, false);
|
self.grid.setRowSelected(item, false);
|
||||||
}
|
}
|
||||||
else if(event.detail.status == 'batch-done' && event.detail.batch_id == self.batch_id) {
|
else if(event.detail.status == 'done') {
|
||||||
self.hideStop();
|
self.hideStop();
|
||||||
self.onQueueCompleted(event.detail);
|
self.onQueueCompleted(event.detail);
|
||||||
}
|
}
|
||||||
@@ -1625,17 +1634,35 @@ export class CustomNodesManager {
|
|||||||
getNodesInWorkflow() {
|
getNodesInWorkflow() {
|
||||||
let usedGroupNodes = new Set();
|
let usedGroupNodes = new Set();
|
||||||
let allUsedNodes = {};
|
let allUsedNodes = {};
|
||||||
|
const visitedGraphs = new Set();
|
||||||
|
|
||||||
for(let k in app.graph._nodes) {
|
const visitGraph = (graph) => {
|
||||||
let node = app.graph._nodes[k];
|
if (!graph || visitedGraphs.has(graph)) return;
|
||||||
|
visitedGraphs.add(graph);
|
||||||
|
|
||||||
if(node.type.startsWith('workflow>')) {
|
const nodes = graph._nodes || graph.nodes || [];
|
||||||
usedGroupNodes.add(node.type.slice(9));
|
for(let k in nodes) {
|
||||||
continue;
|
let node = nodes[k];
|
||||||
|
if (!node) continue;
|
||||||
|
|
||||||
|
// If it's a SubgraphNode, recurse into its graph and continue searching
|
||||||
|
if (node.isSubgraphNode?.() && node.subgraph) {
|
||||||
|
visitGraph(node.subgraph);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!node.type) continue;
|
||||||
|
|
||||||
|
// Group nodes / components
|
||||||
|
if(typeof node.type === 'string' && node.type.startsWith('workflow>')) {
|
||||||
|
usedGroupNodes.add(node.type.slice(9));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
allUsedNodes[node.type] = node;
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
allUsedNodes[node.type] = node;
|
visitGraph(app.graph);
|
||||||
}
|
|
||||||
|
|
||||||
for(let k of usedGroupNodes) {
|
for(let k of usedGroupNodes) {
|
||||||
let subnodes = app.graph.extra.groupNodes[k]?.nodes;
|
let subnodes = app.graph.extra.groupNodes[k]?.nodes;
|
||||||
@@ -1746,7 +1773,7 @@ export class CustomNodesManager {
|
|||||||
async getMissingNodesLegacy(hashMap, missing_nodes) {
|
async getMissingNodesLegacy(hashMap, missing_nodes) {
|
||||||
const mode = manager_instance.datasrc_combo.value;
|
const mode = manager_instance.datasrc_combo.value;
|
||||||
this.showStatus(`Loading missing nodes (${mode}) ...`);
|
this.showStatus(`Loading missing nodes (${mode}) ...`);
|
||||||
const res = await fetchData(`/v2/customnode/getmappings?mode=${mode}`);
|
const res = await fetchData(`/customnode/getmappings?mode=${mode}`);
|
||||||
if (res.error) {
|
if (res.error) {
|
||||||
this.showError(`Failed to get custom node mappings: ${res.error}`);
|
this.showError(`Failed to get custom node mappings: ${res.error}`);
|
||||||
return;
|
return;
|
||||||
@@ -1861,7 +1888,7 @@ export class CustomNodesManager {
|
|||||||
async getAlternatives() {
|
async getAlternatives() {
|
||||||
const mode = manager_instance.datasrc_combo.value;
|
const mode = manager_instance.datasrc_combo.value;
|
||||||
this.showStatus(`Loading alternatives (${mode}) ...`);
|
this.showStatus(`Loading alternatives (${mode}) ...`);
|
||||||
const res = await fetchData(`/v2/customnode/alternatives?mode=${mode}`);
|
const res = await fetchData(`/customnode/alternatives?mode=${mode}`);
|
||||||
if (res.error) {
|
if (res.error) {
|
||||||
this.showError(`Failed to get alternatives: ${res.error}`);
|
this.showError(`Failed to get alternatives: ${res.error}`);
|
||||||
return [];
|
return [];
|
||||||
@@ -1909,7 +1936,7 @@ export class CustomNodesManager {
|
|||||||
infoToast('Fetching updated information. This may take some time if many custom nodes are installed.');
|
infoToast('Fetching updated information. This may take some time if many custom nodes are installed.');
|
||||||
}
|
}
|
||||||
|
|
||||||
const res = await fetchData(`/v2/customnode/getlist?mode=${mode}${skip_update}`);
|
const res = await fetchData(`/customnode/getlist?mode=${mode}${skip_update}`);
|
||||||
if (res.error) {
|
if (res.error) {
|
||||||
this.showError("Failed to get custom node list.");
|
this.showError("Failed to get custom node list.");
|
||||||
this.hideLoading();
|
this.hideLoading();
|
||||||
@@ -3,7 +3,7 @@ import { $el } from "../../scripts/ui.js";
|
|||||||
import {
|
import {
|
||||||
manager_instance, rebootAPI,
|
manager_instance, rebootAPI,
|
||||||
fetchData, md5, icons, show_message, customAlert, infoToast, showTerminal,
|
fetchData, md5, icons, show_message, customAlert, infoToast, showTerminal,
|
||||||
storeColumnWidth, restoreColumnWidth, loadCss, generateUUID
|
storeColumnWidth, restoreColumnWidth, loadCss, handle403Response
|
||||||
} from "./common.js";
|
} from "./common.js";
|
||||||
import { api } from "../../scripts/api.js";
|
import { api } from "../../scripts/api.js";
|
||||||
|
|
||||||
@@ -175,7 +175,7 @@ export class ModelManager {
|
|||||||
|
|
||||||
".cmm-manager-stop": {
|
".cmm-manager-stop": {
|
||||||
click: () => {
|
click: () => {
|
||||||
api.fetchApi('/v2/manager/queue/reset');
|
api.fetchApi('/manager/queue/reset');
|
||||||
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
|
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -435,15 +435,23 @@ export class ModelManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async installModels(list, btn) {
|
async installModels(list, btn) {
|
||||||
|
let stats = await api.fetchApi('/manager/queue/status');
|
||||||
|
|
||||||
|
stats = await stats.json();
|
||||||
|
if(stats.is_processing) {
|
||||||
|
customAlert(`[ComfyUI-Manager] There are already tasks in progress. Please try again after it is completed. (${stats.done_count}/${stats.total_count})`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
btn.classList.add("cmm-btn-loading");
|
btn.classList.add("cmm-btn-loading");
|
||||||
this.showError("");
|
this.showError("");
|
||||||
|
|
||||||
let needRefresh = false;
|
let needRefresh = false;
|
||||||
let errorMsg = "";
|
let errorMsg = "";
|
||||||
|
|
||||||
let target_items = [];
|
await api.fetchApi('/manager/queue/reset');
|
||||||
|
|
||||||
let batch = {};
|
let target_items = [];
|
||||||
|
|
||||||
for (const item of list) {
|
for (const item of list) {
|
||||||
this.grid.scrollRowIntoView(item);
|
this.grid.scrollRowIntoView(item);
|
||||||
@@ -460,12 +468,30 @@ export class ModelManager {
|
|||||||
const data = item.originalData;
|
const data = item.originalData;
|
||||||
data.ui_id = item.hash;
|
data.ui_id = item.hash;
|
||||||
|
|
||||||
|
const res = await api.fetchApi(`/manager/queue/install_model`, {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify(data)
|
||||||
|
});
|
||||||
|
|
||||||
if(batch['install_model']) {
|
if (res.status != 200) {
|
||||||
batch['install_model'].push(data);
|
errorMsg = `'${item.name}': `;
|
||||||
}
|
|
||||||
else {
|
if(res.status == 403) {
|
||||||
batch['install_model'] = [data];
|
try {
|
||||||
|
const data = await res.json();
|
||||||
|
if(data.error === 'comfyui_outdated') {
|
||||||
|
errorMsg += `ComfyUI version is outdated. Please update ComfyUI to use Manager normally.\n`;
|
||||||
|
} else {
|
||||||
|
errorMsg += `This action is not allowed with this security level configuration.\n`;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
errorMsg += `This action is not allowed with this security level configuration.\n`;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
errorMsg += await res.text() + '\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -482,24 +508,7 @@ export class ModelManager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
this.batch_id = generateUUID();
|
await api.fetchApi('/manager/queue/start');
|
||||||
batch['batch_id'] = this.batch_id;
|
|
||||||
|
|
||||||
const res = await api.fetchApi(`/v2/manager/queue/batch`, {
|
|
||||||
method: 'POST',
|
|
||||||
body: JSON.stringify(batch)
|
|
||||||
});
|
|
||||||
|
|
||||||
let failed = await res.json();
|
|
||||||
|
|
||||||
if(failed.length > 0) {
|
|
||||||
for(let k in failed) {
|
|
||||||
let hash = failed[k];
|
|
||||||
const item = self.grid.getRowItemBy("hash", hash);
|
|
||||||
errorMsg = `[FAIL] ${item.title}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.showStop();
|
this.showStop();
|
||||||
showTerminal();
|
showTerminal();
|
||||||
}
|
}
|
||||||
@@ -519,7 +528,7 @@ export class ModelManager {
|
|||||||
// self.grid.updateCell(item, "tg-column-select");
|
// self.grid.updateCell(item, "tg-column-select");
|
||||||
self.grid.updateRow(item);
|
self.grid.updateRow(item);
|
||||||
}
|
}
|
||||||
else if(event.detail.status == 'batch-done') {
|
else if(event.detail.status == 'done') {
|
||||||
self.hideStop();
|
self.hideStop();
|
||||||
self.onQueueCompleted(event.detail);
|
self.onQueueCompleted(event.detail);
|
||||||
}
|
}
|
||||||
@@ -645,7 +654,7 @@ export class ModelManager {
|
|||||||
|
|
||||||
const mode = manager_instance.datasrc_combo.value;
|
const mode = manager_instance.datasrc_combo.value;
|
||||||
|
|
||||||
const res = await fetchData(`/v2/externalmodel/getlist?mode=${mode}`);
|
const res = await fetchData(`/externalmodel/getlist?mode=${mode}`);
|
||||||
if (res.error) {
|
if (res.error) {
|
||||||
this.showError("Failed to get external model list.");
|
this.showError("Failed to get external model list.");
|
||||||
this.hideLoading();
|
this.hideLoading();
|
||||||
@@ -142,7 +142,7 @@ function node_info_copy(src, dest, connect_both, copy_shape) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
app.registerExtension({
|
app.registerExtension({
|
||||||
name: "Comfy.Legacy.Manager.NodeFixer",
|
name: "Comfy.Manager.NodeFixer",
|
||||||
beforeRegisterNodeDef(nodeType, nodeData, app) {
|
beforeRegisterNodeDef(nodeType, nodeData, app) {
|
||||||
addMenuHandler(nodeType, function (_, options) {
|
addMenuHandler(nodeType, function (_, options) {
|
||||||
options.push({
|
options.push({
|
||||||
@@ -1,16 +1,16 @@
|
|||||||
import { app } from "../../scripts/app.js";
|
import { app } from "../../scripts/app.js";
|
||||||
import { api } from "../../scripts/api.js"
|
import { api } from "../../scripts/api.js"
|
||||||
import { ComfyDialog, $el } from "../../scripts/ui.js";
|
import { ComfyDialog, $el } from "../../scripts/ui.js";
|
||||||
import { manager_instance, rebootAPI, show_message } from "./common.js";
|
import { manager_instance, rebootAPI, show_message, handle403Response } from "./common.js";
|
||||||
|
|
||||||
|
|
||||||
async function restore_snapshot(target) {
|
async function restore_snapshot(target) {
|
||||||
if(SnapshotManager.instance) {
|
if(SnapshotManager.instance) {
|
||||||
try {
|
try {
|
||||||
const response = await api.fetchApi(`/v2/snapshot/restore?target=${target}`, { cache: "no-store" });
|
const response = await api.fetchApi(`/snapshot/restore?target=${target}`, { cache: "no-store" });
|
||||||
|
|
||||||
if(response.status == 403) {
|
if(response.status == 403) {
|
||||||
show_message('This action is not allowed with this security level configuration.');
|
await handle403Response(response);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -35,10 +35,10 @@ async function restore_snapshot(target) {
|
|||||||
async function remove_snapshot(target) {
|
async function remove_snapshot(target) {
|
||||||
if(SnapshotManager.instance) {
|
if(SnapshotManager.instance) {
|
||||||
try {
|
try {
|
||||||
const response = await api.fetchApi(`/v2/snapshot/remove?target=${target}`, { cache: "no-store" });
|
const response = await api.fetchApi(`/snapshot/remove?target=${target}`, { cache: "no-store" });
|
||||||
|
|
||||||
if(response.status == 403) {
|
if(response.status == 403) {
|
||||||
show_message('This action is not allowed with this security level configuration.');
|
await handle403Response(response);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -61,7 +61,7 @@ async function remove_snapshot(target) {
|
|||||||
|
|
||||||
async function save_current_snapshot() {
|
async function save_current_snapshot() {
|
||||||
try {
|
try {
|
||||||
const response = await api.fetchApi('/v2/snapshot/save', { cache: "no-store" });
|
const response = await api.fetchApi('/snapshot/save', { cache: "no-store" });
|
||||||
app.ui.dialog.close();
|
app.ui.dialog.close();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@@ -76,7 +76,7 @@ async function save_current_snapshot() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function getSnapshotList() {
|
async function getSnapshotList() {
|
||||||
const response = await api.fetchApi(`/v2/snapshot/getlist`);
|
const response = await api.fetchApi(`/snapshot/getlist`);
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
return data;
|
return data;
|
||||||
}
|
}
|
||||||
@@ -145,8 +145,8 @@ export class SnapshotManager extends ComfyDialog {
|
|||||||
if(btn_id) {
|
if(btn_id) {
|
||||||
const rebootButton = document.getElementById(btn_id);
|
const rebootButton = document.getElementById(btn_id);
|
||||||
const self = this;
|
const self = this;
|
||||||
rebootButton.onclick = function() {
|
rebootButton.onclick = async function() {
|
||||||
if(rebootAPI()) {
|
if(await rebootAPI()) {
|
||||||
self.close();
|
self.close();
|
||||||
self.manager_dialog.close();
|
self.manager_dialog.close();
|
||||||
}
|
}
|
||||||
@@ -38,7 +38,7 @@ class WorkflowMetadataExtension {
|
|||||||
* enabled is true if the node is enabled, false if it is disabled
|
* enabled is true if the node is enabled, false if it is disabled
|
||||||
*/
|
*/
|
||||||
async getInstalledNodes() {
|
async getInstalledNodes() {
|
||||||
const res = await api.fetchApi("/v2/customnode/installed");
|
const res = await api.fetchApi("/customnode/installed");
|
||||||
return await res.json();
|
return await res.json();
|
||||||
}
|
}
|
||||||
|
|
||||||
614
model-list.json
614
model-list.json
@@ -1973,6 +1973,97 @@
|
|||||||
"url": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
|
"url": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
|
||||||
"size": "375.0MB"
|
"size": "375.0MB"
|
||||||
},
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_tiny.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_small.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_large.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_tiny.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_small.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_base_plus.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_large.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "seecoder v1.0",
|
"name": "seecoder v1.0",
|
||||||
"type": "seecoder",
|
"type": "seecoder",
|
||||||
@@ -4006,6 +4097,29 @@
|
|||||||
"size": "649MB"
|
"size": "649MB"
|
||||||
},
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/omnigen2_fp16.safetensors",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "OmniGen2",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "OmniGen2 diffusion model. This is required for using OmniGen2.",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "omnigen2_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors",
|
||||||
|
"size": "7.93GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"type": "clip",
|
||||||
|
"base": "qwen-2.5",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "text encoder for OmniGen2",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"size": "7.51GB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "FLUX.1 [Schnell] Diffusion model",
|
"name": "FLUX.1 [Schnell] Diffusion model",
|
||||||
"type": "diffusion_model",
|
"type": "diffusion_model",
|
||||||
@@ -4023,7 +4137,7 @@
|
|||||||
"type": "VAE",
|
"type": "VAE",
|
||||||
"base": "FLUX.1",
|
"base": "FLUX.1",
|
||||||
"save_path": "vae/FLUX1",
|
"save_path": "vae/FLUX1",
|
||||||
"description": "FLUX.1 VAE model",
|
"description": "FLUX.1 VAE model\nNOTE: This VAE model can also be used for image generation with OmniGen2.",
|
||||||
"reference": "https://huggingface.co/black-forest-labs/FLUX.1-schnell",
|
"reference": "https://huggingface.co/black-forest-labs/FLUX.1-schnell",
|
||||||
"filename": "ae.safetensors",
|
"filename": "ae.safetensors",
|
||||||
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors",
|
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors",
|
||||||
@@ -4931,6 +5045,105 @@
|
|||||||
"size": "1.26GB"
|
"size": "1.26GB"
|
||||||
},
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_ti2v_5B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
|
||||||
|
"size": "10.0GB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "Comfy-Org/umt5_xxl_fp16.safetensors",
|
"name": "Comfy-Org/umt5_xxl_fp16.safetensors",
|
||||||
@@ -5033,6 +5246,50 @@
|
|||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-fp8.safetensors",
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-fp8.safetensors",
|
||||||
"size": "15.7GB"
|
"size": "15.7GB"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 2B Distilled v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "LTX-Video 2B distilled model v0.9.8 with improved prompt understanding and detail generation.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-2b-0.9.8-distilled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled.safetensors",
|
||||||
|
"size": "6.34GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 2B Distilled FP8 v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "Quantized LTX-Video 2B distilled model v0.9.8 with improved prompt understanding and detail generation, optimized for lower VRAM usage.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-2b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"size": "4.46GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 13B Distilled v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "LTX-Video 13B distilled model v0.9.8 with improved prompt understanding and detail generation.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-13b-0.9.8-distilled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.8-distilled.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video 13B Distilled FP8 v0.9.8",
|
||||||
|
"type": "checkpoint",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "checkpoints/LTXV",
|
||||||
|
"description": "Quantized LTX-Video 13B distilled model v0.9.8 with improved prompt understanding and detail generation, optimized for lower VRAM usage.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||||
|
"filename": "ltxv-13b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.8-distilled-fp8.safetensors",
|
||||||
|
"size": "15.7GB"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "LTX-Video 13B Distilled LoRA v0.9.7",
|
"name": "LTX-Video 13B Distilled LoRA v0.9.7",
|
||||||
"type": "lora",
|
"type": "lora",
|
||||||
@@ -5044,6 +5301,50 @@
|
|||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-lora128.safetensors",
|
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-lora128.safetensors",
|
||||||
"size": "1.33GB"
|
"size": "1.33GB"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Depth 13B v0.9.7",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "In-Context LoRA (IC LoRA) for depth-controlled video-to-video generation with precise depth conditioning.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-depth-13b-0.9.7",
|
||||||
|
"filename": "ltxv-097-ic-lora-depth-control-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-depth-13b-0.9.7/resolve/main/ltxv-097-ic-lora-depth-control-comfyui.safetensors",
|
||||||
|
"size": "81.9MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Pose 13B v0.9.7",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "In-Context LoRA (IC LoRA) for pose-controlled video-to-video generation with precise pose conditioning.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-pose-13b-0.9.7",
|
||||||
|
"filename": "ltxv-097-ic-lora-pose-control-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-pose-13b-0.9.7/resolve/main/ltxv-097-ic-lora-pose-control-comfyui.safetensors",
|
||||||
|
"size": "151MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Canny 13B v0.9.7",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "In-Context LoRA (IC LoRA) for canny edge-controlled video-to-video generation with precise edge conditioning.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-canny-13b-0.9.7",
|
||||||
|
"filename": "ltxv-097-ic-lora-canny-control-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-canny-13b-0.9.7/resolve/main/ltxv-097-ic-lora-canny-control-comfyui.safetensors",
|
||||||
|
"size": "81.9MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "LTX-Video ICLoRA Detailer 13B v0.9.8",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "LTX-Video",
|
||||||
|
"save_path": "loras",
|
||||||
|
"description": "A video detailer model on top of LTXV_13B_098_DEV trained on custom data using In-Context LoRA (IC LoRA) method.",
|
||||||
|
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-detailer-13b-0.9.8",
|
||||||
|
"filename": "ltxv-098-ic-lora-detailer-comfyui.safetensors",
|
||||||
|
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-detailer-13b-0.9.8/resolve/main/ltxv-098-ic-lora-detailer-comfyui.safetensors",
|
||||||
|
"size": "1.31GB"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "Latent Bridge Matching for Image Relighting",
|
"name": "Latent Bridge Matching for Image Relighting",
|
||||||
"type": "diffusion_model",
|
"type": "diffusion_model",
|
||||||
@@ -5054,6 +5355,317 @@
|
|||||||
"filename": "LBM_relighting.safetensors",
|
"filename": "LBM_relighting.safetensors",
|
||||||
"url": "https://huggingface.co/jasperai/LBM_relighting/resolve/main/model.safetensors",
|
"url": "https://huggingface.co/jasperai/LBM_relighting/resolve/main/model.safetensors",
|
||||||
"size": "5.02GB"
|
"size": "5.02GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image VAE",
|
||||||
|
"type": "VAE",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "vae/qwen-image",
|
||||||
|
"description": "VAE model for Qwen-Image",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
||||||
|
"filename": "qwen_image_vae.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
|
||||||
|
"size": "335MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen 2.5 VL 7B Text Encoder (fp8_scaled)",
|
||||||
|
"type": "clip",
|
||||||
|
"base": "Qwen-2.5-VL",
|
||||||
|
"save_path": "text_encoders/qwen",
|
||||||
|
"description": "Qwen 2.5 VL 7B text encoder model (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
||||||
|
"filename": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
||||||
|
"size": "3.75GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen 2.5 VL 7B Text Encoder",
|
||||||
|
"type": "clip",
|
||||||
|
"base": "Qwen-2.5-VL",
|
||||||
|
"save_path": "text_encoders/qwen",
|
||||||
|
"description": "Qwen 2.5 VL 7B text encoder model",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
||||||
|
"filename": "qwen_2.5_vl_7b.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b.safetensors",
|
||||||
|
"size": "7.51GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image Diffusion Model (fp8_e4m3fn)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "diffusion_models/qwen-image",
|
||||||
|
"description": "Qwen-Image diffusion model (fp8_e4m3fn)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
||||||
|
"filename": "qwen_image_fp8_e4m3fn.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
|
||||||
|
"size": "4.89GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image Diffusion Model (bf16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "diffusion_models/qwen-image",
|
||||||
|
"description": "Qwen-Image diffusion model (bf16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
||||||
|
"filename": "qwen_image_bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_bf16.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit 2509 Diffusion Model (fp8_e4m3fn)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "diffusion_models/qwen-image-edit",
|
||||||
|
"description": "Qwen-Image-Edit 2509 diffusion model (fp8_e4m3fn)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
||||||
|
"filename": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
||||||
|
"size": "4.89GB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit 2509 Diffusion Model (bf16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "diffusion_models/qwen-image-edit",
|
||||||
|
"description": "Qwen-Image-Edit 2509 diffusion model (bf16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
||||||
|
"filename": "qwen_image_edit_2509_bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_bf16.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit Diffusion Model (fp8_e4m3fn)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "diffusion_models/qwen-image-edit",
|
||||||
|
"description": "Qwen-Image-Edit diffusion model (fp8_e4m3fn)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
||||||
|
"filename": "qwen_image_edit_fp8_e4m3fn.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_fp8_e4m3fn.safetensors",
|
||||||
|
"size": "4.89GB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit Diffusion Model (bf16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "diffusion_models/qwen-image-edit",
|
||||||
|
"description": "Qwen-Image-Edit diffusion model (bf16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
||||||
|
"filename": "qwen_image_edit_bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_bf16.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 8steps V1.0",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 8-step LoRA model V1.0",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-8steps-V1.0.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 4steps V1.0",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 4-step LoRA model V1.0",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 4steps V1.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 4-step LoRA model V1.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-4steps-V1.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 4steps V2.0",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 4-step LoRA model V2.0",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-4steps-V2.0.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V2.0.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 4steps V2.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 4-step LoRA model V2.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-4steps-V2.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V2.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 8steps V1.1",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 8-step LoRA model V1.1",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-8steps-V1.1.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 8steps V1.1 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 8-step LoRA model V1.1 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-8steps-V1.1-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 8steps V2.0",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 8-step LoRA model V2.0",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Lightning 8steps V2.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "loras/qwen-image-lightning",
|
||||||
|
"description": "Qwen-Image-Lightning 8-step LoRA model V2.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Lightning-8steps-V2.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-Lightning 4steps V1.0",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-Lightning 4-step LoRA model V1.0",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-Lightning-4steps-V1.0.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-4steps-V1.0.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-Lightning 4steps V1.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-Lightning 4-step LoRA model V1.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-Lightning-4steps-V1.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-4steps-V1.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-Lightning 8steps V1.0",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-Lightning 8-step LoRA model V1.0",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-Lightning-8steps-V1.0.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-8steps-V1.0.safetensors",
|
||||||
|
"size": "9.78GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-Lightning 8steps V1.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-Lightning 8-step LoRA model V1.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-Lightning-8steps-V1.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-8steps-V1.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-2509-Lightning 4steps V1.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-2509-Lightning 4-step LoRA model V1.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-2509-Lightning 4steps V1.0 (fp32)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-2509-Lightning 4-step LoRA model V1.0 (fp32)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-fp32.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-fp32.safetensors",
|
||||||
|
"size": "39.1GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-2509-Lightning 8steps V1.0 (bf16)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-2509-Lightning 8-step LoRA model V1.0 (bf16)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-2509-Lightning-8steps-V1.0-bf16.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-8steps-V1.0-bf16.safetensors",
|
||||||
|
"size": "19.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image-Edit-2509-Lightning 8steps V1.0 (fp32)",
|
||||||
|
"type": "lora",
|
||||||
|
"base": "Qwen-Image-Edit",
|
||||||
|
"save_path": "loras/qwen-image-edit-lightning",
|
||||||
|
"description": "Qwen-Image-Edit-2509-Lightning 8-step LoRA model V1.0 (fp32)",
|
||||||
|
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
||||||
|
"filename": "Qwen-Image-Edit-2509-Lightning-8steps-V1.0-fp32.safetensors",
|
||||||
|
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-8steps-V1.0-fp32.safetensors",
|
||||||
|
"size": "39.1GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image InstantX ControlNet Union",
|
||||||
|
"type": "controlnet",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "controlnet/qwen-image/instantx",
|
||||||
|
"description": "Qwen-Image InstantX ControlNet Union model",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets",
|
||||||
|
"filename": "Qwen-Image-InstantX-ControlNet-Union.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Union.safetensors",
|
||||||
|
"size": "2.54GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Qwen-Image InstantX ControlNet Inpainting",
|
||||||
|
"type": "controlnet",
|
||||||
|
"base": "Qwen-Image",
|
||||||
|
"save_path": "controlnet/qwen-image/instantx",
|
||||||
|
"description": "Qwen-Image InstantX ControlNet Inpainting model",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets",
|
||||||
|
"filename": "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
|
||||||
|
"size": "2.54GB"
|
||||||
        }
    ]
}
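Every entry in this model list follows the same schema (name, type, base, save_path, description, reference, filename, url, size). As a rough illustration of how such an entry can be consumed, the Python sketch below downloads one of the entries added above into its save_path. The ComfyUI/models base directory and the download_entry helper are assumptions made for this example, not code from this repository.

import os
import urllib.request

# One of the entries added in the diff above (Qwen-Image VAE).
entry = {
    "name": "Qwen-Image VAE",
    "save_path": "vae/qwen-image",
    "filename": "qwen_image_vae.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
}

def download_entry(entry, models_dir="ComfyUI/models"):
    # Resolve the target directory from save_path and create it if needed.
    target_dir = os.path.join(models_dir, entry["save_path"])
    os.makedirs(target_dir, exist_ok=True)
    target_file = os.path.join(target_dir, entry["filename"])
    # Skip the download if the file is already present.
    if not os.path.exists(target_file):
        urllib.request.urlretrieve(entry["url"], target_file)
    return target_file

print(download_entry(entry))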
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,3 +1,3 @@
 #!/bin/bash
 rm ~/.tmp/dev/*.py > /dev/null 2>&1
-python ../../scanner.py ~/.tmp/dev
+python ../../scanner.py ~/.tmp/dev $*
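The added $* simply forwards whatever arguments the wrapper script receives on to scanner.py; if any of those arguments could ever contain spaces, the quoted form "$@" would be the more robust choice.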
@@ -1,5 +1,25 @@
{
    "custom_nodes": [
        {
            "author": "synchronicity-labs",
            "title": "ComfyUI Sync Lipsync Node",
            "reference": "https://github.com/synchronicity-labs/sync-comfyui",
            "files": [
                "https://github.com/synchronicity-labs/sync-comfyui"
            ],
            "install_type": "git-clone",
            "description": "This custom node allows you to perform audio-video lip synchronization inside ComfyUI using a simple interface."
        },
        {
            "author": "joaomede",
            "title": "ComfyUI-Unload-Model-Fork",
            "reference": "https://github.com/joaomede/ComfyUI-Unload-Model-Fork",
            "files": [
                "https://github.com/joaomede/ComfyUI-Unload-Model-Fork"
            ],
            "install_type": "git-clone",
            "description": "For unloading a model or all models, using the memory management that is already present in ComfyUI. Copied from [a/https://github.com/willblaschko/ComfyUI-Unload-Models](https://github.com/willblaschko/ComfyUI-Unload-Models) but without the unnecessary extra stuff."
        },
        {
            "author": "SanDiegoDude",
            "title": "ComfyUI-HiDream-Sampler [WIP]",
@@ -149,6 +169,16 @@
            ],
            "install_type": "git-clone",
            "description": "A fork of KJNodes for ComfyUI.\nVarious quality of life -nodes for ComfyUI, mostly just visual stuff to improve usability"
        },
        {
            "author": "huixingyun",
            "title": "ComfyUI-SoundFlow",
            "reference": "https://github.com/huixingyun/ComfyUI-SoundFlow",
            "files": [
                "https://github.com/huixingyun/ComfyUI-SoundFlow"
            ],
            "install_type": "git-clone",
            "description": "forked from https://github.com/fredconex/ComfyUI-SoundFlow (removed)"
        }
    ]
}
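The custom-node entries above all declare "install_type": "git-clone". As a rough sketch of what installing such an entry by hand amounts to, the snippet below clones each URL in files into ComfyUI/custom_nodes and installs its requirements.txt if one exists. The directory path and the install_git_clone helper are assumptions for the example; ComfyUI-Manager itself performs additional bookkeeping.

import os
import subprocess

# Entry shape taken from the list above; only the fields used here are shown.
entry = {
    "title": "ComfyUI Sync Lipsync Node",
    "files": ["https://github.com/synchronicity-labs/sync-comfyui"],
    "install_type": "git-clone",
}

def install_git_clone(entry, custom_nodes_dir="ComfyUI/custom_nodes"):
    for repo_url in entry["files"]:
        # Clone into custom_nodes/<repo name> if it is not already there.
        target = os.path.join(custom_nodes_dir, repo_url.rstrip("/").split("/")[-1])
        if not os.path.isdir(target):
            subprocess.run(["git", "clone", repo_url, target], check=True)
        # Install the node's Python dependencies when it ships a requirements.txt.
        requirements = os.path.join(target, "requirements.txt")
        if os.path.isfile(requirements):
            subprocess.run(["pip", "install", "-r", requirements], check=True)

install_git_clone(entry)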
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,5 +1,219 @@
|
|||||||
{
|
{
|
||||||
"models": [
|
"models": [
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_ti2v_5B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
|
||||||
|
"size": "10.0GB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_tiny.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_small.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2.1_hiera_large.pt",
|
||||||
|
"type": "sam2.1",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2.1 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2.1_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_tiny.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (tiny)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_tiny.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_tiny.pt",
|
||||||
|
"size": "149.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_small.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (small)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_small.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_small.pt",
|
||||||
|
"size": "176.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_base_plus.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (base+)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_base_plus.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_base_plus.pt",
|
||||||
|
"size": "309.0MB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sam2_hiera_large.pt",
|
||||||
|
"type": "sam2",
|
||||||
|
"base": "SAM",
|
||||||
|
"save_path": "sams",
|
||||||
|
"description": "Segmenty Anything SAM 2 hiera model (large)",
|
||||||
|
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||||
|
"filename": "sam2_hiera_large.pt",
|
||||||
|
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
|
||||||
|
"size": "857.0MB"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/omnigen2_fp16.safetensors",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "OmniGen2",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "OmniGen2 diffusion model. This is required for using OmniGen2.",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "omnigen2_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors",
|
||||||
|
"size": "7.93GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"type": "clip",
|
||||||
|
"base": "qwen-2.5",
|
||||||
|
"save_path": "default",
|
||||||
|
"description": "text encoder for OmniGen2",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||||
|
"filename": "qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
|
||||||
|
"size": "7.51GB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "Latent Bridge Matching for Image Relighting",
|
"name": "Latent Bridge Matching for Image Relighting",
|
||||||
"type": "diffusion_model",
|
"type": "diffusion_model",
|
||||||
@@ -473,224 +687,6 @@
|
|||||||
"filename": "llava_llama3_fp16.safetensors",
|
"filename": "llava_llama3_fp16.safetensors",
|
||||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp16.safetensors",
|
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp16.safetensors",
|
||||||
"size": "16.1GB"
|
"size": "16.1GB"
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "PixArt-Sigma-XL-2-512-MS.safetensors (diffusion)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "pixart-sigma",
|
|
||||||
"save_path": "diffusion_models/PixArt-Sigma",
|
|
||||||
"description": "PixArt-Sigma Diffusion model",
|
|
||||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS",
|
|
||||||
"filename": "PixArt-Sigma-XL-2-512-MS.safetensors",
|
|
||||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
|
||||||
"size": "2.44GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "PixArt-Sigma-XL-2-1024-MS.safetensors (diffusion)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "pixart-sigma",
|
|
||||||
"save_path": "diffusion_models/PixArt-Sigma",
|
|
||||||
"description": "PixArt-Sigma Diffusion model",
|
|
||||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS",
|
|
||||||
"filename": "PixArt-Sigma-XL-2-1024-MS.safetensors",
|
|
||||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
|
||||||
"size": "2.44GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "PixArt-XL-2-1024-MS.safetensors (diffusion)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "pixart-alpha",
|
|
||||||
"save_path": "diffusion_models/PixArt-Alpha",
|
|
||||||
"description": "PixArt-Alpha Diffusion model",
|
|
||||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS",
|
|
||||||
"filename": "PixArt-XL-2-1024-MS.safetensors",
|
|
||||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
|
||||||
"size": "2.45GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/hunyuan_video_t2v_720p_bf16.safetensors",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Hunyuan Video",
|
|
||||||
"save_path": "diffusion_models/hunyuan_video",
|
|
||||||
"description": "Huyuan Video diffusion model. repackaged version.",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
|
|
||||||
"filename": "hunyuan_video_t2v_720p_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors",
|
|
||||||
"size": "25.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/hunyuan_video_vae_bf16.safetensors",
|
|
||||||
"type": "VAE",
|
|
||||||
"base": "Hunyuan Video",
|
|
||||||
"save_path": "VAE",
|
|
||||||
"description": "Huyuan Video VAE model. repackaged version.",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
|
|
||||||
"filename": "hunyuan_video_vae_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors",
|
|
||||||
"size": "493MB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "LTX-Video 2B v0.9.1 Checkpoint",
|
|
||||||
"type": "checkpoint",
|
|
||||||
"base": "LTX-Video",
|
|
||||||
"save_path": "checkpoints/LTXV",
|
|
||||||
"description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
|
|
||||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
|
||||||
"filename": "ltx-video-2b-v0.9.1.safetensors",
|
|
||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.1.safetensors",
|
|
||||||
"size": "5.72GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-canny-controlnet-v3.safetensors",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/controlnets",
|
|
||||||
"description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
|
|
||||||
"filename": "flux-canny-controlnet-v3.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-canny-controlnet-v3.safetensors",
|
|
||||||
"size": "1.49GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-depth-controlnet-v3.safetensors",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/controlnets",
|
|
||||||
"description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
|
|
||||||
"filename": "flux-depth-controlnet-v3.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-depth-controlnet-v3.safetensors",
|
|
||||||
"size": "1.49GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-hed-controlnet-v3.safetensors",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/controlnets",
|
|
||||||
"description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
|
|
||||||
"filename": "flux-hed-controlnet-v3.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
|
|
||||||
"size": "1.49GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/realism_lora.safetensors",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/loras",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
|
|
||||||
"filename": "realism_lora.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/realism_lora.safetensors",
|
|
||||||
"size": "44.8MB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/art_lora.safetensors",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/loras",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
|
|
||||||
"filename": "art_lora.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/scenery_lora.safetensors",
|
|
||||||
"size": "44.8MB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/mjv6_lora.safetensors",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/loras",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
|
|
||||||
"filename": "mjv6_lora.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/mjv6_lora.safetensors",
|
|
||||||
"size": "44.8MB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-ip-adapter",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/ipadapters",
|
|
||||||
"description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-ip-adapter",
|
|
||||||
"filename": "ip_adapter.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-ip-adapter/resolve/main/ip_adapter.safetensors",
|
|
||||||
"size": "982MB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "stabilityai/SD3.5-Large-Controlnet-Blur",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "SD3.5",
|
|
||||||
"save_path": "controlnet/SD3.5",
|
|
||||||
"description": "Blur Controlnet model for SD3.5 Large",
|
|
||||||
"reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
|
|
||||||
"filename": "sd3.5_large_controlnet_blur.safetensors",
|
|
||||||
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_blur.safetensors",
|
|
||||||
"size": "8.65GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "stabilityai/SD3.5-Large-Controlnet-Canny",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "SD3.5",
|
|
||||||
"save_path": "controlnet/SD3.5",
|
|
||||||
"description": "Canny Controlnet model for SD3.5 Large",
|
|
||||||
"reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
|
|
||||||
"filename": "sd3.5_large_controlnet_canny.safetensors",
|
|
||||||
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors",
|
|
||||||
"size": "8.65GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "stabilityai/SD3.5-Large-Controlnet-Depth",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "SD3.5",
|
|
||||||
"save_path": "controlnet/SD3.5",
|
|
||||||
"description": "Depth Controlnet model for SD3.5 Large",
|
|
||||||
"reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
|
|
||||||
"filename": "sd3.5_large_controlnet_depth.safetensors",
|
|
||||||
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_depth.safetensors",
|
|
||||||
"size": "8.65GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "LTX-Video 2B v0.9 Checkpoint",
|
|
||||||
"type": "checkpoint",
|
|
||||||
"base": "LTX-Video",
|
|
||||||
"save_path": "checkpoints/LTXV",
|
|
||||||
"description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
|
|
||||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
|
||||||
"filename": "ltx-video-2b-v0.9.safetensors",
|
|
||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors",
|
|
||||||
"size": "9.37GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "InstantX/FLUX.1-dev-IP-Adapter",
|
|
||||||
"type": "IP-Adapter",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "ipadapter-flux",
|
|
||||||
"description": "FLUX.1-dev-IP-Adapter",
|
|
||||||
"reference": "https://huggingface.co/InstantX/FLUX.1-dev-IP-Adapter",
|
|
||||||
"filename": "ip-adapter.bin",
|
|
||||||
"url": "https://huggingface.co/InstantX/FLUX.1-dev-IP-Adapter/resolve/main/ip-adapter.bin",
|
|
||||||
"size": "5.29GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/sigclip_vision_384 (patch14_384)",
|
|
||||||
"type": "clip_vision",
|
|
||||||
"base": "sigclip",
|
|
||||||
"save_path": "clip_vision",
|
|
||||||
"description": "This clip vision model is required for FLUX.1 Redux.",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/sigclip_vision_384/tree/main",
|
|
||||||
"filename": "sigclip_vision_patch14_384.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors",
|
|
||||||
"size": "857MB"
|
|
||||||
        }
    ]
}
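Since every model entry in these lists is expected to carry the same keys, a small consistency check is easy to write. The sketch below is only an illustration: the required-key set is inferred from the entries in this diff rather than from a formal schema, and the model-list.json path is an assumption. It flags entries with missing keys or with a filename that does not match the final path segment of the url; it would, for example, flag the XLabs-AI/art_lora entry shown above, whose url ends in scenery_lora.safetensors.

import json

REQUIRED_KEYS = {
    "name", "type", "base", "save_path", "description",
    "reference", "filename", "url", "size",
}

def check_model_list(path):
    with open(path, "r", encoding="utf-8") as f:
        data = json.load(f)
    problems = []
    for entry in data.get("models", []):
        name = entry.get("name", "<unnamed>")
        missing = REQUIRED_KEYS - entry.keys()
        if missing:
            problems.append((name, "missing keys: " + ", ".join(sorted(missing))))
        # The download URL normally ends with the advertised filename.
        if entry.get("url", "").rsplit("/", 1)[-1] != entry.get("filename"):
            problems.append((name, "filename does not match url"))
    return problems

for name, problem in check_model_list("model-list.json"):
    print(name + ": " + problem)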
|
|||||||
@@ -10,6 +10,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A minimal template for creating React/TypeScript frontend extensions for ComfyUI, with complete boilerplate setup including internationalization and unit testing."
|
"description": "A minimal template for creating React/TypeScript frontend extensions for ComfyUI, with complete boilerplate setup including internationalization and unit testing."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "comfyui-wiki",
|
||||||
|
"title": "ComfyUI-i18n-demo",
|
||||||
|
"reference": "https://github.com/comfyui-wiki/ComfyUI-i18n-demo",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/comfyui-wiki/ComfyUI-i18n-demo"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI custom node develop i18n support demo "
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "Suzie1",
|
"author": "Suzie1",
|
||||||
"title": "Guide To Making Custom Nodes in ComfyUI",
|
"title": "Guide To Making Custom Nodes in ComfyUI",
|
||||||
@@ -331,6 +341,36 @@
|
|||||||
],
|
],
|
||||||
"description": "Dynamic Node examples for ComfyUI",
|
"description": "Dynamic Node examples for ComfyUI",
|
||||||
"install_type": "git-clone"
|
"install_type": "git-clone"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Jonathon-Doran",
|
||||||
|
"title": "remote-combo-demo",
|
||||||
|
"reference": "https://github.com/Jonathon-Doran/remote-combo-demo",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Jonathon-Doran/remote-combo-demo"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A minimal test suite demonstrating how remote COMBO inputs behave in ComfyUI, with and without force_input"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "J1mB091",
|
||||||
|
"title": "ComfyUI-J1mB091 Custom Nodes",
|
||||||
|
"reference": "https://github.com/J1mB091/ComfyUI-J1mB091",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/J1mB091/ComfyUI-J1mB091"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Vibe Coded ComfyUI Custom Nodes"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "aiforhumans",
|
||||||
|
"title": "XDev Nodes - Complete Toolkit",
|
||||||
|
"reference": "https://github.com/aiforhumans/comfyui-xdev-nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/aiforhumans/comfyui-xdev-nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Complete ComfyUI development toolkit with 8 professional nodes including VAE tools, universal type testing, and comprehensive debugging infrastructure."
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
notebooks/comfyui_colab_with_manager.ipynb (new file, 373 lines)
@@ -0,0 +1,373 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "aaaaaaaaaa"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"Git clone the repo and install the requirements. (ignore the pip errors about protobuf)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"id": "bbbbbbbbbb"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"# #@title Environment Setup\n",
|
||||||
|
"\n",
|
||||||
|
"from pathlib import Path\n",
|
||||||
|
"\n",
|
||||||
|
"OPTIONS = {}\n",
|
||||||
|
"\n",
|
||||||
|
"USE_GOOGLE_DRIVE = True #@param {type:\"boolean\"}\n",
|
||||||
|
"UPDATE_COMFY_UI = True #@param {type:\"boolean\"}\n",
|
||||||
|
"USE_COMFYUI_MANAGER = True #@param {type:\"boolean\"}\n",
|
||||||
|
"INSTALL_CUSTOM_NODES_DEPENDENCIES = True #@param {type:\"boolean\"}\n",
|
||||||
|
"OPTIONS['USE_GOOGLE_DRIVE'] = USE_GOOGLE_DRIVE\n",
|
||||||
|
"OPTIONS['UPDATE_COMFY_UI'] = UPDATE_COMFY_UI\n",
|
||||||
|
"OPTIONS['USE_COMFYUI_MANAGER'] = USE_COMFYUI_MANAGER\n",
|
||||||
|
"OPTIONS['INSTALL_CUSTOM_NODES_DEPENDENCIES'] = INSTALL_CUSTOM_NODES_DEPENDENCIES\n",
|
||||||
|
"\n",
|
||||||
|
"current_dir = !pwd\n",
|
||||||
|
"WORKSPACE = f\"{current_dir[0]}/ComfyUI\"\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['USE_GOOGLE_DRIVE']:\n",
|
||||||
|
" !echo \"Mounting Google Drive...\"\n",
|
||||||
|
" %cd /\n",
|
||||||
|
"\n",
|
||||||
|
" from google.colab import drive\n",
|
||||||
|
" drive.mount('/content/drive')\n",
|
||||||
|
"\n",
|
||||||
|
" WORKSPACE = \"/content/drive/MyDrive/ComfyUI\"\n",
|
||||||
|
" %cd /content/drive/MyDrive\n",
|
||||||
|
"\n",
|
||||||
|
"![ ! -d $WORKSPACE ] && echo -= Initial setup ComfyUI =- && git clone https://github.com/comfyanonymous/ComfyUI\n",
|
||||||
|
"%cd $WORKSPACE\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['UPDATE_COMFY_UI']:\n",
|
||||||
|
" !echo -= Updating ComfyUI =-\n",
|
||||||
|
"\n",
|
||||||
|
" # Correction of the issue of permissions being deleted on Google Drive.\n",
|
||||||
|
" ![ -f \".ci/nightly/update_windows/update_comfyui_and_python_dependencies.bat\" ] && chmod 755 .ci/nightly/update_windows/update_comfyui_and_python_dependencies.bat\n",
|
||||||
|
" ![ -f \".ci/nightly/windows_base_files/run_nvidia_gpu.bat\" ] && chmod 755 .ci/nightly/windows_base_files/run_nvidia_gpu.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/update_comfyui_and_python_dependencies.bat\" ] && chmod 755 .ci/update_windows/update_comfyui_and_python_dependencies.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows_cu118/update_comfyui_and_python_dependencies.bat\" ] && chmod 755 .ci/update_windows_cu118/update_comfyui_and_python_dependencies.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/update.py\" ] && chmod 755 .ci/update_windows/update.py\n",
|
||||||
|
" ![ -f \".ci/update_windows/update_comfyui.bat\" ] && chmod 755 .ci/update_windows/update_comfyui.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/README_VERY_IMPORTANT.txt\" ] && chmod 755 .ci/update_windows/README_VERY_IMPORTANT.txt\n",
|
||||||
|
" ![ -f \".ci/update_windows/run_cpu.bat\" ] && chmod 755 .ci/update_windows/run_cpu.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/run_nvidia_gpu.bat\" ] && chmod 755 .ci/update_windows/run_nvidia_gpu.bat\n",
|
||||||
|
"\n",
|
||||||
|
" !git pull\n",
|
||||||
|
"\n",
|
||||||
|
"!echo -= Install dependencies =-\n",
|
||||||
|
"!pip3 install accelerate\n",
|
||||||
|
"!pip3 install einops transformers>=4.28.1 safetensors>=0.4.2 aiohttp pyyaml Pillow scipy tqdm psutil tokenizers>=0.13.3\n",
|
||||||
|
"!pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu121\n",
|
||||||
|
"!pip3 install torchsde\n",
|
||||||
|
"!pip3 install kornia>=0.7.1 spandrel soundfile sentencepiece\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['USE_COMFYUI_MANAGER']:\n",
|
||||||
|
" %cd custom_nodes\n",
|
||||||
|
"\n",
|
||||||
|
" # Correction of the issue of permissions being deleted on Google Drive.\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/check.sh\" ] && chmod 755 ComfyUI-Manager/check.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/scan.sh\" ] && chmod 755 ComfyUI-Manager/scan.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/node_db/dev/scan.sh\" ] && chmod 755 ComfyUI-Manager/node_db/dev/scan.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/node_db/tutorial/scan.sh\" ] && chmod 755 ComfyUI-Manager/node_db/tutorial/scan.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/scripts/install-comfyui-venv-linux.sh\" ] && chmod 755 ComfyUI-Manager/scripts/install-comfyui-venv-linux.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/scripts/install-comfyui-venv-win.bat\" ] && chmod 755 ComfyUI-Manager/scripts/install-comfyui-venv-win.bat\n",
|
||||||
|
"\n",
|
||||||
|
" ![ ! -d ComfyUI-Manager ] && echo -= Initial setup ComfyUI-Manager =- && git clone https://github.com/ltdrdata/ComfyUI-Manager\n",
|
||||||
|
" %cd ComfyUI-Manager\n",
|
||||||
|
" !git pull\n",
|
||||||
|
"\n",
|
||||||
|
"%cd $WORKSPACE\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['INSTALL_CUSTOM_NODES_DEPENDENCIES']:\n",
|
||||||
|
" !echo -= Install custom nodes dependencies =-\n",
|
||||||
|
" !pip install GitPython\n",
|
||||||
|
" !python custom_nodes/ComfyUI-Manager/cm-cli.py restore-dependencies\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "cccccccccc"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"Download some models/checkpoints/vae or custom comfyui nodes (uncomment the commands for the ones you want)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "dddddddddd"
},
"outputs": [],
"source": [
"# Checkpoints\n",
"\n",
"### SDXL\n",
"### I recommend these workflow examples: https://comfyanonymous.github.io/ComfyUI_examples/sdxl/\n",
"\n",
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors -P ./models/checkpoints/\n",
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors -P ./models/checkpoints/\n",
"\n",
"# SDXL ReVision\n",
"#!wget -c https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors -P ./models/clip_vision/\n",
"\n",
"# SD1.5\n",
"!wget -c https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.ckpt -P ./models/checkpoints/\n",
"\n",
"# SD2\n",
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-2-1-base/resolve/main/v2-1_512-ema-pruned.safetensors -P ./models/checkpoints/\n",
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors -P ./models/checkpoints/\n",
"\n",
"# Some SD1.5 anime style\n",
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix2/AbyssOrangeMix2_hard.safetensors -P ./models/checkpoints/\n",
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix3/AOM3A1_orangemixs.safetensors -P ./models/checkpoints/\n",
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix3/AOM3A3_orangemixs.safetensors -P ./models/checkpoints/\n",
"#!wget -c https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/anything-v3-fp16-pruned.safetensors -P ./models/checkpoints/\n",
"\n",
"# Waifu Diffusion 1.5 (anime style SD2.x 768-v)\n",
"#!wget -c https://huggingface.co/waifu-diffusion/wd-1-5-beta3/resolve/main/wd-illusion-fp16.safetensors -P ./models/checkpoints/\n",
"\n",
"\n",
"# unCLIP models\n",
"#!wget -c https://huggingface.co/comfyanonymous/illuminatiDiffusionV1_v11_unCLIP/resolve/main/illuminatiDiffusionV1_v11-unclip-h-fp16.safetensors -P ./models/checkpoints/\n",
"#!wget -c https://huggingface.co/comfyanonymous/wd-1.5-beta2_unCLIP/resolve/main/wd-1-5-beta2-aesthetic-unclip-h-fp16.safetensors -P ./models/checkpoints/\n",
"\n",
"\n",
"# VAE\n",
"!wget -c https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors -P ./models/vae/\n",
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/VAEs/orangemix.vae.pt -P ./models/vae/\n",
"#!wget -c https://huggingface.co/hakurei/waifu-diffusion-v1-4/resolve/main/vae/kl-f8-anime2.ckpt -P ./models/vae/\n",
"\n",
"\n",
"# Loras\n",
"#!wget -c https://civitai.com/api/download/models/10350 -O ./models/loras/theovercomer8sContrastFix_sd21768.safetensors #theovercomer8sContrastFix SD2.x 768-v\n",
"#!wget -c https://civitai.com/api/download/models/10638 -O ./models/loras/theovercomer8sContrastFix_sd15.safetensors #theovercomer8sContrastFix SD1.x\n",
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors -P ./models/loras/ #SDXL offset noise lora\n",
"\n",
"\n",
"# T2I-Adapter\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_seg_sd14v1.pth -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_sketch_sd14v1.pth -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_keypose_sd14v1.pth -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_openpose_sd14v1.pth -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_color_sd14v1.pth -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_canny_sd14v1.pth -P ./models/controlnet/\n",
"\n",
"# T2I Styles Model\n",
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_style_sd14v1.pth -P ./models/style_models/\n",
"\n",
"# CLIPVision model (needed for styles model)\n",
"#!wget -c https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/pytorch_model.bin -O ./models/clip_vision/clip_vit14.bin\n",
"\n",
"\n",
"# ControlNet\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_canny_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_lineart_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_seg_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_softedge_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11u_sd15_tile_fp16.safetensors -P ./models/controlnet/\n",
"\n",
"# ControlNet SDXL\n",
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-canny-rank256.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-depth-rank256.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-recolor-rank256.safetensors -P ./models/controlnet/\n",
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-sketch-rank256.safetensors -P ./models/controlnet/\n",
"\n",
"# Controlnet Preprocessor nodes by Fannovel16\n",
"#!cd custom_nodes && git clone https://github.com/Fannovel16/comfy_controlnet_preprocessors; cd comfy_controlnet_preprocessors && python install.py\n",
"\n",
"\n",
"# GLIGEN\n",
"#!wget -c https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/resolve/main/gligen_sd14_textbox_pruned_fp16.safetensors -P ./models/gligen/\n",
"\n",
"\n",
"# ESRGAN upscale model\n",
"#!wget -c https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth -P ./models/upscale_models/\n",
"#!wget -c https://huggingface.co/sberbank-ai/Real-ESRGAN/resolve/main/RealESRGAN_x2.pth -P ./models/upscale_models/\n",
"#!wget -c https://huggingface.co/sberbank-ai/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth -P ./models/upscale_models/\n",
"\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "kkkkkkkkkkkkkkk"
},
"source": [
"### Run ComfyUI with cloudflared (Recommended Way)\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "jjjjjjjjjjjjjj"
},
"outputs": [],
"source": [
"!wget -P ~ https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64.deb\n",
"!dpkg -i ~/cloudflared-linux-amd64.deb\n",
"\n",
"import subprocess\n",
"import threading\n",
"import time\n",
"import socket\n",
"import urllib.request\n",
"\n",
"def iframe_thread(port):\n",
"  while True:\n",
"      time.sleep(0.5)\n",
"      sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
"      result = sock.connect_ex(('127.0.0.1', port))\n",
"      if result == 0:\n",
"        break\n",
"      sock.close()\n",
"  print(\"\\nComfyUI finished loading, trying to launch cloudflared (if it gets stuck here cloudflared is having issues)\\n\")\n",
"\n",
"  p = subprocess.Popen([\"cloudflared\", \"tunnel\", \"--url\", \"http://127.0.0.1:{}\".format(port)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n",
"  for line in p.stderr:\n",
"    l = line.decode()\n",
"    if \"trycloudflare.com \" in l:\n",
"      print(\"This is the URL to access ComfyUI:\", l[l.find(\"http\"):], end='')\n",
"    #print(l, end='')\n",
"\n",
"\n",
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
"\n",
"!python main.py --dont-print-server"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "kkkkkkkkkkkkkk"
},
"source": [
"### Run ComfyUI with localtunnel\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "jjjjjjjjjjjjj"
},
"outputs": [],
"source": [
"!npm install -g localtunnel\n",
"\n",
"import subprocess\n",
"import threading\n",
"import time\n",
"import socket\n",
"import urllib.request\n",
"\n",
"def iframe_thread(port):\n",
"  while True:\n",
"      time.sleep(0.5)\n",
"      sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
"      result = sock.connect_ex(('127.0.0.1', port))\n",
"      if result == 0:\n",
"        break\n",
"      sock.close()\n",
"  print(\"\\nComfyUI finished loading, trying to launch localtunnel (if it gets stuck here localtunnel is having issues)\\n\")\n",
"\n",
"  print(\"The password/endpoint IP for localtunnel is:\", urllib.request.urlopen('https://ipv4.icanhazip.com').read().decode('utf8').strip(\"\\n\"))\n",
"  p = subprocess.Popen([\"lt\", \"--port\", \"{}\".format(port)], stdout=subprocess.PIPE)\n",
"  for line in p.stdout:\n",
"    print(line.decode(), end='')\n",
"\n",
"\n",
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
"\n",
"!python main.py --dont-print-server"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "gggggggggg"
},
"source": [
"### Run ComfyUI with colab iframe (use only in case the previous way with localtunnel doesn't work)\n",
"\n",
"You should see the UI appear in an iframe. If you get a 403 error, it's your Firefox settings or an extension that's messing things up.\n",
"\n",
"If you want to open it in another window, use the link.\n",
"\n",
"Note that some UI features like live image previews won't work because the colab iframe blocks websockets."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "hhhhhhhhhh"
},
"outputs": [],
"source": [
"import threading\n",
"import time\n",
"import socket\n",
"def iframe_thread(port):\n",
"  while True:\n",
"      time.sleep(0.5)\n",
"      sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
"      result = sock.connect_ex(('127.0.0.1', port))\n",
"      if result == 0:\n",
"        break\n",
"      sock.close()\n",
"  from google.colab import output\n",
"  output.serve_kernel_port_as_iframe(port, height=1024)\n",
"  print(\"to open it in a window you can open this link here:\")\n",
"  output.serve_kernel_port_as_window(port)\n",
"\n",
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
"\n",
"!python main.py --dont-print-server"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"provenance": []
},
"gpuClass": "standard",
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
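The model list in the notebook cell above is driven by individually uncommented wget calls. As a minimal sketch only (it assumes the notebook's ./models/ directory layout, reuses the SD1.5 checkpoint URL already listed above, and unlike wget -c has no resume support), the same downloads could be scripted from Python:

import os
import urllib.request

# Map of destination folders to checkpoint URLs; the single entry shown is the
# SD1.5 checkpoint already uncommented in the cell above.
MODELS = {
    "./models/checkpoints/": [
        "https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.ckpt",
    ],
}

for dest_dir, urls in MODELS.items():
    os.makedirs(dest_dir, exist_ok=True)
    for url in urls:
        target = os.path.join(dest_dir, url.split("/")[-1])
        if os.path.exists(target):
            continue  # crude skip of files already present; no partial-download resume
        print(f"Downloading {url} -> {target}")
        urllib.request.urlretrieve(url, target)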
1492
openapi.yaml
File diff suppressed because it is too large
@@ -12,10 +12,13 @@ import ast
|
|||||||
import logging
|
import logging
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from .common import security_check
|
glob_path = os.path.join(os.path.dirname(__file__), "glob")
|
||||||
from .common import manager_util
|
sys.path.append(glob_path)
|
||||||
from .common import cm_global
|
|
||||||
from .common import manager_downloader
|
import security_check
|
||||||
|
import manager_util
|
||||||
|
import cm_global
|
||||||
|
import manager_downloader
|
||||||
import folder_paths
|
import folder_paths
|
||||||
|
|
||||||
manager_util.add_python_path_to_env()
|
manager_util.add_python_path_to_env()
|
||||||
@@ -35,7 +38,6 @@ else:
|
|||||||
def current_timestamp():
|
def current_timestamp():
|
||||||
return str(time.time()).split('.')[0]
|
return str(time.time()).split('.')[0]
|
||||||
|
|
||||||
security_check.security_check()
|
|
||||||
|
|
||||||
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
|
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
|
||||||
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
|
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
|
||||||
@@ -64,14 +66,16 @@ def is_import_failed_extension(name):
|
|||||||
comfy_path = os.environ.get('COMFYUI_PATH')
|
comfy_path = os.environ.get('COMFYUI_PATH')
|
||||||
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
|
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
|
||||||
|
|
||||||
|
if comfy_path is None:
|
||||||
|
# legacy env var
|
||||||
|
comfy_path = os.environ.get('COMFYUI_PATH')
|
||||||
|
|
||||||
if comfy_path is None:
|
if comfy_path is None:
|
||||||
comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
|
comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
|
||||||
os.environ['COMFYUI_PATH'] = comfy_path
|
|
||||||
|
|
||||||
if comfy_base_path is None:
|
if comfy_base_path is None:
|
||||||
comfy_base_path = comfy_path
|
comfy_base_path = comfy_path
|
||||||
|
|
||||||
|
|
||||||
sys.__comfyui_manager_register_message_collapse = register_message_collapse
|
sys.__comfyui_manager_register_message_collapse = register_message_collapse
|
||||||
sys.__comfyui_manager_is_import_failed_extension = is_import_failed_extension
|
sys.__comfyui_manager_is_import_failed_extension = is_import_failed_extension
|
||||||
cm_global.register_api('cm.register_message_collapse', register_message_collapse)
|
cm_global.register_api('cm.register_message_collapse', register_message_collapse)
|
||||||
@@ -81,12 +85,23 @@ cm_global.register_api('cm.is_import_failed_extension', is_import_failed_extensi
|
|||||||
comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
|
comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
|
||||||
custom_nodes_base_path = folder_paths.get_folder_paths('custom_nodes')[0]
|
custom_nodes_base_path = folder_paths.get_folder_paths('custom_nodes')[0]
|
||||||
manager_files_path = os.path.abspath(os.path.join(folder_paths.get_user_directory(), 'default', 'ComfyUI-Manager'))
|
|
||||||
|
# Check for System User API availability (PR #10966)
|
||||||
|
_has_system_user_api = hasattr(folder_paths, 'get_system_user_directory')
|
||||||
|
|
||||||
|
if _has_system_user_api:
|
||||||
|
manager_files_path = os.path.abspath(os.path.join(folder_paths.get_user_directory(), '__manager'))
|
||||||
|
else:
|
||||||
|
manager_files_path = os.path.abspath(os.path.join(folder_paths.get_user_directory(), 'default', 'ComfyUI-Manager'))
|
||||||
|
|
||||||
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
|
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
|
||||||
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
|
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
|
||||||
restore_snapshot_path = os.path.join(manager_files_path, "startup-scripts", "restore-snapshot.json")
|
restore_snapshot_path = os.path.join(manager_files_path, "startup-scripts", "restore-snapshot.json")
|
||||||
manager_config_path = os.path.join(manager_files_path, 'config.ini')
|
manager_config_path = os.path.join(manager_files_path, 'config.ini')
|
||||||
|
|
||||||
|
cm_cli_path = os.path.join(comfyui_manager_path, "cm-cli.py")
|
||||||
|
|
||||||
|
|
||||||
default_conf = {}
|
default_conf = {}
|
||||||
|
|
||||||
def read_config():
|
def read_config():
|
||||||
@@ -111,20 +126,15 @@ def check_file_logging():
|
|||||||
|
|
||||||
read_config()
|
read_config()
|
||||||
read_uv_mode()
|
read_uv_mode()
|
||||||
|
security_check.security_check()
|
||||||
check_file_logging()
|
check_file_logging()
|
||||||
|
|
||||||
if sys.version_info < (3, 13):
|
cm_global.pip_overrides = {}
|
||||||
cm_global.pip_overrides = {'numpy': 'numpy<2'}
|
|
||||||
else:
|
|
||||||
cm_global.pip_overrides = {}
|
|
||||||
|
|
||||||
if os.path.exists(manager_pip_overrides_path):
|
if os.path.exists(manager_pip_overrides_path):
|
||||||
with open(manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
with open(manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
||||||
cm_global.pip_overrides = json.load(json_file)
|
cm_global.pip_overrides = json.load(json_file)
|
||||||
|
|
||||||
if sys.version_info < (3, 13):
|
|
||||||
cm_global.pip_overrides['numpy'] = 'numpy<2'
|
|
||||||
|
|
||||||
|
|
||||||
if os.path.exists(manager_pip_blacklist_path):
|
if os.path.exists(manager_pip_blacklist_path):
|
||||||
with open(manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
|
with open(manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
|
||||||
@@ -398,11 +408,7 @@ try:
|
|||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
global is_start_mode
|
global is_start_mode
|
||||||
|
|
||||||
try:
|
message = record.getMessage()
|
||||||
message = record.getMessage()
|
|
||||||
except Exception as e:
|
|
||||||
message = f"<<logging error>>: {record} - {e}"
|
|
||||||
original_stderr.write(message)
|
|
||||||
|
|
||||||
if is_start_mode:
|
if is_start_mode:
|
||||||
match = re.search(pat_import_fail, message)
|
match = re.search(pat_import_fail, message)
|
||||||
@@ -445,6 +451,35 @@ except Exception as e:
|
|||||||
print(f"[ComfyUI-Manager] Logging failed: {e}")
|
print(f"[ComfyUI-Manager] Logging failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_dependencies():
|
||||||
|
try:
|
||||||
|
import git # noqa: F401
|
||||||
|
import toml # noqa: F401
|
||||||
|
import rich # noqa: F401
|
||||||
|
import chardet # noqa: F401
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
my_path = os.path.dirname(__file__)
|
||||||
|
requirements_path = os.path.join(my_path, "requirements.txt")
|
||||||
|
|
||||||
|
print("## ComfyUI-Manager: installing dependencies. (GitPython)")
|
||||||
|
try:
|
||||||
|
subprocess.check_output(manager_util.make_pip_cmd(['install', '-r', requirements_path]))
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
print("## [ERROR] ComfyUI-Manager: Attempting to reinstall dependencies using an alternative method.")
|
||||||
|
try:
|
||||||
|
subprocess.check_output(manager_util.make_pip_cmd(['install', '--user', '-r', requirements_path]))
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
print("## [ERROR] ComfyUI-Manager: Failed to install the GitPython package in the correct Python environment. Please install it manually in the appropriate environment. (You can seek help at https://app.element.io/#/room/%23comfyui_space%3Amatrix.org)")
|
||||||
|
|
||||||
|
try:
|
||||||
|
print("## ComfyUI-Manager: installing dependencies done.")
|
||||||
|
except:
|
||||||
|
# maybe we should sys.exit() here? there are at least two screens' worth of error messages still being pumped after our error messages
|
||||||
|
print("## [ERROR] ComfyUI-Manager: GitPython package seems to be installed, but failed to load somehow. Make sure you have a working git client installed")
|
||||||
|
|
||||||
|
ensure_dependencies()
|
||||||
|
|
||||||
|
|
||||||
print("** ComfyUI startup time:", current_timestamp())
|
print("** ComfyUI startup time:", current_timestamp())
|
||||||
print("** Platform:", platform.system())
|
print("** Platform:", platform.system())
|
||||||
print("** Python version:", sys.version)
|
print("** Python version:", sys.version)
|
||||||
@@ -468,7 +503,7 @@ def read_downgrade_blacklist():
|
|||||||
items = [x.strip() for x in items if x != '']
|
items = [x.strip() for x in items if x != '']
|
||||||
cm_global.pip_downgrade_blacklist += items
|
cm_global.pip_downgrade_blacklist += items
|
||||||
cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
|
cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@@ -489,7 +524,8 @@ check_bypass_ssl()
|
|||||||
|
|
||||||
# Perform install
|
# Perform install
|
||||||
processed_install = set()
|
processed_install = set()
|
||||||
script_list_path = os.path.join(folder_paths.user_directory, "default", "ComfyUI-Manager", "startup-scripts", "install-scripts.txt")
|
# Use manager_files_path for consistency (fixes path inconsistency bug)
|
||||||
|
script_list_path = os.path.join(manager_files_path, "startup-scripts", "install-scripts.txt")
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
|
||||||
|
|
||||||
|
|
||||||
@@ -574,10 +610,7 @@ if os.path.exists(restore_snapshot_path):
|
|||||||
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
|
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
|
||||||
new_env["COMFYUI_FOLDERS_BASE_PATH"] = comfy_path
|
new_env["COMFYUI_FOLDERS_BASE_PATH"] = comfy_path
|
||||||
|
|
||||||
if 'COMFYUI_PATH' not in new_env:
|
cmd_str = [sys.executable, cm_cli_path, 'restore-snapshot', restore_snapshot_path]
|
||||||
new_env['COMFYUI_PATH'] = os.path.dirname(folder_paths.__file__)
|
|
||||||
|
|
||||||
cmd_str = [sys.executable, '-m', 'comfyui_manager.cm_cli', 'restore-snapshot', restore_snapshot_path]
|
|
||||||
exit_code = process_wrap(cmd_str, custom_nodes_base_path, handler=msg_capture, env=new_env)
|
exit_code = process_wrap(cmd_str, custom_nodes_base_path, handler=msg_capture, env=new_env)
|
||||||
|
|
||||||
if exit_code != 0:
|
if exit_code != 0:
|
||||||
@@ -769,7 +802,11 @@ def execute_startup_script():
|
|||||||
|
|
||||||
|
|
||||||
# Check if script_list_path exists
|
# Check if script_list_path exists
|
||||||
if os.path.exists(script_list_path):
|
# Block startup-scripts on old ComfyUI (security measure)
|
||||||
|
if not _has_system_user_api:
|
||||||
|
if os.path.exists(script_list_path):
|
||||||
|
print("[ComfyUI-Manager] Startup scripts blocked on old ComfyUI version.")
|
||||||
|
elif os.path.exists(script_list_path):
|
||||||
execute_startup_script()
|
execute_startup_script()
|
||||||
|
|
||||||
|
|
||||||
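The prestartup hunks above keep a cm_global.pip_overrides mapping (seeded from pip_overrides.json and, on Python < 3.13, pinning numpy to numpy<2). The snippet below is only an illustration of how such an override map can be applied when assembling an install command; it is not the manager's actual make_pip_cmd implementation, and apply_overrides is a made-up helper name:

import sys

# Illustrative sketch only; apply_overrides is a hypothetical helper, not ComfyUI-Manager code.
pip_overrides = {}
if sys.version_info < (3, 13):
    pip_overrides['numpy'] = 'numpy<2'   # mirrors the version gate in the hunk above

def apply_overrides(packages, overrides):
    # Swap a requested package for its pinned spec when an override exists.
    return [overrides.get(pkg, pkg) for pkg in packages]

print(apply_overrides(['numpy', 'GitPython'], pip_overrides))
# on Python < 3.13 this prints: ['numpy<2', 'GitPython']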
@@ -1,65 +1,15 @@
|
|||||||
[build-system]
|
|
||||||
requires = ["setuptools >= 61.0"]
|
|
||||||
build-backend = "setuptools.build_meta"
|
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "comfyui-manager"
|
name = "comfyui-manager"
|
||||||
license = { text = "GPL-3.0-only" }
|
|
||||||
version = "4.0.0-beta.4"
|
|
||||||
requires-python = ">= 3.9"
|
|
||||||
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
|
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
|
||||||
readme = "README.md"
|
version = "3.38"
|
||||||
keywords = ["comfyui", "comfyui-manager"]
|
license = { file = "LICENSE.txt" }
|
||||||
|
dependencies = ["GitPython", "PyGithub", "matrix-nio", "transformers", "huggingface-hub>0.20", "typer", "rich", "typing-extensions", "toml", "uv", "chardet"]
|
||||||
maintainers = [
|
|
||||||
{ name = "Dr.Lt.Data", email = "dr.lt.data@gmail.com" },
|
|
||||||
{ name = "Yoland Yan", email = "yoland@comfy.org" },
|
|
||||||
{ name = "James Kwon", email = "hongilkwon316@gmail.com" },
|
|
||||||
{ name = "Robin Huang", email = "robin@comfy.org" },
|
|
||||||
]
|
|
||||||
|
|
||||||
classifiers = [
|
|
||||||
"Development Status :: 4 - Beta",
|
|
||||||
"Intended Audience :: Developers",
|
|
||||||
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
|
|
||||||
]
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
"GitPython",
|
|
||||||
"PyGithub",
|
|
||||||
"matrix-client==0.4.0",
|
|
||||||
"transformers",
|
|
||||||
"huggingface-hub>0.20",
|
|
||||||
"typer",
|
|
||||||
"rich",
|
|
||||||
"typing-extensions",
|
|
||||||
"toml",
|
|
||||||
"uv",
|
|
||||||
"chardet"
|
|
||||||
]
|
|
||||||
|
|
||||||
[project.optional-dependencies]
|
|
||||||
dev = ["pre-commit", "pytest", "ruff", "pytest-cov"]
|
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
Repository = "https://github.com/ltdrdata/ComfyUI-Manager"
|
Repository = "https://github.com/ltdrdata/ComfyUI-Manager"
|
||||||
|
# Used by Comfy Registry https://comfyregistry.org
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
[tool.comfy]
|
||||||
where = ["."]
|
PublisherId = "drltdata"
|
||||||
include = ["comfyui_manager*"]
|
DisplayName = "ComfyUI-Manager"
|
||||||
|
Icon = ""
|
||||||
[project.scripts]
|
|
||||||
cm-cli = "comfyui_manager.cm_cli.__main__:main"
|
|
||||||
|
|
||||||
[tool.ruff]
|
|
||||||
line-length = 120
|
|
||||||
target-version = "py39"
|
|
||||||
|
|
||||||
[tool.ruff.lint]
|
|
||||||
select = [
|
|
||||||
"E4", # default
|
|
||||||
"E7", # default
|
|
||||||
"E9", # default
|
|
||||||
"F", # default
|
|
||||||
"I", # isort-like behavior (import statement sorting)
|
|
||||||
]
|
|
||||||
|
|||||||
13
pytest.ini
@@ -1,13 +0,0 @@
[tool:pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
markers =
    slow: marks tests as slow (deselect with '-m "not slow"')
    integration: marks tests as integration tests
requirements.txt
@@ -1,8 +1,8 @@
 GitPython
 PyGithub
-matrix-client==0.4.0
+matrix-nio
 transformers
-huggingface-hub>0.20
+huggingface-hub
 typer
 rich
 typing-extensions
42
run_tests.py
@@ -1,42 +0,0 @@
#!/usr/bin/env python3
"""
Simple test runner for ComfyUI-Manager tests.

Usage:
    python run_tests.py                     # Run all tests
    python run_tests.py -k test_task_queue  # Run specific tests
    python run_tests.py --cov               # Run with coverage
"""

import sys
import subprocess
from pathlib import Path

def main():
    """Run pytest with appropriate arguments"""
    # Ensure we're in the project directory
    project_root = Path(__file__).parent

    # Base pytest command
    cmd = [sys.executable, "-m", "pytest"]

    # Add any command line arguments passed to this script
    cmd.extend(sys.argv[1:])

    # Add default arguments if none provided
    if len(sys.argv) == 1:
        cmd.extend([
            "tests/",
            "-v",
            "--tb=short"
        ])

    print(f"Running: {' '.join(cmd)}")
    print(f"Working directory: {project_root}")

    # Run pytest
    result = subprocess.run(cmd, cwd=project_root)
    sys.exit(result.returncode)

if __name__ == "__main__":
    main()
425
scanner.py
@@ -7,13 +7,15 @@ import concurrent
|
|||||||
import datetime
|
import datetime
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
import requests
|
import requests
|
||||||
|
import warnings
|
||||||
|
import argparse
|
||||||
|
|
||||||
builtin_nodes = set()
|
builtin_nodes = set()
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
from github import Github
|
from github import Github, Auth
|
||||||
|
|
||||||
|
|
||||||
def download_url(url, dest_folder, filename=None):
|
def download_url(url, dest_folder, filename=None):
|
||||||
@@ -39,26 +41,51 @@ def download_url(url, dest_folder, filename=None):
|
|||||||
raise Exception(f"Failed to download file from {url}")
|
raise Exception(f"Failed to download file from {url}")
|
||||||
|
|
||||||
|
|
||||||
# prepare temp dir
|
def parse_arguments():
|
||||||
if len(sys.argv) > 1:
|
"""Parse command-line arguments"""
|
||||||
temp_dir = sys.argv[1]
|
parser = argparse.ArgumentParser(
|
||||||
else:
|
description='ComfyUI Manager Node Scanner',
|
||||||
temp_dir = os.path.join(os.getcwd(), ".tmp")
|
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||||
|
epilog='''
|
||||||
|
Examples:
|
||||||
|
# Standard mode
|
||||||
|
python3 scanner.py
|
||||||
|
python3 scanner.py --skip-update
|
||||||
|
|
||||||
if not os.path.exists(temp_dir):
|
# Scan-only mode
|
||||||
os.makedirs(temp_dir)
|
python3 scanner.py --scan-only temp-urls-clean.list
|
||||||
|
python3 scanner.py --scan-only urls.list --temp-dir /custom/temp
|
||||||
|
python3 scanner.py --scan-only urls.list --skip-update
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument('--scan-only', type=str, metavar='URL_LIST_FILE',
|
||||||
|
help='Scan-only mode: provide URL list file (one URL per line)')
|
||||||
|
parser.add_argument('--temp-dir', type=str, metavar='DIR',
|
||||||
|
help='Temporary directory for cloned repositories')
|
||||||
|
parser.add_argument('--skip-update', action='store_true',
|
||||||
|
help='Skip git clone/pull operations')
|
||||||
|
parser.add_argument('--skip-stat-update', action='store_true',
|
||||||
|
help='Skip GitHub stats collection')
|
||||||
|
parser.add_argument('--skip-all', action='store_true',
|
||||||
|
help='Skip all update operations')
|
||||||
|
|
||||||
|
# Backward compatibility: positional argument for temp_dir
|
||||||
|
parser.add_argument('temp_dir_positional', nargs='?', metavar='TEMP_DIR',
|
||||||
|
help='(Legacy) Temporary directory path')
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
return args
|
||||||
|
|
||||||
|
|
||||||
skip_update = '--skip-update' in sys.argv or '--skip-all' in sys.argv
|
# Module-level variables (will be set in main if running as script)
|
||||||
skip_stat_update = '--skip-stat-update' in sys.argv or '--skip-all' in sys.argv
|
args = None
|
||||||
|
scan_only_mode = False
|
||||||
if not skip_stat_update:
|
url_list_file = None
|
||||||
g = Github(os.environ.get('GITHUB_TOKEN'))
|
temp_dir = None
|
||||||
else:
|
skip_update = False
|
||||||
g = None
|
skip_stat_update = True
|
||||||
|
g = None
|
||||||
|
|
||||||
print(f"TEMP DIR: {temp_dir}")
|
|
||||||
|
|
||||||
|
|
||||||
parse_cnt = 0
|
parse_cnt = 0
|
||||||
@@ -73,12 +100,22 @@ def extract_nodes(code_text):
|
|||||||
parse_cnt += 1
|
parse_cnt += 1
|
||||||
|
|
||||||
code_text = re.sub(r'\\[^"\']', '', code_text)
|
code_text = re.sub(r'\\[^"\']', '', code_text)
|
||||||
parsed_code = ast.parse(code_text)
|
with warnings.catch_warnings():
|
||||||
|
warnings.filterwarnings('ignore', category=SyntaxWarning)
|
||||||
|
warnings.filterwarnings('ignore', category=DeprecationWarning)
|
||||||
|
parsed_code = ast.parse(code_text)
|
||||||
|
|
||||||
assignments = (node for node in parsed_code.body if isinstance(node, ast.Assign))
|
# Support both ast.Assign and ast.AnnAssign (for type-annotated assignments)
|
||||||
|
assignments = (node for node in parsed_code.body if isinstance(node, (ast.Assign, ast.AnnAssign)))
|
||||||
|
|
||||||
for assignment in assignments:
|
for assignment in assignments:
|
||||||
if isinstance(assignment.targets[0], ast.Name) and assignment.targets[0].id in ['NODE_CONFIG', 'NODE_CLASS_MAPPINGS']:
|
# Handle ast.AnnAssign (e.g., NODE_CLASS_MAPPINGS: Type = {...})
|
||||||
|
if isinstance(assignment, ast.AnnAssign):
|
||||||
|
if isinstance(assignment.target, ast.Name) and assignment.target.id in ['NODE_CONFIG', 'NODE_CLASS_MAPPINGS']:
|
||||||
|
node_class_mappings = assignment.value
|
||||||
|
break
|
||||||
|
# Handle ast.Assign (e.g., NODE_CLASS_MAPPINGS = {...})
|
||||||
|
elif isinstance(assignment.targets[0], ast.Name) and assignment.targets[0].id in ['NODE_CONFIG', 'NODE_CLASS_MAPPINGS']:
|
||||||
node_class_mappings = assignment.value
|
node_class_mappings = assignment.value
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
@@ -94,10 +131,103 @@ def extract_nodes(code_text):
|
|||||||
return s
|
return s
|
||||||
else:
|
else:
|
||||||
return set()
|
return set()
|
||||||
except Exception:
|
except:
|
||||||
return set()
|
return set()
|
||||||
|
|
||||||
|
|
||||||
|
def has_comfy_node_base(class_node):
|
||||||
|
"""Check if class inherits from io.ComfyNode or ComfyNode"""
|
||||||
|
for base in class_node.bases:
|
||||||
|
# Case 1: ComfyNode
|
||||||
|
if isinstance(base, ast.Name) and base.id == 'ComfyNode':
|
||||||
|
return True
|
||||||
|
# Case 2: io.ComfyNode
|
||||||
|
elif isinstance(base, ast.Attribute):
|
||||||
|
if base.attr == 'ComfyNode':
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def extract_keyword_value(call_node, keyword):
|
||||||
|
"""
|
||||||
|
Extract string value of keyword argument
|
||||||
|
Schema(node_id="MyNode") -> "MyNode"
|
||||||
|
"""
|
||||||
|
for kw in call_node.keywords:
|
||||||
|
if kw.arg == keyword:
|
||||||
|
# ast.Constant (Python 3.8+)
|
||||||
|
if isinstance(kw.value, ast.Constant):
|
||||||
|
if isinstance(kw.value.value, str):
|
||||||
|
return kw.value.value
|
||||||
|
# ast.Str (Python 3.7-) - suppress deprecation warning
|
||||||
|
else:
|
||||||
|
with warnings.catch_warnings():
|
||||||
|
warnings.filterwarnings('ignore', category=DeprecationWarning)
|
||||||
|
if hasattr(ast, 'Str') and isinstance(kw.value, ast.Str):
|
||||||
|
return kw.value.s
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def is_schema_call(call_node):
|
||||||
|
"""Check if ast.Call is io.Schema() or Schema()"""
|
||||||
|
func = call_node.func
|
||||||
|
if isinstance(func, ast.Name) and func.id == 'Schema':
|
||||||
|
return True
|
||||||
|
elif isinstance(func, ast.Attribute) and func.attr == 'Schema':
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def extract_node_id_from_schema(class_node):
|
||||||
|
"""
|
||||||
|
Extract node_id from define_schema() method
|
||||||
|
"""
|
||||||
|
for item in class_node.body:
|
||||||
|
if isinstance(item, ast.FunctionDef) and item.name == 'define_schema':
|
||||||
|
# Walk through function body
|
||||||
|
for stmt in ast.walk(item):
|
||||||
|
if isinstance(stmt, ast.Call):
|
||||||
|
# Check if it's Schema() call
|
||||||
|
if is_schema_call(stmt):
|
||||||
|
node_id = extract_keyword_value(stmt, 'node_id')
|
||||||
|
if node_id:
|
||||||
|
return node_id
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def extract_v3_nodes(code_text):
|
||||||
|
"""
|
||||||
|
Extract V3 node IDs using AST parsing
|
||||||
|
Returns: set of node_id strings
|
||||||
|
"""
|
||||||
|
global parse_cnt
|
||||||
|
|
||||||
|
try:
|
||||||
|
if parse_cnt % 100 == 0:
|
||||||
|
print(".", end="", flush=True)
|
||||||
|
parse_cnt += 1
|
||||||
|
|
||||||
|
with warnings.catch_warnings():
|
||||||
|
warnings.filterwarnings('ignore', category=SyntaxWarning)
|
||||||
|
warnings.filterwarnings('ignore', category=DeprecationWarning)
|
||||||
|
tree = ast.parse(code_text)
|
||||||
|
except (SyntaxError, UnicodeDecodeError):
|
||||||
|
return set()
|
||||||
|
|
||||||
|
nodes = set()
|
||||||
|
|
||||||
|
# Find io.ComfyNode subclasses
|
||||||
|
for node in ast.walk(tree):
|
||||||
|
if isinstance(node, ast.ClassDef):
|
||||||
|
# Check if inherits from ComfyNode
|
||||||
|
if has_comfy_node_base(node):
|
||||||
|
node_id = extract_node_id_from_schema(node)
|
||||||
|
if node_id:
|
||||||
|
nodes.add(node_id)
|
||||||
|
|
||||||
|
return nodes
|
||||||
|
|
||||||
|
|
||||||
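To make the V3 detection helpers above concrete, here is a hypothetical node source in the shape they look for. The io.ComfyNode import path and the @classmethod decorator are assumptions, not taken from this diff; the helpers themselves only require a ComfyNode (or io.ComfyNode) base class and a Schema(node_id=...) call inside a method named define_schema:

# Hypothetical example source, for illustration only.
example_source = '''
class ExampleNode(io.ComfyNode):
    @classmethod
    def define_schema(cls):
        return io.Schema(node_id="ExampleNode")
'''

# Fed through the extract_v3_nodes() defined above, this yields {'ExampleNode'}.
print(extract_v3_nodes(example_source))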
# scan
|
# scan
|
||||||
def scan_in_file(filename, is_builtin=False):
|
def scan_in_file(filename, is_builtin=False):
|
||||||
global builtin_nodes
|
global builtin_nodes
|
||||||
@@ -105,13 +235,18 @@ def scan_in_file(filename, is_builtin=False):
|
|||||||
with open(filename, encoding='utf-8', errors='ignore') as file:
|
with open(filename, encoding='utf-8', errors='ignore') as file:
|
||||||
code = file.read()
|
code = file.read()
|
||||||
|
|
||||||
pattern = r"_CLASS_MAPPINGS\s*=\s*{([^}]*)}"
|
# Support type annotations (e.g., NODE_CLASS_MAPPINGS: Type = {...}) and line continuations (\)
|
||||||
|
pattern = r"_CLASS_MAPPINGS\s*(?::\s*\w+\s*)?=\s*(?:\\\s*)?{([^}]*)}"
|
||||||
regex = re.compile(pattern, re.MULTILINE | re.DOTALL)
|
regex = re.compile(pattern, re.MULTILINE | re.DOTALL)
|
||||||
|
|
||||||
nodes = set()
|
nodes = set()
|
||||||
class_dict = {}
|
class_dict = {}
|
||||||
|
|
||||||
|
# V1 nodes detection
|
||||||
nodes |= extract_nodes(code)
|
nodes |= extract_nodes(code)
|
||||||
|
|
||||||
|
# V3 nodes detection
|
||||||
|
nodes |= extract_v3_nodes(code)
|
||||||
code = re.sub(r'^#.*?$', '', code, flags=re.MULTILINE)
|
code = re.sub(r'^#.*?$', '', code, flags=re.MULTILINE)
|
||||||
|
|
||||||
def extract_keys(pattern, code):
|
def extract_keys(pattern, code):
|
||||||
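As an illustration of the widened matching above (the optional type annotation in the regex and the new ast.AnnAssign branch), both of the following module-level spellings are now recognized; the node classes are placeholders, not real ComfyUI nodes, and a real extension would define the mapping only once:

class ExampleNodeA: ...
class ExampleNodeB: ...

# Plain assignment (ast.Assign), matched before and after this change.
NODE_CLASS_MAPPINGS = {"ExampleNodeA": ExampleNodeA}

# Type-annotated assignment (ast.AnnAssign), now also matched.
NODE_CLASS_MAPPINGS: dict = {"ExampleNodeB": ExampleNodeB}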
@@ -208,6 +343,53 @@ def get_nodes(target_dir):
|
|||||||
return py_files, directories
|
return py_files, directories
|
||||||
|
|
||||||
|
|
||||||
|
def get_urls_from_list_file(list_file):
|
||||||
|
"""
|
||||||
|
Read URLs from list file for scan-only mode
|
||||||
|
|
||||||
|
Args:
|
||||||
|
list_file (str): Path to URL list file (one URL per line)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list of tuples: [(url, "", None, None), ...]
|
||||||
|
Format: (url, title, preemptions, nodename_pattern)
|
||||||
|
- title: Empty string
|
||||||
|
- preemptions: None
|
||||||
|
- nodename_pattern: None
|
||||||
|
|
||||||
|
File format:
|
||||||
|
https://github.com/owner/repo1
|
||||||
|
https://github.com/owner/repo2
|
||||||
|
# Comments starting with # are ignored
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
FileNotFoundError: If list_file does not exist
|
||||||
|
"""
|
||||||
|
if not os.path.exists(list_file):
|
||||||
|
raise FileNotFoundError(f"URL list file not found: {list_file}")
|
||||||
|
|
||||||
|
urls = []
|
||||||
|
with open(list_file, 'r', encoding='utf-8') as f:
|
||||||
|
for line_num, line in enumerate(f, 1):
|
||||||
|
line = line.strip()
|
||||||
|
|
||||||
|
# Skip empty lines and comments
|
||||||
|
if not line or line.startswith('#'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Validate URL format (basic check)
|
||||||
|
if not (line.startswith('http://') or line.startswith('https://')):
|
||||||
|
print(f"WARNING: Line {line_num} is not a valid URL: {line}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Add URL with empty metadata
|
||||||
|
# (url, title, preemptions, nodename_pattern)
|
||||||
|
urls.append((line, "", None, None))
|
||||||
|
|
||||||
|
print(f"Loaded {len(urls)} URLs from {list_file}")
|
||||||
|
return urls
|
||||||
|
|
||||||
|
|
||||||
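A minimal illustration of the scan-only input format described in the docstring above; the file name and repository URL are invented for the example:

# Write a tiny URL list in the documented format, then parse it with the
# get_urls_from_list_file() defined above.
with open("urls.list", "w", encoding="utf-8") as f:
    f.write("# one repository URL per line; comments and blank lines are ignored\n")
    f.write("\n")
    f.write("https://github.com/example-author/example-custom-node\n")

urls = get_urls_from_list_file("urls.list")
print(urls)
# -> [('https://github.com/example-author/example-custom-node', '', None, None)]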
def get_git_urls_from_json(json_file):
|
def get_git_urls_from_json(json_file):
|
||||||
with open(json_file, encoding='utf-8') as file:
|
with open(json_file, encoding='utf-8') as file:
|
||||||
data = json.load(file)
|
data = json.load(file)
|
||||||
@@ -255,22 +437,52 @@ def clone_or_pull_git_repository(git_url):
|
|||||||
repo.git.submodule('update', '--init', '--recursive')
|
repo.git.submodule('update', '--init', '--recursive')
|
||||||
print(f"Pulling {repo_name}...")
|
print(f"Pulling {repo_name}...")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Pulling {repo_name} failed: {e}")
|
print(f"Failed to pull '{repo_name}': {e}")
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
Repo.clone_from(git_url, repo_dir, recursive=True)
|
Repo.clone_from(git_url, repo_dir, recursive=True)
|
||||||
print(f"Cloning {repo_name}...")
|
print(f"Cloning {repo_name}...")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Cloning {repo_name} failed: {e}")
|
print(f"Failed to clone '{repo_name}': {e}")
|
||||||
|
|
||||||
|
|
||||||
def update_custom_nodes():
|
def update_custom_nodes(scan_only_mode=False, url_list_file=None):
|
||||||
|
"""
|
||||||
|
Update custom nodes by cloning/pulling repositories
|
||||||
|
|
||||||
|
Args:
|
||||||
|
scan_only_mode (bool): If True, use URL list file instead of custom-node-list.json
|
||||||
|
url_list_file (str): Path to URL list file (required if scan_only_mode=True)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: node_info mapping {repo_name: (url, title, preemptions, node_pattern)}
|
||||||
|
"""
|
||||||
if not os.path.exists(temp_dir):
|
if not os.path.exists(temp_dir):
|
||||||
os.makedirs(temp_dir)
|
os.makedirs(temp_dir)
|
||||||
|
|
||||||
node_info = {}
|
node_info = {}
|
||||||
|
|
||||||
git_url_titles_preemptions = get_git_urls_from_json('custom-node-list.json')
|
# Select URL source based on mode
|
||||||
|
if scan_only_mode:
|
||||||
|
if not url_list_file:
|
||||||
|
raise ValueError("url_list_file is required in scan-only mode")
|
||||||
|
|
||||||
|
git_url_titles_preemptions = get_urls_from_list_file(url_list_file)
|
||||||
|
print("\n[Scan-Only Mode]")
|
||||||
|
print(f" - URL source: {url_list_file}")
|
||||||
|
print(" - GitHub stats: DISABLED")
|
||||||
|
print(f" - Git clone/pull: {'ENABLED' if not skip_update else 'DISABLED'}")
|
||||||
|
print(" - Metadata: EMPTY")
|
||||||
|
else:
|
||||||
|
if not os.path.exists('custom-node-list.json'):
|
||||||
|
raise FileNotFoundError("custom-node-list.json not found")
|
||||||
|
|
||||||
|
git_url_titles_preemptions = get_git_urls_from_json('custom-node-list.json')
|
||||||
|
print("\n[Standard Mode]")
|
||||||
|
print(" - URL source: custom-node-list.json")
|
||||||
|
print(f" - GitHub stats: {'ENABLED' if not skip_stat_update else 'DISABLED'}")
|
||||||
|
print(f" - Git clone/pull: {'ENABLED' if not skip_update else 'DISABLED'}")
|
||||||
|
print(" - Metadata: FULL")
|
||||||
|
|
||||||
def process_git_url_title(url, title, preemptions, node_pattern):
|
def process_git_url_title(url, title, preemptions, node_pattern):
|
||||||
name = os.path.basename(url)
|
name = os.path.basename(url)
|
||||||
@@ -382,36 +594,48 @@ def update_custom_nodes():
|
|||||||
if not skip_stat_update:
|
if not skip_stat_update:
|
||||||
process_git_stats(git_url_titles_preemptions)
|
process_git_stats(git_url_titles_preemptions)
|
||||||
|
|
||||||
|
# Git clone/pull for all repositories
|
||||||
with concurrent.futures.ThreadPoolExecutor(11) as executor:
|
with concurrent.futures.ThreadPoolExecutor(11) as executor:
|
||||||
for url, title, preemptions, node_pattern in git_url_titles_preemptions:
|
for url, title, preemptions, node_pattern in git_url_titles_preemptions:
|
||||||
executor.submit(process_git_url_title, url, title, preemptions, node_pattern)
|
executor.submit(process_git_url_title, url, title, preemptions, node_pattern)
|
||||||
|
|
||||||
py_url_titles_and_pattern = get_py_urls_from_json('custom-node-list.json')
|
# .py file download (skip in scan-only mode - only process git repos)
|
||||||
|
if not scan_only_mode:
|
||||||
|
py_url_titles_and_pattern = get_py_urls_from_json('custom-node-list.json')
|
||||||
|
|
||||||
def download_and_store_info(url_title_preemptions_and_pattern):
|
def download_and_store_info(url_title_preemptions_and_pattern):
|
||||||
url, title, preemptions, node_pattern = url_title_preemptions_and_pattern
|
url, title, preemptions, node_pattern = url_title_preemptions_and_pattern
|
||||||
name = os.path.basename(url)
|
name = os.path.basename(url)
|
||||||
if name.endswith(".py"):
|
if name.endswith(".py"):
|
||||||
node_info[name] = (url, title, preemptions, node_pattern)
|
node_info[name] = (url, title, preemptions, node_pattern)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
download_url(url, temp_dir)
|
download_url(url, temp_dir)
|
||||||
except Exception:
|
except:
|
||||||
print(f"[ERROR] Cannot download '{url}'")
|
print(f"[ERROR] Cannot download '{url}'")
|
||||||
|
|
||||||
with concurrent.futures.ThreadPoolExecutor(10) as executor:
|
with concurrent.futures.ThreadPoolExecutor(10) as executor:
|
||||||
executor.map(download_and_store_info, py_url_titles_and_pattern)
|
executor.map(download_and_store_info, py_url_titles_and_pattern)
|
||||||
|
|
||||||
return node_info
|
return node_info
|
||||||
|
|
||||||
|
|
||||||
def gen_json(node_info):
|
def gen_json(node_info, scan_only_mode=False):
|
||||||
|
"""
|
||||||
|
Generate extension-node-map.json from scanned node information
|
||||||
|
|
||||||
|
Args:
|
||||||
|
node_info (dict): Repository metadata mapping
|
||||||
|
scan_only_mode (bool): If True, exclude metadata from output
|
||||||
|
"""
|
||||||
# scan from .py file
|
# scan from .py file
|
||||||
node_files, node_dirs = get_nodes(temp_dir)
|
node_files, node_dirs = get_nodes(temp_dir)
|
||||||
|
|
||||||
comfyui_path = os.path.abspath(os.path.join(temp_dir, "ComfyUI"))
|
comfyui_path = os.path.abspath(os.path.join(temp_dir, "ComfyUI"))
|
||||||
node_dirs.remove(comfyui_path)
|
# Only reorder if ComfyUI exists in the list
|
||||||
node_dirs = [comfyui_path] + node_dirs
|
if comfyui_path in node_dirs:
|
||||||
|
node_dirs.remove(comfyui_path)
|
||||||
|
node_dirs = [comfyui_path] + node_dirs
|
||||||
|
|
||||||
data = {}
|
data = {}
|
||||||
for dirname in node_dirs:
|
for dirname in node_dirs:
|
||||||
@@ -422,6 +646,7 @@ def gen_json(node_info):
|
|||||||
for py in py_files:
|
for py in py_files:
|
||||||
nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
|
nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
|
||||||
nodes.update(nodes_in_file)
|
nodes.update(nodes_in_file)
|
||||||
|
# Include metadata from .py files in both modes
|
||||||
metadata.update(metadata_in_file)
|
metadata.update(metadata_in_file)
|
||||||
|
|
||||||
dirname = os.path.basename(dirname)
|
dirname = os.path.basename(dirname)
|
||||||
@@ -436,17 +661,28 @@ def gen_json(node_info):
|
|||||||
if dirname in node_info:
|
if dirname in node_info:
|
||||||
git_url, title, preemptions, node_pattern = node_info[dirname]
|
git_url, title, preemptions, node_pattern = node_info[dirname]
|
||||||
|
|
||||||
metadata['title_aux'] = title
|
# Conditionally add metadata based on mode
|
||||||
|
if not scan_only_mode:
|
||||||
|
# Standard mode: include all metadata
|
||||||
|
metadata['title_aux'] = title
|
||||||
|
|
||||||
if preemptions is not None:
|
if preemptions is not None:
|
||||||
metadata['preemptions'] = preemptions
|
metadata['preemptions'] = preemptions
|
||||||
|
|
||||||
if node_pattern is not None:
|
if node_pattern is not None:
|
||||||
metadata['nodename_pattern'] = node_pattern
|
metadata['nodename_pattern'] = node_pattern
|
||||||
|
# Scan-only mode: metadata remains empty
|
||||||
|
|
||||||
data[git_url] = (nodes, metadata)
|
data[git_url] = (nodes, metadata)
|
||||||
else:
|
else:
|
||||||
print(f"WARN: {dirname} is removed from custom-node-list.json")
|
# Scan-only mode: Repository not in node_info (expected behavior)
|
||||||
|
# Construct URL from dirname (author_repo format)
|
||||||
|
if '_' in dirname:
|
||||||
|
parts = dirname.split('_', 1)
|
||||||
|
git_url = f"https://github.com/{parts[0]}/{parts[1]}"
|
||||||
|
data[git_url] = (nodes, metadata)
|
||||||
|
else:
|
||||||
|
print(f"WARN: {dirname} is removed from custom-node-list.json")
|
||||||
|
|
||||||
for file in node_files:
|
for file in node_files:
|
||||||
nodes, metadata = scan_in_file(file)
|
nodes, metadata = scan_in_file(file)
|
||||||
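A small illustration of the scan-only fallback added above, which rebuilds a GitHub URL from a cloned directory named author_repo; the directory name is hypothetical, and note that the split assumes the first underscore separates owner from repository:

dirname = "example-author_example-custom-node"   # hypothetical clone directory name

if '_' in dirname:
    owner, repo = dirname.split('_', 1)
    print(f"https://github.com/{owner}/{repo}")
    # -> https://github.com/example-author/example-custom-node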
@@ -459,13 +695,16 @@ def gen_json(node_info):
|
|||||||
|
|
||||||
if file in node_info:
|
if file in node_info:
|
||||||
url, title, preemptions, node_pattern = node_info[file]
|
url, title, preemptions, node_pattern = node_info[file]
|
||||||
metadata['title_aux'] = title
|
|
||||||
|
|
||||||
if preemptions is not None:
|
# Conditionally add metadata based on mode
|
||||||
metadata['preemptions'] = preemptions
|
if not scan_only_mode:
|
||||||
|
metadata['title_aux'] = title
|
||||||
|
|
||||||
if node_pattern is not None:
|
if preemptions is not None:
|
||||||
metadata['nodename_pattern'] = node_pattern
|
metadata['preemptions'] = preemptions
|
||||||
|
|
||||||
|
if node_pattern is not None:
|
||||||
|
metadata['nodename_pattern'] = node_pattern
|
||||||
|
|
||||||
data[url] = (nodes, metadata)
|
data[url] = (nodes, metadata)
|
||||||
else:
|
else:
|
||||||
@@ -477,6 +716,10 @@ def gen_json(node_info):
|
|||||||
for extension in extensions:
|
for extension in extensions:
|
||||||
node_list_json_path = os.path.join(temp_dir, extension, 'node_list.json')
|
node_list_json_path = os.path.join(temp_dir, extension, 'node_list.json')
|
||||||
if os.path.exists(node_list_json_path):
|
if os.path.exists(node_list_json_path):
|
||||||
|
# Skip if extension not in node_info (scan-only mode with limited URLs)
|
||||||
|
if extension not in node_info:
|
||||||
|
continue
|
||||||
|
|
||||||
git_url, title, preemptions, node_pattern = node_info[extension]
|
git_url, title, preemptions, node_pattern = node_info[extension]
|
||||||
|
|
||||||
with open(node_list_json_path, 'r', encoding='utf-8') as f:
|
with open(node_list_json_path, 'r', encoding='utf-8') as f:
|
||||||
@@ -496,16 +739,25 @@ def gen_json(node_info):
|
|||||||
nodes_in_url, metadata_in_url = data[git_url]
|
nodes_in_url, metadata_in_url = data[git_url]
|
||||||
nodes = set(nodes_in_url)
|
nodes = set(nodes_in_url)
|
||||||
|
|
||||||
for x, desc in node_list_json.items():
|
try:
|
||||||
nodes.add(x.strip())
|
for x, desc in node_list_json.items():
|
||||||
|
nodes.add(x.strip())
|
||||||
|
except Exception as e:
|
||||||
|
print(f"\nERROR: Invalid json format '{node_list_json_path}'")
|
||||||
|
print("------------------------------------------------------")
|
||||||
|
print(e)
|
||||||
|
print("------------------------------------------------------")
|
||||||
|
node_list_json = {}
|
||||||
|
|
||||||
metadata_in_url['title_aux'] = title
|
# Conditionally add metadata based on mode
|
||||||
|
if not scan_only_mode:
|
||||||
|
metadata_in_url['title_aux'] = title
|
||||||
|
|
||||||
if preemptions is not None:
|
if preemptions is not None:
|
||||||
metadata['preemptions'] = preemptions
|
metadata_in_url['preemptions'] = preemptions
|
||||||
|
|
||||||
if node_pattern is not None:
|
if node_pattern is not None:
|
||||||
metadata_in_url['nodename_pattern'] = node_pattern
|
metadata_in_url['nodename_pattern'] = node_pattern
|
||||||
|
|
||||||
nodes = list(nodes)
|
nodes = list(nodes)
|
||||||
nodes.sort()
|
nodes.sort()
|
||||||
@@ -516,12 +768,53 @@ def gen_json(node_info):
|
|||||||
json.dump(data, file, indent=4, sort_keys=True)
|
json.dump(data, file, indent=4, sort_keys=True)
|
||||||
|
|
||||||
|
|
||||||
print("### ComfyUI Manager Node Scanner ###")
|
if __name__ == "__main__":
|
||||||
|
# Parse arguments
|
||||||
|
args = parse_arguments()
|
||||||
|
|
||||||
print("\n# Updating extensions\n")
|
# Determine mode
|
||||||
updated_node_info = update_custom_nodes()
|
scan_only_mode = args.scan_only is not None
|
||||||
|
url_list_file = args.scan_only if scan_only_mode else None
|
||||||
|
|
||||||
print("\n# 'extension-node-map.json' file is generated.\n")
|
# Determine temp_dir
|
||||||
gen_json(updated_node_info)
|
if args.temp_dir:
|
||||||
|
temp_dir = args.temp_dir
|
||||||
|
elif args.temp_dir_positional:
|
||||||
|
temp_dir = args.temp_dir_positional
|
||||||
|
else:
|
||||||
|
temp_dir = os.path.join(os.getcwd(), ".tmp")
|
||||||
|
|
||||||
print("\nDONE.\n")
|
if not os.path.exists(temp_dir):
|
||||||
|
os.makedirs(temp_dir)
|
||||||
|
|
||||||
|
# Determine skip flags
|
||||||
|
skip_update = args.skip_update or args.skip_all
|
||||||
|
skip_stat_update = args.skip_stat_update or args.skip_all or scan_only_mode
|
||||||
|
|
||||||
|
if not skip_stat_update:
|
||||||
|
auth = Auth.Token(os.environ.get('GITHUB_TOKEN'))
|
||||||
|
g = Github(auth=auth)
|
||||||
|
else:
|
||||||
|
g = None
|
||||||
|
|
||||||
|
print("### ComfyUI Manager Node Scanner ###")
|
||||||
|
|
||||||
|
if scan_only_mode:
|
||||||
|
print(f"\n# [Scan-Only Mode] Processing URL list: {url_list_file}\n")
|
||||||
|
else:
|
||||||
|
print("\n# [Standard Mode] Updating extensions\n")
|
||||||
|
|
||||||
|
# Update/clone repositories and collect node info
|
||||||
|
updated_node_info = update_custom_nodes(scan_only_mode, url_list_file)
|
||||||
|
|
||||||
|
print("\n# Generating 'extension-node-map.json'...\n")
|
||||||
|
|
||||||
|
# Generate extension-node-map.json
|
||||||
|
gen_json(updated_node_info, scan_only_mode)
|
||||||
|
|
||||||
|
print("\n✅ DONE.\n")
|
||||||
|
|
||||||
|
if scan_only_mode:
|
||||||
|
print("Output: extension-node-map.json (node mappings only)")
|
||||||
|
else:
|
||||||
|
print("Output: extension-node-map.json (full metadata)")
|
||||||
39
scripts/colab-dependencies.py
Normal file
@@ -0,0 +1,39 @@
import os
import subprocess


def get_enabled_subdirectories_with_files(base_directory):
    subdirs_with_files = []
    for subdir in os.listdir(base_directory):
        try:
            full_path = os.path.join(base_directory, subdir)
            if os.path.isdir(full_path) and not subdir.endswith(".disabled") and not subdir.startswith('.') and subdir != '__pycache__':
                print(f"## Install dependencies for '{subdir}'")
                requirements_file = os.path.join(full_path, "requirements.txt")
                install_script = os.path.join(full_path, "install.py")

                if os.path.exists(requirements_file) or os.path.exists(install_script):
                    subdirs_with_files.append((full_path, requirements_file, install_script))
        except Exception as e:
            print(f"EXCEPTION During Dependencies INSTALL on '{subdir}':\n{e}")

    return subdirs_with_files


def install_requirements(requirements_file_path):
    if os.path.exists(requirements_file_path):
        subprocess.run(["pip", "install", "-r", requirements_file_path])


def run_install_script(install_script_path):
    if os.path.exists(install_script_path):
        subprocess.run(["python", install_script_path])


custom_nodes_directory = "custom_nodes"
subdirs_with_files = get_enabled_subdirectories_with_files(custom_nodes_directory)


for subdir, requirements_file, install_script in subdirs_with_files:
    install_requirements(requirements_file)
    run_install_script(install_script)
21
scripts/install-comfyui-venv-linux.sh
Executable file
@@ -0,0 +1,21 @@
git clone https://github.com/comfyanonymous/ComfyUI
cd ComfyUI/custom_nodes
git clone https://github.com/ltdrdata/ComfyUI-Manager comfyui-manager
cd ..
python -m venv venv
source venv/bin/activate
python -m pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu121
python -m pip install -r requirements.txt
python -m pip install -r custom_nodes/comfyui-manager/requirements.txt
cd ..
echo "#!/bin/bash" > run_gpu.sh
echo "cd ComfyUI" >> run_gpu.sh
echo "source venv/bin/activate" >> run_gpu.sh
echo "python main.py --preview-method auto" >> run_gpu.sh
chmod +x run_gpu.sh

echo "#!/bin/bash" > run_cpu.sh
echo "cd ComfyUI" >> run_cpu.sh
echo "source venv/bin/activate" >> run_cpu.sh
echo "python main.py --preview-method auto --cpu" >> run_cpu.sh
chmod +x run_cpu.sh
17
scripts/install-comfyui-venv-win.bat
Executable file
@@ -0,0 +1,17 @@
git clone https://github.com/comfyanonymous/ComfyUI
cd ComfyUI/custom_nodes
git clone https://github.com/ltdrdata/ComfyUI-Manager comfyui-manager
cd ..
python -m venv venv
call venv/Scripts/activate
python -m pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu121
python -m pip install -r requirements.txt
python -m pip install -r custom_nodes/comfyui-manager/requirements.txt
cd ..
echo "cd ComfyUI" >> run_gpu.bat
echo "call venv/Scripts/activate" >> run_gpu.bat
echo "python main.py" >> run_gpu.bat

echo "cd ComfyUI" >> run_cpu.bat
echo "call venv/Scripts/activate" >> run_cpu.bat
echo "python main.py --cpu" >> run_cpu.bat
3
scripts/install-manager-for-portable-version.bat
Normal file
@@ -0,0 +1,3 @@
.\python_embeded\python.exe -s -m pip install gitpython
.\python_embeded\python.exe -c "import git; git.Repo.clone_from('https://github.com/ltdrdata/ComfyUI-Manager', './ComfyUI/custom_nodes/comfyui-manager')"
.\python_embeded\python.exe -m pip install -r ./ComfyUI/custom_nodes/comfyui-manager/requirements.txt
@@ -1,89 +0,0 @@
# ComfyUI-Manager Tests

This directory contains unit tests for ComfyUI-Manager components.

## Running Tests

### Using the Virtual Environment

```bash
# From the project root
/path/to/comfyui/.venv/bin/python -m pytest tests/ -v
```

### Using the Test Runner

```bash
# Run all tests
python run_tests.py

# Run specific tests
python run_tests.py -k test_task_queue

# Run with coverage
python run_tests.py --cov
```

## Test Structure

### test_task_queue.py

Comprehensive tests for the TaskQueue functionality including:

- **Basic Operations**: Initialization, adding/removing tasks, state management
- **Batch Tracking**: Automatic batch creation, history saving, finalization
- **Thread Safety**: Concurrent access, worker lifecycle management
- **Integration Testing**: Full task processing workflow
- **Edge Cases**: Empty queues, invalid data, exception handling

**Key Features Tested:**
- ✅ Task queueing with Pydantic model validation
- ✅ Batch history tracking and persistence
- ✅ Thread-safe concurrent operations
- ✅ Worker thread lifecycle management
- ✅ WebSocket message tracking
- ✅ State snapshots and transitions

### MockTaskQueue

The tests use a `MockTaskQueue` class that:
- Isolates testing from global state and external dependencies
- Provides dependency injection for mocking external services
- Maintains the same API as the real TaskQueue
- Supports both synchronous and asynchronous testing patterns
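
The full `MockTaskQueue` appears in `test_task_queue.py` later in this diff; as a rough, condensed sketch of the dependency-injection idea (the `MiniMockQueue` name and trimmed methods below are illustrative only, not part of the codebase):

```python
import threading
from pathlib import Path
from typing import Optional
from unittest.mock import MagicMock


class MiniMockQueue:
    """Condensed, illustrative sketch of the MockTaskQueue pattern."""

    def __init__(self, history_dir: Optional[Path] = None):
        # No global/singleton registration: every test builds its own instance.
        self.mutex = threading.RLock()
        self.pending_tasks = []
        self._history_dir = history_dir
        # External services are injected as mocks instead of touching real dependencies.
        self.mock_core = MagicMock()
        self.mock_prompt_server = MagicMock()

    def put(self, item) -> None:
        # Same put() surface as the real TaskQueue, minus the heavy lifting.
        with self.mutex:
            self.pending_tasks.append(item)
```

A test then constructs a fresh instance (optionally pointing `history_dir` at a temporary directory) and drives it directly, so no module-level state leaks between tests.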
## Test Categories

- **Unit Tests**: Individual method testing with mocked dependencies
- **Integration Tests**: Full workflow testing with real threading
- **Concurrency Tests**: Multi-threaded access verification
- **Edge Case Tests**: Error conditions and boundary cases

## Dependencies

Tests require:
- `pytest` - Test framework
- `pytest-asyncio` - Async test support
- `pydantic` - Data model validation

Install with: `pip install -e ".[dev]"`

## Design Notes

### Handling Singleton Pattern

The real TaskQueue uses a singleton pattern which makes testing challenging. The MockTaskQueue avoids this by:
- Not setting global instance variables
- Creating fresh instances per test
- Providing controlled dependency injection
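
Concretely, "creating fresh instances per test" is just an ordinary `pytest` fixture; a minimal sketch, assuming the `MockTaskQueue` from `test_task_queue.py` shown later in this diff is importable as written below:

```python
import pytest

# Assumption: MockTaskQueue is importable from the test module shown later in this diff.
from test_task_queue import MockTaskQueue


@pytest.fixture
def task_queue(tmp_path):
    # A brand-new, non-singleton instance per test; tmp_path isolates batch history files.
    return MockTaskQueue(history_dir=tmp_path)


def test_starts_empty(task_queue):
    assert task_queue.total_count() == 0
    assert task_queue.batch_id is None
```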

### Thread Management

Tests handle threading complexities by:
- Using controlled mock workers for predictable behavior
- Providing synchronization primitives for timing-sensitive tests
- Testing both successful workflows and exception scenarios
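
The "synchronization primitives" above are mostly plain `threading.Event` objects, as in `test_start_stop_worker` later in this diff; a standalone sketch of the idea:

```python
import threading


def test_worker_lifecycle_with_event():
    # The event lets the test decide exactly when the worker may finish,
    # avoiding sleep-based timing assumptions.
    stop_event = threading.Event()

    def mock_worker():
        stop_event.wait(timeout=1.0)  # Block until the test signals completion

    worker = threading.Thread(target=mock_worker)
    worker.start()
    assert worker.is_alive()

    stop_event.set()          # Signal the worker to stop
    worker.join(timeout=1.0)  # Deterministically wait for it to exit
    assert not worker.is_alive()
```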

### Heapq Compatibility

The original TaskQueue uses `heapq` with Pydantic models, which don't support comparison by default. Tests solve this by wrapping items in comparable tuples with priority values, maintaining FIFO order while enabling heap operations.
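
As a standalone illustration of that tuple-wrapping trick (simplified from the `put`/`get` methods in `test_task_queue.py` below; the `Task` model here is a stand-in):

```python
import heapq

from pydantic import BaseModel


class Task(BaseModel):
    ui_id: str  # Pydantic models define no ordering, so they cannot be heap items directly


pending = []
for counter, name in enumerate(["first", "second", "third"]):
    # (counter, item): the heap orders by the unique integer counter,
    # so the model itself is never compared and FIFO order is preserved.
    heapq.heappush(pending, (counter, Task(ui_id=name)))

drained = [heapq.heappop(pending)[1].ui_id for _ in range(len(pending))]
assert drained == ["first", "second", "third"]
```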
@@ -1 +0,0 @@
"""Test suite for ComfyUI-Manager"""
@@ -1,510 +0,0 @@
"""
Tests for TaskQueue functionality.

This module tests the core TaskQueue operations including:
- Task queueing and processing
- Batch tracking
- Thread lifecycle management
- State management
- WebSocket message delivery
"""

import asyncio
import json
import threading
import time
import uuid
from datetime import datetime
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, Mock, patch
from typing import Any, Dict, Optional

import pytest

from comfyui_manager.data_models import (
    QueueTaskItem,
    TaskExecutionStatus,
    TaskStateMessage,
    InstallPackParams,
    ManagerDatabaseSource,
    ManagerChannel,
)


class MockTaskQueue:
    """
    A testable version of TaskQueue that allows for dependency injection
    and isolated testing without global state.
    """

    def __init__(self, history_dir: Optional[Path] = None):
        # Don't set the global instance for testing
        self.mutex = threading.RLock()
        self.not_empty = threading.Condition(self.mutex)
        self.current_index = 0
        self.pending_tasks = []
        self.running_tasks = {}
        self.history_tasks = {}
        self.task_counter = 0
        self.batch_id = None
        self.batch_start_time = None
        self.batch_state_before = None
        self._worker_task = None
        self._history_dir = history_dir

        # Mock external dependencies
        self.mock_core = MagicMock()
        self.mock_prompt_server = MagicMock()

    def is_processing(self) -> bool:
        """Check if the queue is currently processing tasks"""
        return (
            self._worker_task is not None
            and self._worker_task.is_alive()
        )

    def start_worker(self, mock_task_worker=None) -> bool:
        """Start the task worker. Can inject a mock worker for testing."""
        if self._worker_task is not None and self._worker_task.is_alive():
            return False  # Already running

        if mock_task_worker:
            self._worker_task = threading.Thread(target=mock_task_worker)
        else:
            # Use a simple test worker that processes one task then stops
            self._worker_task = threading.Thread(target=self._test_worker)
        self._worker_task.start()
        return True

    def _test_worker(self):
        """Simple test worker that processes tasks without external dependencies"""
        while True:
            task = self.get(timeout=1.0)  # Short timeout for tests
            if task is None:
                if self.total_count() == 0:
                    break
                continue

            item, task_index = task

            # Simulate task processing
            self.running_tasks[task_index] = item

            # Simulate work
            time.sleep(0.1)

            # Mark as completed
            status = TaskExecutionStatus(
                status_str="success",
                completed=True,
                messages=["Test task completed"]
            )

            self.mark_done(task_index, item, status, "Test result")

            # Clean up
            if task_index in self.running_tasks:
                del self.running_tasks[task_index]

    def get_current_state(self) -> TaskStateMessage:
        """Get current queue state with mocked dependencies"""
        return TaskStateMessage(
            history=self.get_history(),
            running_queue=self.get_current_queue()[0],
            pending_queue=self.get_current_queue()[1],
            installed_packs={}  # Mocked empty
        )

    def send_queue_state_update(self, msg: str, update, client_id: Optional[str] = None):
        """Mock implementation that tracks calls instead of sending WebSocket messages"""
        if not hasattr(self, '_sent_updates'):
            self._sent_updates = []
        self._sent_updates.append({
            'msg': msg,
            'update': update,
            'client_id': client_id
        })

    # Copy the essential methods from the real TaskQueue
    def put(self, item) -> None:
        """Add a task to the queue. Item can be a dict or QueueTaskItem model."""
        with self.mutex:
            # Start a new batch if this is the first task after queue was empty
            if (
                self.batch_id is None
                and len(self.pending_tasks) == 0
                and len(self.running_tasks) == 0
            ):
                self._start_new_batch()

            # Convert to Pydantic model if it's a dict
            if isinstance(item, dict):
                item = QueueTaskItem(**item)

            import heapq
            # Wrap in tuple with priority to make it comparable
            # Use task_counter as priority to maintain FIFO order
            priority_item = (self.task_counter, item)
            heapq.heappush(self.pending_tasks, priority_item)
            self.task_counter += 1
            self.not_empty.notify()

    def _start_new_batch(self) -> None:
        """Start a new batch session for tracking operations."""
        self.batch_id = (
            f"test_batch_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"
        )
        self.batch_start_time = datetime.now().isoformat()
        self.batch_state_before = {"test": "state"}  # Simplified for testing

    def get(self, timeout: Optional[float] = None):
        """Get next task from queue"""
        with self.not_empty:
            while len(self.pending_tasks) == 0:
                self.not_empty.wait(timeout=timeout)
                if timeout is not None and len(self.pending_tasks) == 0:
                    return None
            import heapq
            priority_item = heapq.heappop(self.pending_tasks)
            task_index, item = priority_item  # Unwrap the tuple
            return item, task_index

    def total_count(self) -> int:
        """Get total number of tasks (pending + running)"""
        return len(self.pending_tasks) + len(self.running_tasks)

    def done_count(self) -> int:
        """Get number of completed tasks"""
        return len(self.history_tasks)

    def get_current_queue(self):
        """Get current running and pending queues"""
        running = list(self.running_tasks.values())
        # Extract items from the priority tuples
        pending = [item for priority, item in self.pending_tasks]
        return running, pending

    def get_history(self):
        """Get task history"""
        return self.history_tasks

    def mark_done(self, task_index: int, item: QueueTaskItem, status: TaskExecutionStatus, result: str):
        """Mark a task as completed"""
        from comfyui_manager.data_models import TaskHistoryItem

        history_item = TaskHistoryItem(
            ui_id=item.ui_id,
            client_id=item.client_id,
            kind=item.kind.value if hasattr(item.kind, 'value') else str(item.kind),
            timestamp=datetime.now().isoformat(),
            result=result,
            status=status
        )

        self.history_tasks[item.ui_id] = history_item

    def finalize(self):
        """Finalize batch (simplified for testing)"""
        if self._history_dir and self.batch_id:
            batch_file = self._history_dir / f"{self.batch_id}.json"
            batch_record = {
                "batch_id": self.batch_id,
                "start_time": self.batch_start_time,
                "state_before": self.batch_state_before,
                "operations": []  # Simplified
            }
            with open(batch_file, 'w') as f:
                json.dump(batch_record, f, indent=2)


class TestTaskQueue:
    """Test suite for TaskQueue functionality"""

    @pytest.fixture
    def task_queue(self, tmp_path):
        """Create a clean TaskQueue instance for each test"""
        return MockTaskQueue(history_dir=tmp_path)

    @pytest.fixture
    def sample_task(self):
        """Create a sample task for testing"""
        return QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client",
            kind="install",
            params=InstallPackParams(
                id="test-node",
                version="1.0.0",
                selected_version="1.0.0",
                mode=ManagerDatabaseSource.cache,
                channel=ManagerChannel.dev
            )
        )

    def test_task_queue_initialization(self, task_queue):
        """Test TaskQueue initializes with correct default state"""
        assert task_queue.total_count() == 0
        assert task_queue.done_count() == 0
        assert not task_queue.is_processing()
        assert task_queue.batch_id is None
        assert len(task_queue.pending_tasks) == 0
        assert len(task_queue.running_tasks) == 0
        assert len(task_queue.history_tasks) == 0

    def test_put_task_starts_batch(self, task_queue, sample_task):
        """Test that adding first task starts a new batch"""
        assert task_queue.batch_id is None

        task_queue.put(sample_task)

        assert task_queue.batch_id is not None
        assert task_queue.batch_id.startswith("test_batch_")
        assert task_queue.batch_start_time is not None
        assert task_queue.total_count() == 1

    def test_put_multiple_tasks(self, task_queue, sample_task):
        """Test adding multiple tasks to queue"""
        task_queue.put(sample_task)

        # Create second task
        task2 = QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client_2",
            kind="install",
            params=sample_task.params
        )
        task_queue.put(task2)

        assert task_queue.total_count() == 2
        assert len(task_queue.pending_tasks) == 2

    def test_put_task_with_dict(self, task_queue):
        """Test adding task as dictionary gets converted to QueueTaskItem"""
        task_dict = {
            "ui_id": str(uuid.uuid4()),
            "client_id": "test_client",
            "kind": "install",
            "params": {
                "id": "test-node",
                "version": "1.0.0",
                "selected_version": "1.0.0",
                "mode": "cache",
                "channel": "dev"
            }
        }

        task_queue.put(task_dict)

        assert task_queue.total_count() == 1
        # Verify it was converted to QueueTaskItem
        item, _ = task_queue.get(timeout=0.1)
        assert isinstance(item, QueueTaskItem)
        assert item.ui_id == task_dict["ui_id"]

    def test_get_task_from_queue(self, task_queue, sample_task):
        """Test retrieving task from queue"""
        task_queue.put(sample_task)

        item, task_index = task_queue.get(timeout=0.1)

        assert item == sample_task
        assert isinstance(task_index, int)
        assert task_queue.total_count() == 0  # Should be removed from pending

    def test_get_task_timeout(self, task_queue):
        """Test get with timeout on empty queue returns None"""
        result = task_queue.get(timeout=0.1)
        assert result is None

    def test_start_stop_worker(self, task_queue):
        """Test worker thread lifecycle"""
        assert not task_queue.is_processing()

        # Mock worker that stops immediately
        stop_event = threading.Event()

        def mock_worker():
            stop_event.wait(0.1)  # Brief delay then stop

        started = task_queue.start_worker(mock_worker)
        assert started is True
        assert task_queue.is_processing()

        # Try to start again - should return False
        started_again = task_queue.start_worker(mock_worker)
        assert started_again is False

        # Wait for worker to finish
        stop_event.set()
        task_queue._worker_task.join(timeout=1.0)
        assert not task_queue.is_processing()

    def test_task_processing_integration(self, task_queue, sample_task):
        """Test full task processing workflow"""
        # Add task to queue
        task_queue.put(sample_task)
        assert task_queue.total_count() == 1

        # Start worker
        started = task_queue.start_worker()
        assert started is True

        # Wait for processing to complete
        for _ in range(50):  # Max 5 seconds
            if task_queue.done_count() > 0:
                break
            time.sleep(0.1)

        # Verify task was processed
        assert task_queue.done_count() == 1
        assert task_queue.total_count() == 0
        assert sample_task.ui_id in task_queue.history_tasks

        # Stop worker
        task_queue._worker_task.join(timeout=1.0)

    def test_get_current_state(self, task_queue, sample_task):
        """Test getting current queue state"""
        task_queue.put(sample_task)

        state = task_queue.get_current_state()

        assert isinstance(state, TaskStateMessage)
        assert len(state.pending_queue) == 1
        assert len(state.running_queue) == 0
        assert state.pending_queue[0] == sample_task

    def test_batch_finalization(self, task_queue, tmp_path):
        """Test batch history is saved correctly"""
        task_queue.put(QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client",
            kind="install",
            params=InstallPackParams(
                id="test-node",
                version="1.0.0",
                selected_version="1.0.0",
                mode=ManagerDatabaseSource.cache,
                channel=ManagerChannel.dev
            )
        ))

        batch_id = task_queue.batch_id
        task_queue.finalize()

        # Check batch file was created
        batch_file = tmp_path / f"{batch_id}.json"
        assert batch_file.exists()

        # Verify content
        with open(batch_file) as f:
            batch_data = json.load(f)

        assert batch_data["batch_id"] == batch_id
        assert "start_time" in batch_data
        assert "state_before" in batch_data

    def test_concurrent_access(self, task_queue):
        """Test thread-safe concurrent access to queue"""
        num_tasks = 10
        added_tasks = []

        def add_tasks():
            for i in range(num_tasks):
                task = QueueTaskItem(
                    ui_id=f"task_{i}",
                    client_id=f"client_{i}",
                    kind="install",
                    params=InstallPackParams(
                        id=f"node_{i}",
                        version="1.0.0",
                        selected_version="1.0.0",
                        mode=ManagerDatabaseSource.cache,
                        channel=ManagerChannel.dev
                    )
                )
                task_queue.put(task)
                added_tasks.append(task)

        # Start multiple threads adding tasks
        threads = []
        for _ in range(3):
            thread = threading.Thread(target=add_tasks)
            threads.append(thread)
            thread.start()

        # Wait for all threads to complete
        for thread in threads:
            thread.join()

        # Verify all tasks were added
        assert task_queue.total_count() == num_tasks * 3
        assert len(added_tasks) == num_tasks * 3

    @pytest.mark.asyncio
    async def test_queue_state_updates_tracking(self, task_queue, sample_task):
        """Test that queue state updates are tracked properly"""
        # Mock the update tracking
        task_queue.send_queue_state_update("test-message", {"test": "data"}, "client1")

        # Verify update was tracked
        assert hasattr(task_queue, '_sent_updates')
        assert len(task_queue._sent_updates) == 1

        update = task_queue._sent_updates[0]
        assert update['msg'] == "test-message"
        assert update['update'] == {"test": "data"}
        assert update['client_id'] == "client1"


class TestTaskQueueEdgeCases:
    """Test edge cases and error conditions"""

    @pytest.fixture
    def task_queue(self):
        return MockTaskQueue()

    def test_empty_queue_operations(self, task_queue):
        """Test operations on empty queue"""
        assert task_queue.total_count() == 0
        assert task_queue.done_count() == 0

        # Getting from empty queue should timeout
        result = task_queue.get(timeout=0.1)
        assert result is None

        # State should be empty
        state = task_queue.get_current_state()
        assert len(state.pending_queue) == 0
        assert len(state.running_queue) == 0

    def test_invalid_task_data(self, task_queue):
        """Test handling of invalid task data"""
        # This should raise ValidationError due to missing required fields
        with pytest.raises(Exception):  # ValidationError from Pydantic
            task_queue.put({
                "ui_id": "test",
                # Missing required fields
            })

    def test_worker_cleanup_on_exception(self, task_queue):
        """Test worker cleanup when worker function raises exception"""
        exception_raised = threading.Event()

        def failing_worker():
            exception_raised.set()
            raise RuntimeError("Test exception")

        started = task_queue.start_worker(failing_worker)
        assert started is True

        # Wait for exception to be raised
        exception_raised.wait(timeout=1.0)

        # Worker should eventually stop
        task_queue._worker_task.join(timeout=1.0)
        assert not task_queue.is_processing()


if __name__ == "__main__":
    # Allow running tests directly
    pytest.main([__file__])