Compare commits: feat/add-t… → feat/manag…
465 commits
.github/workflows/publish-to-pypi.yml (vendored, 32 changes)

@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - main
+      - manager-v4
     paths:
      - "pyproject.toml"

@@ -21,7 +21,7 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
-          python-version: '3.9'
+          python-version: '3.x'

      - name: Install build dependencies
        run: |

@@ -31,28 +31,28 @@ jobs:
      - name: Get current version
        id: current_version
        run: |
-          CURRENT_VERSION=$(grep -oP 'version = "\K[^"]+' pyproject.toml)
+          CURRENT_VERSION=$(grep -oP '^version = "\K[^"]+' pyproject.toml)
          echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          echo "Current version: $CURRENT_VERSION"

      - name: Build package
        run: python -m build

-      - name: Create GitHub Release
-        id: create_release
-        uses: softprops/action-gh-release@v2
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          files: dist/*
-          tag_name: v${{ steps.current_version.outputs.version }}
-          draft: false
-          prerelease: false
-          generate_release_notes: true
+      # - name: Create GitHub Release
+      #   id: create_release
+      #   uses: softprops/action-gh-release@v2
+      #   env:
+      #     GITHUB_TOKEN: ${{ github.token }}
+      #   with:
+      #     files: dist/*
+      #     tag_name: v${{ steps.current_version.outputs.version }}
+      #     draft: false
+      #     prerelease: false
+      #     generate_release_notes: true

      - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@release/v1
+        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc
        with:
          password: ${{ secrets.PYPI_TOKEN }}
          skip-existing: true
          verbose: true
.github/workflows/publish.yml (vendored, 25 changes: file deleted)

@@ -1,25 +0,0 @@
-name: Publish to Comfy registry
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - main-blocked
-    paths:
-      - "pyproject.toml"
-
-permissions:
-  issues: write
-
-jobs:
-  publish-node:
-    name: Publish Custom Node to registry
-    runs-on: ubuntu-latest
-    if: ${{ github.repository_owner == 'ltdrdata' || github.repository_owner == 'Comfy-Org' }}
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v4
-      - name: Publish Custom Node
-        uses: Comfy-Org/publish-node-action@v1
-        with:
-          ## Add your own personal access token to your Github Repository secrets and reference it here.
-          personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
@@ -11,4 +11,5 @@ include extras.json
 include github-stats.json
 include model-list.json
 include alter-list.json
 include comfyui_manager/channels.list.template
+include comfyui_manager/pip-policy.json
README.md (55 changes)

@@ -215,13 +215,14 @@ The following settings are applied based on the section marked as `is_default`.
 downgrade_blacklist = <Set a list of packages to prevent downgrades. List them separated by commas.>
 security_level = <Set the security level => strong|normal|normal-|weak>
 always_lazy_install = <Whether to perform dependency installation on restart even in environments other than Windows.>
-network_mode = <Set the network mode => public|private|offline>
+network_mode = <Set the network mode => public|private|offline|personal_cloud>
 ```

 * network_mode:
   - public: An environment that uses a typical public network.
   - private: An environment that uses a closed network, where a private node DB is configured via `channel_url`. (Uses cache if available)
   - offline: An environment that does not use any external connections when using an offline network. (Uses cache if available)
+  - personal_cloud: Applies relaxed security features in cloud environments such as Google Colab or Runpod, where strong security is not required.

 ## Additional Feature

@@ -312,31 +313,33 @@ When you run the `scan.sh` script:

 ## Security policy
 * Edit `config.ini` file: add `security_level = <LEVEL>`
-  * `strong`
-    * doesn't allow `high` and `middle` level risky features
-  * `normal`
-    * doesn't allow `high` level risky features
-    * `middle` level risky features are available
-  * `normal-`
-    * doesn't allow `high` level risky features if `--listen` is specified and it doesn't start with `127.`
-    * `middle` level risky features are available
-  * `weak`
-    * all features are available
-
-  * `high` level risky features
-    * `Install via git url`, `pip install`
-    * Installation of custom nodes not registered in the `default channel`.
-    * Fix custom nodes
-
-  * `middle` level risky features
-    * Uninstall/Update
-    * Installation of custom nodes registered in the `default channel`.
-    * Restore/Remove Snapshot
-    * Restart
-
-  * `low` level risky features
-    * Update ComfyUI
+
+The security settings are applied based on whether the ComfyUI server's listener is non-local and whether the network mode is set to `personal_cloud`.
+
+* **non-local**: When the server is launched with `--listen` and is bound to a network range other than the local `127.` range, allowing remote IP access.
+* **personal\_cloud**: When the `network_mode` is set to `personal_cloud`.
+
+### Risky Level Table
+
+| Risky Level | Features |
+|-------------|----------|
+| high+ | * `Install via git url`, `pip install`<BR>* Installation of nodepack not registered in the `default channel`. |
+| high | * Fix nodepack |
+| middle+ | * Uninstall/Update<BR>* Installation of nodepack registered in the `default channel`.<BR>* Restore/Remove Snapshot<BR>* Install model |
+| middle | * Restart |
+| low | * Update ComfyUI |
+
+### Security Level Table
+
+| Security Level | local | non-local (personal_cloud) | non-local (not personal_cloud) |
+|----------------|-------|----------------------------|--------------------------------|
+| strong | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed |
+| normal | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available |
+| normal- | * All features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available |
+| weak | * All features are available | * All features are available | * `high+` and `middle+` level risky features are not allowed<BR>* `high`, `middle` and `low` level risky features are available |

 # Disclaimer
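As context for the hunk above, both settings live in the same `config.ini` section, the one marked as `is_default` per the README; a minimal sketch (section name assumed):

```ini
[default]
security_level = normal
network_mode = personal_cloud
```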
@@ -1,5 +1,10 @@
 import os
+import logging
+from aiohttp import web
+from .common.manager_security import HANDLER_POLICY
+from .common import manager_security
+from comfy.cli_args import args


 def prestartup():
     from . import prestartup_script  # noqa: F401

@@ -7,25 +12,29 @@ def prestartup():


 def start():
-    from comfy.cli_args import args
-
     logging.info('[START] ComfyUI-Manager')
     from .common import cm_global  # noqa: F401

-    if not args.disable_manager:
+    if args.enable_manager:
        if args.enable_manager_legacy_ui:
            try:
                from .legacy import manager_server  # noqa: F401
                from .legacy import share_3rdparty  # noqa: F401
                from .legacy import manager_core as core
                import nodes

                logging.info("[ComfyUI-Manager] Legacy UI is enabled.")
                nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
            except Exception as e:
                print("Error enabling legacy ComfyUI Manager frontend:", e)
                core = None
        else:
            from .glob import manager_server  # noqa: F401
            from .glob import share_3rdparty  # noqa: F401
            from .glob import manager_core as core

+        if core is not None:
+            manager_security.is_personal_cloud_mode = core.get_config()['network_mode'].lower() == 'personal_cloud'

@@ -33,9 +42,7 @@ def should_be_disabled(fullpath: str) -> bool:
     1. Disables the legacy ComfyUI-Manager.
     2. The blocklist can be expanded later based on policies.
     """
-    from comfy.cli_args import args
-
-    if not args.disable_manager:
+    if args.enable_manager:
        # In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
        # It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
        dir_name = os.path.basename(fullpath).lower()

@@ -43,3 +50,55 @@ def should_be_disabled(fullpath: str) -> bool:
         return True

     return False
+
+
+def get_client_ip(request):
+    peername = request.transport.get_extra_info("peername")
+    if peername is not None:
+        host, port = peername
+        return host
+
+    return "unknown"
+
+
+def create_middleware():
+    connected_clients = set()
+    is_local_mode = manager_security.is_loopback(args.listen)
+
+    @web.middleware
+    async def manager_middleware(request: web.Request, handler):
+        nonlocal connected_clients
+
+        # security policy for remote environments
+        prev_client_count = len(connected_clients)
+        client_ip = get_client_ip(request)
+        connected_clients.add(client_ip)
+        next_client_count = len(connected_clients)
+
+        if prev_client_count == 1 and next_client_count > 1:
+            manager_security.multiple_remote_alert()
+
+        policy = manager_security.get_handler_policy(handler)
+        is_banned = False
+
+        # policy check
+        if len(connected_clients) > 1:
+            if is_local_mode:
+                if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NON_LOCAL in policy:
+                    is_banned = True
+            if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD in policy:
+                is_banned = not manager_security.is_personal_cloud_mode
+
+        if HANDLER_POLICY.BANNED in policy:
+            is_banned = True
+
+        if is_banned:
+            logging.warning(f"[Manager] Banning request from {client_ip}: {request.path}")
+            response = web.Response(text="[Manager] This request is banned.", status=403)
+        else:
+            response: web.Response = await handler(request)
+
+        return response
+
+    return manager_middleware
@@ -46,10 +46,7 @@ comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
 cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
 cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']

-if sys.version_info < (3, 13):
-    cm_global.pip_overrides = {'numpy': 'numpy<2'}
-else:
-    cm_global.pip_overrides = {}
+cm_global.pip_overrides = {}

 if os.path.exists(os.path.join(manager_util.comfyui_manager_path, "pip_overrides.json")):
     with open(os.path.join(manager_util.comfyui_manager_path, "pip_overrides.json"), 'r', encoding="UTF-8", errors="ignore") as json_file:

@@ -152,9 +149,6 @@ class Ctx:
         with open(context.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
             cm_global.pip_overrides = json.load(json_file)

-        if sys.version_info < (3, 13):
-            cm_global.pip_overrides = {'numpy': 'numpy<2'}
-
        if os.path.exists(context.manager_pip_blacklist_path):
            with open(context.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
                for x in f.readlines():
@@ -180,7 +180,7 @@ def install_node(node_id, version=None):
     else:
         url = f"{base_url}/nodes/{node_id}/install?version={version}"

-    response = requests.get(url)
+    response = requests.get(url, verify=not manager_util.bypass_ssl)
     if response.status_code == 200:
         # Convert the API response to a NodeVersion object
         return map_node_version(response.json())

@@ -191,7 +191,7 @@ def install_node(node_id, version=None):
 def all_versions_of_node(node_id):
     url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"

-    response = requests.get(url)
+    response = requests.get(url, verify=not manager_util.bypass_ssl)
     if response.status_code == 200:
         return response.json()
     else:

@@ -211,6 +211,7 @@ def read_cnr_info(fullpath):

     project = data.get('project', {})
     name = project.get('name').strip().lower()
+    original_name = project.get('name')

     # normalize version
     # for example: 2.5 -> 2.5.0

@@ -222,6 +223,7 @@ def read_cnr_info(fullpath):
     if name and version:  # repository is optional
         return {
             "id": name,
+            "original_name": original_name,
             "version": version,
             "url": repository
         }

@@ -106,4 +106,3 @@ def get_comfyui_tag():
     except Exception:
         return None
-
@@ -4,6 +4,7 @@ class NetworkMode(enum.Enum):
     PUBLIC = "public"
     PRIVATE = "private"
     OFFLINE = "offline"
+    PERSONAL_CLOUD = "personal_cloud"

 class SecurityLevel(enum.Enum):
     STRONG = "strong"
@@ -55,7 +55,11 @@ def download_url(model_url: str, model_dir: str, filename: str):
         return aria2_download_url(model_url, model_dir, filename)
     else:
         from torchvision.datasets.utils import download_url as torchvision_download_url
-        return torchvision_download_url(model_url, model_dir, filename)
+        try:
+            return torchvision_download_url(model_url, model_dir, filename)
+        except Exception as e:
+            logging.error(f"[ComfyUI-Manager] Failed to download: {model_url} / {repr(e)}")
+            raise


 def aria2_find_task(dir: str, filename: str):
comfyui_manager/common/manager_security.py (new file, 36 lines)

@@ -0,0 +1,36 @@
+from enum import Enum
+
+is_personal_cloud_mode = False
+handler_policy = {}
+
+
+class HANDLER_POLICY(Enum):
+    MULTIPLE_REMOTE_BAN_NON_LOCAL = 1
+    MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD = 2
+    BANNED = 3
+
+
+def is_loopback(address):
+    import ipaddress
+    try:
+        return ipaddress.ip_address(address).is_loopback
+    except ValueError:
+        return False
+
+
+def do_nothing():
+    pass
+
+
+def get_handler_policy(x):
+    return handler_policy.get(x) or set()
+
+
+def add_handler_policy(x, policy):
+    s = handler_policy.get(x)
+    if s is None:
+        s = set()
+        handler_policy[x] = s
+
+    s.add(policy)
+
+
+multiple_remote_alert = do_nothing
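The middleware in the earlier hunk consumes this module through `get_handler_policy()`. A minimal registration sketch; the handler name here is hypothetical:

```python
from comfyui_manager.common import manager_security
from comfyui_manager.common.manager_security import HANDLER_POLICY


async def install_node_handler(request):  # hypothetical aiohttp handler
    ...


# Once more than one remote client has been seen, the middleware bans this
# handler unless network_mode is set to personal_cloud.
manager_security.add_handler_policy(
    install_node_handler,
    HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD,
)
```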
@@ -15,7 +15,6 @@ import re
 import logging
 import platform
 import shlex
 from . import cm_global


 cache_lock = threading.Lock()

@@ -25,6 +24,7 @@ comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '
 cache_dir = os.path.join(comfyui_manager_path, '.cache')  # This path is also updated together in **manager_core.update_user_directory**.

 use_uv = False
+bypass_ssl = False

 def is_manager_pip_package():
     return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))

@@ -140,7 +140,7 @@ async def get_data(uri, silent=False):
     print(f"FETCH DATA from: {uri}", end="")

     if uri.startswith("http"):
-        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=not bypass_ssl)) as session:
            headers = {
                'Cache-Control': 'no-cache',
                'Pragma': 'no-cache',
@@ -330,6 +330,32 @@ torch_torchvision_torchaudio_version_map = {
 }

+def torch_rollback(prev):
+    spec = prev.split('+')
+    if len(spec) > 1:
+        platform = spec[1]
+    else:
+        cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
+        subprocess.check_output(cmd, universal_newlines=True)
+        logging.error(cmd)
+        return
+
+    torch_ver = StrictVersion(spec[0])
+    torch_ver = f"{torch_ver.major}.{torch_ver.minor}.{torch_ver.patch}"
+    torch_torchvision_torchaudio_ver = torch_torchvision_torchaudio_version_map.get(torch_ver)
+
+    if torch_torchvision_torchaudio_ver is None:
+        cmd = make_pip_cmd(['install', '--pre', 'torch', 'torchvision', 'torchaudio',
+                            '--index-url', f"https://download.pytorch.org/whl/nightly/{platform}"])
+        logging.info("[ComfyUI-Manager] restore PyTorch to nightly version")
+    else:
+        torchvision_ver, torchaudio_ver = torch_torchvision_torchaudio_ver
+        cmd = make_pip_cmd(['install', f'torch=={torch_ver}', f'torchvision=={torchvision_ver}', f"torchaudio=={torchaudio_ver}",
+                            '--index-url', f"https://download.pytorch.org/whl/{platform}"])
+        logging.info(f"[ComfyUI-Manager] restore PyTorch to {torch_ver}+{platform}")
+
+    subprocess.check_output(cmd, universal_newlines=True)


 class PIPFixer:
     def __init__(self, prev_pip_versions, comfyui_path, manager_files_path):
@@ -337,32 +363,6 @@ class PIPFixer:
         self.comfyui_path = comfyui_path
         self.manager_files_path = manager_files_path

-    def torch_rollback(self):
-        spec = self.prev_pip_versions['torch'].split('+')
-        if len(spec) > 0:
-            platform = spec[1]
-        else:
-            cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
-            subprocess.check_output(cmd, universal_newlines=True)
-            logging.error(cmd)
-            return
-
-        torch_ver = StrictVersion(spec[0])
-        torch_ver = f"{torch_ver.major}.{torch_ver.minor}.{torch_ver.patch}"
-        torch_torchvision_torchaudio_ver = torch_torchvision_torchaudio_version_map.get(torch_ver)
-
-        if torch_torchvision_torchaudio_ver is None:
-            cmd = make_pip_cmd(['install', '--pre', 'torch', 'torchvision', 'torchaudio',
-                                '--index-url', f"https://download.pytorch.org/whl/nightly/{platform}"])
-            logging.info("[ComfyUI-Manager] restore PyTorch to nightly version")
-        else:
-            torchvision_ver, torchaudio_ver = torch_torchvision_torchaudio_ver
-            cmd = make_pip_cmd(['install', f'torch=={torch_ver}', f'torchvision=={torchvision_ver}', f"torchaudio=={torchaudio_ver}",
-                                '--index-url', f"https://download.pytorch.org/whl/{platform}"])
-            logging.info(f"[ComfyUI-Manager] restore PyTorch to {torch_ver}+{platform}")
-
-        subprocess.check_output(cmd, universal_newlines=True)

     def fix_broken(self):
         new_pip_versions = get_installed_packages(True)

@@ -384,7 +384,7 @@ class PIPFixer:
             elif self.prev_pip_versions['torch'] != new_pip_versions['torch'] \
                 or self.prev_pip_versions['torchvision'] != new_pip_versions['torchvision'] \
                 or self.prev_pip_versions['torchaudio'] != new_pip_versions['torchaudio']:
-                self.torch_rollback()
+                torch_rollback(self.prev_pip_versions['torch'])
         except Exception as e:
             logging.error("[ComfyUI-Manager] Failed to restore PyTorch")
             logging.error(e)
@@ -415,32 +415,14 @@ class PIPFixer:

         if len(targets) > 0:
             for x in targets:
-                if sys.version_info < (3, 13):
-                    cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}", "numpy<2"])
-                    subprocess.check_output(cmd, universal_newlines=True)
+                cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}"])
+                subprocess.check_output(cmd, universal_newlines=True)

             logging.info(f"[ComfyUI-Manager] 'opencv' dependencies were fixed: {targets}")
         except Exception as e:
             logging.error("[ComfyUI-Manager] Failed to restore opencv")
             logging.error(e)

         # fix numpy
-        if sys.version_info >= (3, 13):
-            logging.info("[ComfyUI-Manager] In Python 3.13 and above, PIP Fixer does not downgrade `numpy` below version 2.0. If you need to force a downgrade of `numpy`, please use `pip_auto_fix.list`.")
-        else:
-            try:
+        try:
            np = new_pip_versions.get('numpy')
+            if cm_global.pip_overrides.get('numpy') == 'numpy<2':
                if np is not None:
                    if StrictVersion(np) >= StrictVersion('2'):
                        cmd = make_pip_cmd(['install', "numpy<2"])
                        subprocess.check_output(cmd, universal_newlines=True)

                        logging.info("[ComfyUI-Manager] 'numpy' dependency were fixed")
        except Exception as e:
            logging.error("[ComfyUI-Manager] Failed to restore numpy")
            logging.error(e)

        # fix missing frontend
        try:
            # NOTE: package name in requirements is 'comfyui-frontend-package'
@@ -540,3 +522,69 @@ def robust_readlines(fullpath):

     print(f"[ComfyUI-Manager] Failed to recognize encoding for: {fullpath}")
     return []
+
+
+def restore_pip_snapshot(pips, options):
+    non_url = []
+    local_url = []
+    non_local_url = []
+
+    for k, v in pips.items():
+        # NOTE: skip torch related packages
+        if k.startswith("torch==") or k.startswith("torchvision==") or k.startswith("torchaudio==") or k.startswith("nvidia-"):
+            continue
+
+        if v == "":
+            non_url.append(k)
+        else:
+            if v.startswith('file:'):
+                local_url.append(v)
+            else:
+                non_local_url.append(v)
+
+    # restore other pips
+    failed = []
+    if '--pip-non-url' in options:
+        # try all at once
+        res = 1
+        try:
+            res = subprocess.check_output(make_pip_cmd(['install'] + non_url))
+        except Exception:
+            pass
+
+        # fallback
+        if res != 0:
+            for x in non_url:
+                res = 1
+                try:
+                    res = subprocess.check_output(make_pip_cmd(['install', '--no-deps', x]))
+                except Exception:
+                    pass
+
+                if res != 0:
+                    failed.append(x)
+
+    if '--pip-non-local-url' in options:
+        for x in non_local_url:
+            res = 1
+            try:
+                res = subprocess.check_output(make_pip_cmd(['install', '--no-deps', x]))
+            except Exception:
+                pass
+
+            if res != 0:
+                failed.append(x)
+
+    if '--pip-local-url' in options:
+        for x in local_url:
+            res = 1
+            try:
+                res = subprocess.check_output(make_pip_cmd(['install', '--no-deps', x]))
+            except Exception:
+                pass
+
+            if res != 0:
+                failed.append(x)
+
+    print(f"Installation failed for pip packages: {failed}")
comfyui_manager/common/pip_util.design.en.md (new file, 713 lines)

@@ -0,0 +1,713 @@

# Design Document for pip_util.py Implementation

This design aims to minimize breakage of already-installed dependencies.

## List of Functions to Implement

## Global Policy Management

### Global Variables
```python
_pip_policy_cache = None  # Policy cache (program-wide, loaded once)
```

### Global Functions

* get_pip_policy(): Returns the policy for resolving pip dependency conflicts (lazy loading; a sketch follows the execution-order list below)
  - **Call timing**: Called whenever needed (automatically loads only once, on the first call)
  - **Purpose**: Returns the policy cache, automatically loading it if the cache is empty
  - **Execution flow**:
    1. Declare global _pip_policy_cache
    2. If _pip_policy_cache is already loaded, return immediately (prevents duplicate loading)
    3. Read the base policy file:
       - Path: {manager_util.comfyui_manager_path}/pip-policy.json
       - Use an empty dictionary if the file doesn't exist
       - Log an error and use an empty dictionary if JSON parsing fails
    4. Read the user policy file:
       - Path: {context.manager_files_path}/pip-policy.user.json
       - Create an empty JSON file if it doesn't exist ({"_comment": "User-specific pip policy overrides"})
       - Log a warning and use an empty dictionary if JSON parsing fails
    5. Apply the merge rules (merge by package name):
       - Start with the base policy
       - For each package in the user policy:
         * Package only in the user policy: add it to the base
         * Package only in the base policy: keep it in the base
         * Package in both: completely replace it with the user policy (entire-package replacement, not section-level)
    6. Store the merged policy in _pip_policy_cache
    7. Log policy load success (including the number of loaded package policies)
    8. Return _pip_policy_cache
  - **Return value**: Dict (merged policy dictionary)
  - **Exception handling**:
    - File read failure: Log a warning and treat the file as an empty dictionary
    - JSON parsing failure: Log an error and treat the file as an empty dictionary
  - **Notes**:
    - The lazy-loading pattern loads automatically on the first call
    - Not thread-safe; caution is needed in multi-threaded environments

- The policy file structure should support the following scenarios:
  - A dictionary of {dependency name -> policy object}
  - A policy object has four policy sections:
    - **uninstall**: Package removal policy (pre-processing, condition optional)
    - **apply_first_match**: Evaluated top-to-bottom; only the first policy whose condition is satisfied is executed (exclusive)
    - **apply_all_matches**: All policies whose conditions are satisfied are executed (cumulative)
    - **restore**: Package restoration policy (post-processing, condition optional)

- Condition types:
  - installed: Check a version condition of an already-installed dependency
    - spec is optional
    - package field: Specifies the package to check (optional, defaults to self)
      - Explicit: Reference another package (e.g., numba checks the numpy version)
      - Omitted: Check the package's own version (e.g., critical-package checks its own version)
  - platform: Platform conditions (os, has_gpu, comfyui_version, etc.)
  - If the condition is absent, it is always considered satisfied

- uninstall policy (pre-removal policy):
  - A list of removal policies (condition is optional; evaluated top-to-bottom, only the first match is executed)
  - When the condition is satisfied (or always, if there is no condition): remove the target package and abort the installation
  - If this policy is applied, all subsequent steps are ignored
  - The target field specifies the package to remove
  - Example: Unconditionally remove if a specific package is installed

- Actions available in apply_first_match (determine the installation method, exclusive):
  - skip: Block installation of a specific dependency
  - force_version: Force a change to a specific version during installation
    - The extra_index_url field can specify a custom package repository (optional)
  - replace: Replace with a different dependency
    - The extra_index_url field can specify a custom package repository (optional)

- Actions available in apply_all_matches (installation options, cumulative):
  - pin_dependencies: Pin the currently installed versions of other dependencies
    - The pinned_packages field specifies the package list
    - Example: `pip install requests urllib3==1.26.15 certifi==2023.7.22 charset-normalizer==3.2.0`
    - Real use case: Prevent urllib3 from upgrading to 2.x when installing requests
    - on_failure: "fail" or "retry_without_pin"
  - install_with: Specify additional dependencies to install together
  - warn: Record a warning message in the log

- restore policy (post-restoration policy):
  - A list of restoration policies (condition is optional; evaluated top-to-bottom, only the first match is executed)
  - Executed after the package installation completes (post-processing)
  - When the condition is satisfied (or always, if there is no condition): force-install the target package at a specific version
  - The target field specifies the package to restore (can be a different package)
  - The version field specifies the version to install
  - The extra_index_url field can specify a custom package repository (optional)
  - Example: Reinstall or change the version if a specific package has been deleted or has the wrong version

- Execution order:
  1. uninstall evaluation: If the condition is satisfied, remove the package and **terminate** (ignore subsequent steps)
  2. apply_first_match evaluation:
     - Execute the first policy among skip/force_version/replace whose condition is satisfied
     - If no policy matches, proceed with the default installation of the originally requested package
  3. apply_all_matches evaluation: Apply all pin_dependencies, install_with, and warn entries whose conditions are satisfied
  4. Execute the actual package installation (pip install or uv pip install)
  5. restore evaluation: If the condition is satisfied, restore the target package (post-processing)
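A minimal sketch of the lazy loading and merge behavior described above; the real paths come from manager_util and the context object per the document, so the two path constants and the `_load_json` helper here are assumptions:

```python
import json
import logging
import os

BASE_POLICY_PATH = "pip-policy.json"       # stands in for {manager_util.comfyui_manager_path}/pip-policy.json
USER_POLICY_PATH = "pip-policy.user.json"  # stands in for {context.manager_files_path}/pip-policy.user.json

_pip_policy_cache = None  # Policy cache (program-wide, loaded once)


def _load_json(path):
    # Missing or unparsable files degrade to an empty policy, per the rules above.
    if not os.path.exists(path):
        return {}
    try:
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)
    except json.JSONDecodeError as e:
        logging.error(f"[pip_util] Failed to parse policy file {path}: {e}")
        return {}


def get_pip_policy():
    """Return the merged pip policy, loading it once on the first call."""
    global _pip_policy_cache
    if _pip_policy_cache is not None:
        return _pip_policy_cache

    base = _load_json(BASE_POLICY_PATH)
    if not os.path.exists(USER_POLICY_PATH):  # create an empty user override file
        with open(USER_POLICY_PATH, "w", encoding="utf-8") as f:
            json.dump({"_comment": "User-specific pip policy overrides"}, f)
    user = _load_json(USER_POLICY_PATH)

    # Merge by package name: a package present in both files is replaced
    # wholesale by the user policy (entire-package replacement, not section-level).
    merged = dict(base)
    merged.update({k: v for k, v in user.items() if not k.startswith("_")})

    _pip_policy_cache = merged
    logging.info(f"[pip_util] Loaded pip policy for {len(merged)} package(s)")
    return merged
```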
## Batch Unit Class (PipBatch)

### Class Structure
```python
class PipBatch:
    """
    pip package installation batch unit manager

    Maintains a pip freeze cache during batch operations for performance optimization.

    Usage pattern:
        # Batch operations (policy auto-loaded)
        with PipBatch() as batch:
            batch.ensure_not_installed()
            batch.install("numpy>=1.20")
            batch.install("pandas>=2.0")
            batch.install("scipy>=1.7")
            batch.ensure_installed()
    """

    def __init__(self):
        self._installed_cache = None  # Installed packages cache (batch-level)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._installed_cache = None
```
### Private Methods

A combined sketch of these helpers appears at the end of this subsection.

* PipBatch._refresh_installed_cache():
  - **Purpose**: Read the currently installed package information and refresh the cache
  - **Execution flow**:
    1. Generate the command using manager_util.make_pip_cmd(["freeze"])
    2. Execute pip freeze via subprocess
    3. Parse the output:
       - Each line is in "package_name==version" format
       - Parse "package_name==version" to build a dictionary
       - Ignore editable packages (starting with -e)
       - Ignore comments (starting with #)
    4. Store the parsed dictionary in self._installed_cache
  - **Return value**: None
  - **Exception handling**:
    - pip freeze failure: Set the cache to an empty dictionary and log a warning
    - Parse failure: Ignore the line and continue

* PipBatch._get_installed_packages():
  - **Purpose**: Return the cached installed-package information (refresh if the cache is None)
  - **Execution flow**:
    1. If self._installed_cache is None, call _refresh_installed_cache()
    2. Return self._installed_cache
  - **Return value**: {package_name: version} dictionary

* PipBatch._invalidate_cache():
  - **Purpose**: Invalidate the cache after a package install/uninstall
  - **Execution flow**:
    1. Set self._installed_cache = None
  - **Return value**: None
  - **Call timing**: After install(), ensure_not_installed(), ensure_installed()

* PipBatch._parse_package_spec(package_info):
  - **Purpose**: Split a package spec string into a package name and a version spec
  - **Parameters**:
    - package_info: "numpy", "numpy==1.26.0", "numpy>=1.20.0", "numpy~=1.20", etc.
  - **Execution flow**:
    1. Use a regex to split the package name and version spec
    2. Pattern: `^([a-zA-Z0-9_-]+)([><=!~]+.*)?$`
  - **Return value**: (package_name, version_spec) tuple
    - Examples: ("numpy", "==1.26.0"), ("pandas", ">=2.0.0"), ("scipy", None)
  - **Exception handling**:
    - Parse failure: Raise ValueError

* PipBatch._evaluate_condition(condition, package_name, installed_packages):
  - **Purpose**: Evaluate a policy condition and return whether it is satisfied
  - **Parameters**:
    - condition: Policy condition object (dictionary)
    - package_name: Name of the package currently being processed
    - installed_packages: {package_name: version} dictionary
  - **Execution flow**:
    1. If condition is None, return True (always satisfied)
    2. Branch based on condition["type"]:
       a. "installed" type:
          - target_package = condition.get("package", package_name)
          - Check the current version with installed_packages.get(target_package)
          - If not installed (None), return False
          - If spec exists, compare versions using packaging.specifiers.SpecifierSet
          - If there is no spec, only check installation status (True)
       b. "platform" type:
          - If condition["os"] exists, compare with platform.system()
          - If condition["has_gpu"] exists, check GPU presence (torch.cuda.is_available(), etc.)
          - If condition["comfyui_version"] exists, compare the ComfyUI version
          - Return True if all conditions are satisfied
    3. Return True if all conditions are satisfied, False if any is unsatisfied
  - **Return value**: bool
  - **Exception handling**:
    - Version comparison failure: Log a warning and return False
    - Unknown condition type: Log a warning and return False
|
||||
|
||||
* PipBatch.install(package_info, extra_index_url=None, override_policy=False):
|
||||
- **Purpose**: Perform policy-based pip package installation (individual package basis)
|
||||
- **Parameters**:
|
||||
- package_info: Package name and version spec (e.g., "numpy", "numpy==1.26.0", "numpy>=1.20.0")
|
||||
- extra_index_url: Additional package repository URL (optional)
|
||||
- override_policy: If True, skip policy application and install directly (default: False)
|
||||
- **Execution flow**:
|
||||
1. Call get_pip_policy() to get policy (lazy loading)
|
||||
2. Use self._parse_package_spec() to split package_info into package name and version spec
|
||||
3. Call self._get_installed_packages() to get cached installed package information
|
||||
4. If override_policy=True → Jump directly to step 10 (skip policy)
|
||||
5. Get policy for package name from policy dictionary
|
||||
6. If no policy → Jump to step 10 (default installation)
|
||||
7. **apply_first_match policy evaluation** (exclusive - only first match):
|
||||
- Iterate through policy list top-to-bottom
|
||||
- Evaluate each policy's condition with self._evaluate_condition()
|
||||
- When first condition-satisfying policy found:
|
||||
* type="skip": Log reason and return False (don't install)
|
||||
* type="force_version": Change package_info version to policy's version
|
||||
* type="replace": Completely replace package_info with policy's replacement package
|
||||
- If no matching policy, keep original package_info
|
||||
8. **apply_all_matches policy evaluation** (cumulative - all matches):
|
||||
- Iterate through policy list top-to-bottom
|
||||
- Evaluate each policy's condition with self._evaluate_condition()
|
||||
- For all condition-satisfying policies:
|
||||
* type="pin_dependencies":
|
||||
- For each package in pinned_packages, query current version with self._installed_cache.get(pkg)
|
||||
- Pin to installed version in "package==version" format
|
||||
- Add to installation package list
|
||||
* type="install_with":
|
||||
- Add additional_packages to installation package list
|
||||
* type="warn":
|
||||
- Output message as warning log
|
||||
- If allow_continue=false, wait for user confirmation (optional)
|
||||
9. Compose final installation package list:
|
||||
- Main package (modified/replaced package_info)
|
||||
- Packages pinned by pin_dependencies
|
||||
- Packages added by install_with
|
||||
10. Handle extra_index_url:
|
||||
- Parameter-passed extra_index_url takes priority
|
||||
- Otherwise use extra_index_url defined in policy
|
||||
11. Generate pip/uv command using manager_util.make_pip_cmd():
|
||||
- Basic format: ["pip", "install"] + package list
|
||||
- If extra_index_url exists: add ["--extra-index-url", url]
|
||||
12. Execute command via subprocess
|
||||
13. Handle installation failure:
|
||||
- If pin_dependencies's on_failure="retry_without_pin":
|
||||
* Retry with only main package excluding pinned packages
|
||||
- If on_failure="fail":
|
||||
* Raise exception and abort installation
|
||||
- Otherwise: Log warning and continue
|
||||
14. On successful installation:
|
||||
- Call self._invalidate_cache() (invalidate cache)
|
||||
- Log info if reason exists
|
||||
- Return True
|
||||
- **Return value**: Installation success status (bool)
|
||||
- **Exception handling**:
|
||||
- Policy parsing failure: Log warning and proceed with default installation
|
||||
- Installation failure: Log error and raise exception (depends on on_failure setting)
|
||||
- **Notes**:
|
||||
- restore policy not handled in this method (batch-processed in ensure_installed())
|
||||
- uninstall policy not handled in this method (batch-processed in ensure_not_installed())
|
||||
|
||||
* PipBatch.ensure_not_installed():
  - **Purpose**: Iterate through all policies and remove every package satisfying an uninstall condition (batch processing)
  - **Parameters**: None
  - **Execution flow**:
    1. Call get_pip_policy() to get the policy (lazy loading)
    2. Call self._get_installed_packages() to get the cached installed-package information
    3. Iterate through all package policies in the policy dictionary:
       a. Check whether each package has an uninstall policy
       b. If an uninstall policy exists:
          - Iterate through the uninstall policy list top-to-bottom
          - Evaluate each policy's condition with self._evaluate_condition()
          - When the first condition-satisfying policy is found:
            * Check whether the target package exists in self._installed_cache
            * If installed:
              - Generate the command with manager_util.make_pip_cmd(["uninstall", "-y", target])
              - Execute pip uninstall via subprocess
              - Log the reason in an info log
              - Add to the removed-package list
              - Remove the package from self._installed_cache
            * Move to the next package (only the first match per package)
    4. Complete the iteration through all package policies
  - **Return value**: List of removed package names (list of str)
  - **Exception handling**:
    - Individual package removal failure: Log a warning only and continue to the next package
  - **Call timing**:
    - Called at batch-operation start to pre-remove conflicting packages
    - Called before multiple package installations to clean the installation environment

* PipBatch.ensure_installed():
  - **Purpose**: Iterate through all policies and restore every package satisfying a restore condition (batch processing)
  - **Parameters**: None
  - **Execution flow**:
    1. Call get_pip_policy() to get the policy (lazy loading)
    2. Call self._get_installed_packages() to get the cached installed-package information
    3. Iterate through all package policies in the policy dictionary:
       a. Check whether each package has a restore policy
       b. If a restore policy exists:
          - Iterate through the restore policy list top-to-bottom
          - Evaluate each policy's condition with self._evaluate_condition()
          - When the first condition-satisfying policy is found:
            * Get the target package name (the policy's "target" field)
            * Get the version specified in the version field
            * Check the current version with self._installed_cache.get(target)
            * If the current version is None or differs from the specified version:
              - Compose package_spec = f"{target}=={version}"
              - Generate the command with manager_util.make_pip_cmd(["install", package_spec])
              - If extra_index_url exists, add ["--extra-index-url", url]
              - Execute pip install via subprocess
              - Log the reason in an info log
              - Add to the restored-package list
              - Update the cache: self._installed_cache[target] = version
            * Move to the next package (only the first match per package)
    4. Complete the iteration through all package policies
  - **Return value**: List of restored package names (list of str)
  - **Exception handling**:
    - Individual package installation failure: Log a warning only and continue to the next package
  - **Call timing**:
    - Called at batch-operation end to restore essential package versions
    - Called for environment verification after multiple package installations
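Condensed sketches of the three public methods, under the same assumptions as the earlier sketches (`make_pip_cmd` available, per-package error handling trimmed for brevity):

```python
def install(self, package_info, extra_index_url=None, override_policy=False):
    name, _spec = self._parse_package_spec(package_info)
    installed = self._get_installed_packages()
    pkg_policy = {} if override_policy else get_pip_policy().get(name, {})
    extras, pin_failure = [], None

    # apply_first_match: the first satisfied rule wins (exclusive)
    for rule in pkg_policy.get("apply_first_match", []):
        if not self._evaluate_condition(rule.get("condition"), name, installed):
            continue
        if rule["type"] == "skip":
            logging.info(f"[pip_util] skip {name}: {rule.get('reason')}")
            return False
        if rule["type"] == "force_version":
            package_info = f"{name}=={rule['version']}"
        elif rule["type"] == "replace":
            package_info = rule["replacement"]
        extra_index_url = extra_index_url or rule.get("extra_index_url")
        break

    # apply_all_matches: every satisfied rule applies (cumulative)
    for rule in pkg_policy.get("apply_all_matches", []):
        if not self._evaluate_condition(rule.get("condition"), name, installed):
            continue
        if rule["type"] == "pin_dependencies":
            extras += [f"{p}=={installed[p]}" for p in rule["pinned_packages"] if p in installed]
            pin_failure = rule.get("on_failure")
        elif rule["type"] == "install_with":
            extras += rule.get("additional_packages", [])
        elif rule["type"] == "warn":
            logging.warning(rule["message"])

    cmd = make_pip_cmd(["install", package_info] + extras)
    if extra_index_url:
        cmd += ["--extra-index-url", extra_index_url]
    try:
        subprocess.check_output(cmd, universal_newlines=True)
    except subprocess.CalledProcessError:
        if pin_failure != "retry_without_pin":
            raise
        subprocess.check_output(make_pip_cmd(["install", package_info]), universal_newlines=True)
    self._invalidate_cache()
    return True


def ensure_not_installed(self):
    removed, installed = [], self._get_installed_packages()
    for name, pkg_policy in get_pip_policy().items():
        for rule in pkg_policy.get("uninstall", []):
            if not self._evaluate_condition(rule.get("condition"), name, installed):
                continue
            target = rule["target"]
            if target in installed:
                subprocess.check_output(make_pip_cmd(["uninstall", "-y", target]), universal_newlines=True)
                logging.info(f"[pip_util] uninstalled {target}: {rule.get('reason')}")
                removed.append(target)
                installed.pop(target, None)
            break  # only the first match per package
    return removed


def ensure_installed(self):
    restored, installed = [], self._get_installed_packages()
    for name, pkg_policy in get_pip_policy().items():
        for rule in pkg_policy.get("restore", []):
            if not self._evaluate_condition(rule.get("condition"), name, installed):
                continue
            target, version = rule["target"], rule["version"]
            if installed.get(target) != version:
                cmd = make_pip_cmd(["install", f"{target}=={version}"])
                if rule.get("extra_index_url"):
                    cmd += ["--extra-index-url", rule["extra_index_url"]]
                subprocess.check_output(cmd, universal_newlines=True)
                restored.append(target)
                installed[target] = version
            break  # only the first match per package
    return restored
```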
## pip-policy.json Examples
|
||||
|
||||
### Base Policy File ({manager_util.comfyui_manager_path}/pip-policy.json)
|
||||
```json
|
||||
{
|
||||
"torch": {
|
||||
"apply_first_match": [
|
||||
{
|
||||
"type": "skip",
|
||||
"reason": "PyTorch installation should be managed manually due to CUDA compatibility"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
"opencv-python": {
|
||||
"apply_first_match": [
|
||||
{
|
||||
"type": "replace",
|
||||
"replacement": "opencv-contrib-python",
|
||||
"version": ">=4.8.0",
|
||||
"reason": "opencv-contrib-python includes all opencv-python features plus extras"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
"PIL": {
|
||||
"apply_first_match": [
|
||||
{
|
||||
"type": "replace",
|
||||
"replacement": "Pillow",
|
||||
"reason": "PIL is deprecated, use Pillow instead"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
"click": {
|
||||
"apply_first_match": [
|
||||
{
|
||||
"condition": {
|
||||
"type": "installed",
|
||||
"package": "colorama",
|
||||
"spec": "<0.5.0"
|
||||
},
|
||||
"type": "force_version",
|
||||
"version": "8.1.3",
|
||||
"reason": "click 8.1.3 compatible with colorama <0.5"
|
||||
}
|
||||
],
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["colorama"],
|
||||
"reason": "Prevent colorama upgrade that may break compatibility"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
"requests": {
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["urllib3", "certifi", "charset-normalizer"],
|
||||
"on_failure": "retry_without_pin",
|
||||
"reason": "Prevent urllib3 from upgrading to 2.x which has breaking changes"
|
||||
}
|
||||
]
|
  },

  "six": {
    "restore": [
      {
        "target": "six",
        "version": "1.16.0",
        "reason": "six must be maintained at 1.16.0 for compatibility"
      }
    ]
  },

  "urllib3": {
    "restore": [
      {
        "condition": {
          "type": "installed",
          "spec": "!=1.26.15"
        },
        "target": "urllib3",
        "version": "1.26.15",
        "reason": "urllib3 must be 1.26.15 for compatibility with legacy code"
      }
    ]
  },

  "onnxruntime": {
    "apply_first_match": [
      {
        "condition": {
          "type": "platform",
          "os": "linux",
          "has_gpu": true
        },
        "type": "replace",
        "replacement": "onnxruntime-gpu",
        "reason": "Use GPU version on Linux with CUDA"
      }
    ]
  },

  "legacy-custom-node-package": {
    "apply_first_match": [
      {
        "condition": {
          "type": "platform",
          "comfyui_version": "<1.0.0"
        },
        "type": "force_version",
        "version": "0.9.0",
        "reason": "legacy-custom-node-package 0.9.0 is compatible with ComfyUI <1.0.0"
      },
      {
        "condition": {
          "type": "platform",
          "comfyui_version": ">=1.0.0"
        },
        "type": "force_version",
        "version": "1.5.0",
        "reason": "legacy-custom-node-package 1.5.0 is required for ComfyUI >=1.0.0"
      }
    ]
  },

  "tensorflow": {
    "apply_all_matches": [
      {
        "condition": {
          "type": "installed",
          "package": "torch"
        },
        "type": "warn",
        "message": "Installing TensorFlow alongside PyTorch may cause CUDA conflicts",
        "allow_continue": true
      }
    ]
  },

  "some-package": {
    "uninstall": [
      {
        "condition": {
          "type": "installed",
          "package": "conflicting-package",
          "spec": ">=2.0.0"
        },
        "target": "conflicting-package",
        "reason": "conflicting-package >=2.0.0 conflicts with some-package"
      }
    ]
  },

  "banned-malicious-package": {
    "uninstall": [
      {
        "target": "banned-malicious-package",
        "reason": "Security vulnerability CVE-2024-XXXXX, always remove if attempting to install"
      }
    ]
  },

  "critical-package": {
    "restore": [
      {
        "condition": {
          "type": "installed",
          "package": "critical-package",
          "spec": "!=1.2.3"
        },
        "target": "critical-package",
        "version": "1.2.3",
        "extra_index_url": "https://custom-repo.example.com/simple",
        "reason": "critical-package must be version 1.2.3, restore if different or missing"
      }
    ]
  },

  "stable-package": {
    "apply_first_match": [
      {
        "condition": {
          "type": "installed",
          "package": "critical-dependency",
          "spec": ">=2.0.0"
        },
        "type": "force_version",
        "version": "1.5.0",
        "extra_index_url": "https://custom-repo.example.com/simple",
        "reason": "stable-package 1.5.0 is required when critical-dependency >=2.0.0 is installed"
      }
    ]
  },

  "new-experimental-package": {
    "apply_all_matches": [
      {
        "type": "pin_dependencies",
        "pinned_packages": ["numpy", "pandas", "scipy"],
        "on_failure": "retry_without_pin",
        "reason": "new-experimental-package may upgrade numpy/pandas/scipy, pin them to prevent breakage"
      }
    ]
  },

  "pytorch-addon": {
    "apply_all_matches": [
      {
        "condition": {
          "type": "installed",
          "package": "torch",
          "spec": ">=2.0.0"
        },
        "type": "pin_dependencies",
        "pinned_packages": ["torch", "torchvision", "torchaudio"],
        "on_failure": "fail",
        "reason": "pytorch-addon must not change PyTorch ecosystem versions"
      }
    ]
  }
}
```

### Policy Structure Schema

```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "patternProperties": {
    "^.*$": {
      "type": "object",
      "properties": {
        "uninstall": {
          "type": "array",
          "description": "When condition satisfied (or always if no condition), remove package and terminate",
          "items": {
            "type": "object",
            "required": ["target"],
            "properties": {
              "condition": {
                "type": "object",
                "description": "Optional: always remove if absent",
                "required": ["type"],
                "properties": {
                  "type": {"enum": ["installed", "platform"]},
                  "package": {"type": "string", "description": "Optional: defaults to self"},
                  "spec": {"type": "string", "description": "Optional: version condition"},
                  "os": {"type": "string"},
                  "has_gpu": {"type": "boolean"},
                  "comfyui_version": {"type": "string"}
                }
              },
              "target": {
                "type": "string",
                "description": "Package name to remove"
              },
              "reason": {"type": "string"}
            }
          }
        },
        "restore": {
          "type": "array",
          "description": "When condition satisfied (or always if no condition), restore package and terminate",
          "items": {
            "type": "object",
            "required": ["target", "version"],
            "properties": {
              "condition": {
                "type": "object",
                "description": "Optional: always restore if absent",
                "required": ["type"],
                "properties": {
                  "type": {"enum": ["installed", "platform"]},
                  "package": {"type": "string", "description": "Optional: defaults to self"},
                  "spec": {"type": "string", "description": "Optional: version condition"},
                  "os": {"type": "string"},
                  "has_gpu": {"type": "boolean"},
                  "comfyui_version": {"type": "string"}
                }
              },
              "target": {
                "type": "string",
                "description": "Package name to restore"
              },
              "version": {
                "type": "string",
                "description": "Version to restore"
              },
              "extra_index_url": {"type": "string"},
              "reason": {"type": "string"}
            }
          }
        },
        "apply_first_match": {
          "type": "array",
          "description": "Execute only first condition-satisfying policy (exclusive)",
          "items": {
            "type": "object",
            "required": ["type"],
            "properties": {
              "condition": {
                "type": "object",
                "description": "Optional: always apply if absent",
                "required": ["type"],
                "properties": {
                  "type": {"enum": ["installed", "platform"]},
                  "package": {"type": "string", "description": "Optional: defaults to self"},
                  "spec": {"type": "string", "description": "Optional: version condition"},
                  "os": {"type": "string"},
                  "has_gpu": {"type": "boolean"},
                  "comfyui_version": {"type": "string"}
                }
              },
              "type": {
                "enum": ["skip", "force_version", "replace"],
                "description": "Exclusive action: determines installation method"
              },
              "version": {"type": "string"},
              "replacement": {"type": "string"},
              "extra_index_url": {"type": "string"},
              "reason": {"type": "string"}
            }
          }
        },
        "apply_all_matches": {
          "type": "array",
          "description": "Execute all condition-satisfying policies (cumulative)",
          "items": {
            "type": "object",
            "required": ["type"],
            "properties": {
              "condition": {
                "type": "object",
                "description": "Optional: always apply if absent",
                "required": ["type"],
                "properties": {
                  "type": {"enum": ["installed", "platform"]},
                  "package": {"type": "string", "description": "Optional: defaults to self"},
                  "spec": {"type": "string", "description": "Optional: version condition"},
                  "os": {"type": "string"},
                  "has_gpu": {"type": "boolean"},
                  "comfyui_version": {"type": "string"}
                }
              },
              "type": {
                "enum": ["pin_dependencies", "install_with", "warn"],
                "description": "Cumulative action: adds installation options"
              },
              "pinned_packages": {
                "type": "array",
                "items": {"type": "string"}
              },
              "on_failure": {"enum": ["fail", "retry_without_pin"]},
              "additional_packages": {"type": "array"},
              "message": {"type": "string"},
              "allow_continue": {"type": "boolean"},
              "reason": {"type": "string"}
            }
          }
        }
      }
    }
  }
}
```
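
For reference, here is a minimal user policy that validates against this schema; the package name, version, and reason are illustrative only:

```json
{
  "numpy": {
    "apply_first_match": [
      {
        "type": "force_version",
        "version": "1.26.0",
        "reason": "hypothetical example: keep numpy at a known-good version"
      }
    ]
  }
}
```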

## Error Handling

* Default behavior when errors occur during policy execution:
  - Log the error and continue
  - Treat it as an installation failure only when a pin_dependencies policy specifies on_failure="fail"
  - In all other cases, log a warning and attempt the originally requested installation

* pip_install: Performs pip package installation
  - Uses manager_util.make_pip_cmd to generate commands so that uv or pip is applied selectively
  - Provides a way to skip policy application via the override_policy flag (see the sketch below)
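
A minimal sketch of what such a `pip_install` wrapper could look like, assuming the `PipBatch` API defined later in this changeset (the actual helper in `pip_util.py` may differ):

```python
from typing import Optional

def pip_install(package_info: str,
                extra_index_url: Optional[str] = None,
                override_policy: bool = False) -> bool:
    """One-shot install that honors the policy unless override_policy is set (sketch)."""
    with PipBatch() as batch:  # PipBatch is defined in pip_util.py below
        return batch.install(package_info,
                             extra_index_url=extra_index_url,
                             override_policy=override_policy)
```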

comfyui_manager/common/pip_util.implementation-plan.en.md (new file, 614 lines)
@@ -0,0 +1,614 @@

# pip_util.py Implementation Plan Document

## 1. Project Overview

### Purpose
Implement a policy-based pip package management system that minimizes breakage of already-installed dependencies.

### Core Features
- JSON-based policy file loading and merging (lazy loading)
- Per-package installation policy evaluation and application
- Performance optimization through batch-level pip freeze caching
- Automated conditional package removal/restoration

### Technology Stack
- Python 3.x
- packaging library (version comparison)
- subprocess (pip command execution)
- json (policy file parsing)

---

## 2. Architecture Design

### 2.1 Global Policy Management (Lazy Loading Pattern)

```
┌─────────────────────────────────────┐
│ get_pip_policy()                    │
│  - Auto-loads policy files on       │
│    first call via lazy loading      │
│  - Returns cache on subsequent calls│
└─────────────────────────────────────┘
                  │
                  ▼
┌─────────────────────────────────────┐
│ _pip_policy_cache (global)          │
│  - Merged policy dictionary         │
│  - {package_name: policy_object}    │
└─────────────────────────────────────┘
```
### 2.2 Batch Operation Class (PipBatch)

```
┌─────────────────────────────────────┐
│ PipBatch (Context Manager)          │
│ ┌───────────────────────────────┐   │
│ │ _installed_cache              │   │
│ │  - Caches pip freeze results  │   │
│ │  - {package: version}         │   │
│ └───────────────────────────────┘   │
│                                     │
│ Public Methods:                     │
│  ├─ install()                       │
│  ├─ ensure_not_installed()          │
│  └─ ensure_installed()              │
│                                     │
│ Private Methods:                    │
│  ├─ _get_installed_packages()       │
│  ├─ _refresh_installed_cache()      │
│  ├─ _invalidate_cache()             │
│  ├─ _parse_package_spec()           │
│  └─ _evaluate_condition()           │
└─────────────────────────────────────┘
```
### 2.3 Policy Evaluation Flow

```
install("numpy>=1.20") called
        │
        ▼
get_pip_policy() → Load policy (lazy)
        │
        ▼
Parse package name: "numpy"
        │
        ▼
Look up "numpy" policy in policy dictionary
        │
        ├─ Evaluate apply_first_match (exclusive)
        │   ├─ skip          → Return False (don't install)
        │   ├─ force_version → Change version
        │   └─ replace       → Replace package
        │
        ├─ Evaluate apply_all_matches (cumulative)
        │   ├─ pin_dependencies → Pin dependencies
        │   ├─ install_with     → Additional packages
        │   └─ warn             → Warning log
        │
        ▼
Execute pip install
        │
        ▼
Invalidate cache (_invalidate_cache)
```

---
## 3. Phase-by-Phase Implementation Plan

### Phase 1: Core Infrastructure Setup (2-3 hours)

#### Task 1.1: Project Structure and Dependency Setup (30 min)
**Implementation**:
- Create `pip_util.py` file
- Add necessary import statements
```python
import json
import logging
import platform
import re
import subprocess
from pathlib import Path
from typing import Dict, List, Optional, Tuple

from packaging.specifiers import SpecifierSet
from packaging.version import Version

from . import manager_util, context
```
- Set up logging
```python
logger = logging.getLogger(__name__)
```

**Validation**:
- Module loads without import errors
- Logger works correctly
#### Task 1.2: Global Variable and get_pip_policy() Implementation (1 hour)
**Implementation**:
- Declare global variable
```python
_pip_policy_cache: Optional[Dict] = None
```
- Implement `get_pip_policy()` function
  - Check the cache and return early if populated
  - Read base policy file (`{manager_util.comfyui_manager_path}/pip-policy.json`)
  - Read user policy file (`{context.manager_files_path}/pip-policy.user.json`)
    - Create the file if it doesn't exist (user policy only)
  - Merge policies (complete package-level replacement; see the illustration after this task)
  - Save to cache and return

**Exception Handling**:
- `FileNotFoundError`: File not found → Use empty dictionary
- `json.JSONDecodeError`: JSON parse failure → Warning log + empty dictionary
- General exception: Warning log + empty dictionary

**Validation**:
- Returns empty dictionary when policy files don't exist
- Returns correct merged result when policy files exist
- Confirms cache usage on second call (load log appears only once)
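
Since the merge is a complete package-level replacement rather than a deep merge, a user entry hides every rule the base policy defined for that package. A small self-contained illustration (values hypothetical):

```python
base_policy = {
    "numpy": {
        "apply_first_match": [{"type": "skip", "reason": "example"}],
        "restore": [{"target": "numpy", "version": "1.26.0"}],
    }
}
user_policy = {"_comment": "override", "numpy": {"apply_first_match": []}}

merged = dict(base_policy)
for name, pkg_policy in user_policy.items():
    if name.startswith("_"):       # metadata keys like _comment are skipped
        continue
    merged[name] = pkg_policy      # whole-package replacement, not a deep merge

assert "restore" not in merged["numpy"]  # the base restore rule is gone
```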

#### Task 1.3: PipBatch Class Basic Structure (30 min)
**Implementation**:
- Class definition and `__init__`
```python
class PipBatch:
    def __init__(self):
        self._installed_cache: Optional[Dict[str, str]] = None
```
- Context manager methods (`__enter__`, `__exit__`)
```python
def __enter__(self):
    return self

def __exit__(self, exc_type, exc_val, exc_tb):
    self._installed_cache = None
    return False
```

**Validation**:
- `with PipBatch() as batch:` syntax works correctly
- Cache cleared on `__exit__` call

---
### Phase 2: Caching and Utility Methods (2-3 hours)

#### Task 2.1: pip freeze Caching Methods (1 hour)
**Implementation**:
- Implement `_refresh_installed_cache()`
  - Call `manager_util.make_pip_cmd(["freeze"])`
  - Execute command via subprocess
  - Parse output (package==version format)
  - Exclude editable packages (-e) and comments (#)
  - Convert to dictionary and store in `self._installed_cache`
- Implement `_get_installed_packages()`
  - Call `_refresh_installed_cache()` if cache is None
  - Return cache
- Implement `_invalidate_cache()`
  - Set `self._installed_cache = None`

**Exception Handling**:
- `subprocess.CalledProcessError`: pip freeze failure → Empty dictionary
- Parse error: Ignore line + warning log

**Validation**:
- pip freeze results correctly parsed into dictionary (see the parsing sketch below)
- New load occurs after cache invalidation and re-query
|
||||
**Implementation**:
|
||||
- Implement `_parse_package_spec(package_info)`
|
||||
- Regex pattern: `^([a-zA-Z0-9_-]+)([><=!~]+.*)?$`
|
||||
- Split package name and version spec
|
||||
- Return tuple: `(package_name, version_spec)`
|
||||
|
||||
**Exception Handling**:
|
||||
- Parse failure: Raise `ValueError`
|
||||
|
||||
**Validation**:
|
||||
- "numpy" → ("numpy", None)
|
||||
- "numpy==1.26.0" → ("numpy", "==1.26.0")
|
||||
- "pandas>=2.0.0" → ("pandas", ">=2.0.0")
|
||||
- Invalid format → ValueError
|
||||
|
||||
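
A direct sketch of the regex split specified here (note that the final `pip_util.py` later in this changeset parses specs with `packaging.requirements.Requirement` instead, which also handles extras and multiple constraints):

```python
import re
from typing import Optional, Tuple

_SPEC_RE = re.compile(r'^([a-zA-Z0-9_-]+)([><=!~]+.*)?$')

def parse_package_spec(package_info: str) -> Tuple[str, Optional[str]]:
    """Regex-based split of 'name[spec]' as planned in Task 2.2 (sketch)."""
    m = _SPEC_RE.match(package_info)
    if not m:
        raise ValueError(f"Invalid package spec: {package_info}")
    return m.group(1), m.group(2)

assert parse_package_spec("numpy") == ("numpy", None)
assert parse_package_spec("numpy==1.26.0") == ("numpy", "==1.26.0")
```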

#### Task 2.3: Condition Evaluation Method (1.5 hours)
**Implementation**:
- Implement `_evaluate_condition(condition, package_name, installed_packages)`

**Handling by Condition Type**:
1. **condition is None**: Always return True
2. **"installed" type**:
   - `target_package = condition.get("package", package_name)`
   - Check version with `installed_packages.get(target_package)`
   - If spec exists, compare using `packaging.specifiers.SpecifierSet`
   - If no spec, only check installation status
3. **"platform" type**:
   - `os` condition: Compare with `platform.system()`
   - `has_gpu` condition: Check `torch.cuda.is_available()` (False if torch unavailable)
   - `comfyui_version` condition: TODO (currently warning)

**Exception Handling**:
- Version comparison failure: Warning log + return False
- Unknown condition type: Warning log + return False

**Validation**:
- Write test cases for each condition type
- Verify edge case handling (torch not installed, invalid version format, etc.)
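
The core of the "installed" check reduces to a few lines of `packaging` usage; a runnable sketch with hypothetical values:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

installed = {"numpy": "1.26.0"}  # hypothetical pip freeze snapshot
condition = {"type": "installed", "package": "numpy", "spec": ">=1.20"}

version = installed.get(condition["package"])
satisfied = version is not None and Version(version) in SpecifierSet(condition["spec"])
print(satisfied)  # True: 1.26.0 satisfies >=1.20
```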

---

### Phase 3: Core Installation Logic Implementation (4-5 hours)

#### Task 3.1: install() Method - Basic Flow (2 hours)
**Implementation**:
1. Parse package spec (`_parse_package_spec`)
2. Query installed package cache (`_get_installed_packages`)
3. If `override_policy=True`, install directly and return
4. Call `get_pip_policy()` to load policy
5. Default installation if no policy exists

**Validation**:
- Verify policy ignored when override_policy=True
- Verify default installation for packages without policy

#### Task 3.2: install() Method - apply_first_match Policy (1 hour)
**Implementation**:
- Iterate through policy list top-to-bottom
- Evaluate each policy's condition (`_evaluate_condition`)
- When condition satisfied:
  - **skip**: Log reason and return False
  - **force_version**: Force version change
  - **replace**: Replace package
- Apply only first match (break)

**Validation**:
- Verify installation blocked by skip policy
- Verify version changed by force_version
- Verify package replaced by replace

#### Task 3.3: install() Method - apply_all_matches Policy (1 hour)
**Implementation**:
- Iterate through policy list top-to-bottom
- Evaluate each policy's condition
- Apply all condition-satisfying policies:
  - **pin_dependencies**: Pin to installed version
  - **install_with**: Add to additional package list
  - **warn**: Output warning log

**Validation**:
- Verify multiple policies applied simultaneously
- Verify version pinning by pin_dependencies
- Verify additional package installation by install_with

#### Task 3.4: install() Method - Installation Execution and Retry Logic (1 hour)
**Implementation**:
1. Compose final package list
2. Generate command using `manager_util.make_pip_cmd()`
3. Handle `extra_index_url`
4. Execute installation via subprocess
5. Handle failure based on on_failure setting:
   - `retry_without_pin`: Retry without pins
   - `fail`: Raise exception
   - Other: Warning log
6. Invalidate cache on success

**Validation**:
- Verify normal installation
- Verify retry logic on pin failure
- Verify error handling

---

### Phase 4: Batch Operation Methods Implementation (2-3 hours)

#### Task 4.1: ensure_not_installed() Implementation (1.5 hours)
**Implementation**:
1. Call `get_pip_policy()`
2. Iterate through all package policies
3. Check each package's uninstall policy
4. When condition satisfied:
   - Check if target package is installed
   - If installed, execute `pip uninstall -y {target}`
   - Remove from cache
   - Add to removal list
5. Execute only first match (per package)
6. Return list of removed packages

**Exception Handling**:
- Individual package removal failure: Warning log + continue

**Validation**:
- Verify package removal by uninstall policy
- Verify batch removal of multiple packages
- Verify continued processing of other packages even on removal failure

#### Task 4.2: ensure_installed() Implementation (1.5 hours)
**Implementation**:
1. Call `get_pip_policy()`
2. Iterate through all package policies
3. Check each package's restore policy
4. When condition satisfied:
   - Check target package's current version
   - If absent or a different version:
     - Execute `pip install {target}=={version}`
     - Add extra_index_url if present
   - Update cache
   - Add to restoration list
5. Execute only first match (per package)
6. Return list of restored packages

**Exception Handling**:
- Individual package installation failure: Warning log + continue

**Validation**:
- Verify package restoration by restore policy
- Verify reinstallation on version mismatch
- Verify continued processing of other packages even on restoration failure

---

## 4. Testing Strategy

### 4.1 Unit Tests

#### Policy Loading Tests
```python
def test_get_pip_policy_empty():
    """Returns empty dictionary when policy files don't exist"""

def test_get_pip_policy_merge():
    """Correctly merges base and user policies"""

def test_get_pip_policy_cache():
    """Uses cache on second call"""
```
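
To make the first stub concrete, one possible pytest implementation (the import path and the monkeypatched attribute names are assumptions based on this plan):

```python
from comfyui_manager.common import pip_util  # assumed import path

def test_get_pip_policy_empty(tmp_path, monkeypatch):
    """Returns an empty dictionary when neither policy file exists."""
    monkeypatch.setattr(pip_util.manager_util, "comfyui_manager_path", str(tmp_path))
    monkeypatch.setattr(pip_util.context, "manager_files_path", str(tmp_path))
    monkeypatch.setattr(pip_util, "_pip_policy_cache", None)  # reset the lazy cache

    # No base policy exists; the loader creates an empty user policy file on the fly,
    # and "_"-prefixed metadata keys are never merged, so the result is empty.
    assert pip_util.get_pip_policy() == {}
```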

#### Package Parsing Tests
```python
def test_parse_package_spec_simple():
    """'numpy' → ('numpy', None)"""

def test_parse_package_spec_version():
    """'numpy==1.26.0' → ('numpy', '==1.26.0')"""

def test_parse_package_spec_range():
    """'pandas>=2.0.0' → ('pandas', '>=2.0.0')"""

def test_parse_package_spec_invalid():
    """Invalid format → ValueError"""
```

#### Condition Evaluation Tests
```python
def test_evaluate_condition_none():
    """None condition → True"""

def test_evaluate_condition_installed():
    """Evaluates installed package condition"""

def test_evaluate_condition_platform():
    """Evaluates platform condition"""
```

### 4.2 Integration Tests

#### Installation Policy Tests
```python
def test_install_with_skip_policy():
    """Blocks installation with skip policy"""

def test_install_with_force_version():
    """Changes version with force_version policy"""

def test_install_with_replace():
    """Replaces package with replace policy"""

def test_install_with_pin_dependencies():
    """Pins versions with pin_dependencies"""
```

#### Batch Operation Tests
```python
def test_ensure_not_installed():
    """Removes packages with uninstall policy"""

def test_ensure_installed():
    """Restores packages with restore policy"""

def test_batch_workflow():
    """Tests complete batch workflow"""
```

### 4.3 Edge Case Tests

```python
def test_install_without_policy():
    """Default installation for packages without policy"""

def test_install_override_policy():
    """Ignores policy with override_policy=True"""

def test_pip_freeze_failure():
    """Handles empty cache on pip freeze failure"""

def test_json_parse_error():
    """Handles malformed JSON files"""

def test_subprocess_failure():
    """Exception handling when pip command fails"""
```

---

## 5. Error Handling Strategy

### 5.1 Policy Loading Errors
- **File not found**: Warning log + empty dictionary
- **JSON parse failure**: Error log + empty dictionary
- **No read permission**: Warning log + empty dictionary

### 5.2 Package Installation Errors
- **pip command failure**: Depends on on_failure setting
  - `retry_without_pin`: Retry
  - `fail`: Raise exception
  - Other: Warning log
- **Invalid package spec**: Raise ValueError

### 5.3 Batch Operation Errors
- **Individual package failure**: Warning log + continue to next package
- **pip freeze failure**: Empty dictionary + warning log

---

## 6. Performance Optimization

### 6.1 Caching Strategy
- **Policy cache**: Reused program-wide via global variable
- **pip freeze cache**: Reused per batch, invalidated after install/remove
- **Lazy loading**: Load only when needed

### 6.2 Parallel Processing Considerations
- Current implementation is not thread-safe
- Consider adding threading.Lock if needed (see the sketch after this list)
- Batch operations execute sequentially only
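
If the Lock is added, the change to `get_pip_policy()` is small. A sketch, assuming the file-reading and merge body is factored into a helper (`_load_and_merge_policies` is hypothetical):

```python
import threading

_policy_lock = threading.Lock()

def get_pip_policy() -> dict:
    """Lock-guarded variant of the lazy loader (sketch, not in the current plan's scope)."""
    global _pip_policy_cache
    with _policy_lock:
        if _pip_policy_cache is None:
            # _load_and_merge_policies() would wrap the existing file-reading
            # and package-level merge logic from Task 1.2.
            _pip_policy_cache = _load_and_merge_policies()
        return _pip_policy_cache
```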

---

## 7. Documentation Requirements

### 7.1 Code Documentation
- Docstrings required for all public methods
- Specify parameters, return values, and exceptions
- Include usage examples

### 7.2 User Guide
- Explain `pip-policy.json` structure
- Policy writing examples
- Usage pattern examples

### 7.3 Developer Guide
- Architecture explanation
- Extension methods
- Test execution methods

---

## 8. Deployment Checklist

### 8.1 Code Quality
- [ ] All unit tests pass
- [ ] All integration tests pass
- [ ] Code coverage ≥80%
- [ ] No linting errors (flake8, pylint)
- [ ] Type hints complete (mypy passes)

### 8.2 Documentation
- [ ] README.md written
- [ ] API documentation generated
- [ ] Example policy files written
- [ ] Usage guide written

### 8.3 Performance Verification
- [ ] Policy loading performance measured (<100ms)
- [ ] pip freeze caching effectiveness verified (≥50% speed improvement)
- [ ] Memory usage confirmed (<10MB)

### 8.4 Security Verification
- [ ] Input validation complete
- [ ] Path traversal prevention
- [ ] Command injection prevention
- [ ] JSON parsing safety confirmed

---

## 9. Future Improvements

### 9.1 Short-term (1-2 weeks)
- Implement ComfyUI version check
- Implement user confirmation prompt (allow_continue=false)
- Thread-safety improvements (add Lock)

### 9.2 Mid-term (1-2 months)
- Add policy validation tools
- Policy migration tools
- More detailed logging and debugging options

### 9.3 Long-term (3-6 months)
- Web UI for policy management
- Provide policy templates
- Community policy sharing system

---

## 10. Risks and Mitigation Strategies

### Risk 1: Policy Conflicts
**Description**: Policies for different packages may conflict
**Mitigation**: Develop policy validation tools and a conflict detection algorithm

### Risk 2: pip Version Compatibility
**Description**: Must work across various pip versions
**Mitigation**: Test on multiple pip versions, with version-specific branching

### Risk 3: Performance Degradation
**Description**: Installation speed may decrease due to policy evaluation
**Mitigation**: Optimize caching, minimize condition evaluation

### Risk 4: Policy Misconfiguration
**Description**: Users may write incorrect policies
**Mitigation**: JSON schema validation (see the sketch below), plus examples and guides
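
For Risk 4, validation can lean on the schema shown earlier in this changeset; a sketch using the `jsonschema` package (an assumed extra dependency, not part of the plan's stack, and the schema file path is hypothetical):

```python
import json
import jsonschema  # assumed extra dependency for validation tooling

with open("pip-policy.schema.json") as f:   # hypothetical location for the schema above
    schema = json.load(f)
with open("pip-policy.user.json") as f:
    policy = json.load(f)

try:
    jsonschema.validate(instance=policy, schema=schema)
except jsonschema.ValidationError as e:
    print(f"Policy error at {list(e.path)}: {e.message}")
```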

---

## 11. Timeline

### Week 1
- Phase 1: Core Infrastructure Setup (Day 1-2)
- Phase 2: Caching and Utility Methods (Day 3-4)
- Write unit tests (Day 5)

### Week 2
- Phase 3: Core Installation Logic Implementation (Day 1-3)
- Phase 4: Batch Operation Methods Implementation (Day 4-5)

### Week 3
- Integration and edge case testing (Day 1-2)
- Documentation (Day 3)
- Code review and refactoring (Day 4-5)

### Week 4
- Performance optimization (Day 1-2)
- Security verification (Day 3)
- Final testing and deployment preparation (Day 4-5)

---

## 12. Success Criteria

### Feature Completeness
- ✅ All policy types (uninstall, apply_first_match, apply_all_matches, restore) work correctly
- ✅ Policy merge logic works correctly
- ✅ Batch operations perform normally

### Quality Metrics
- ✅ Test coverage ≥80%
- ✅ All tests pass
- ✅ 0 linting errors
- ✅ 100% type hint completion

### Performance Metrics
- ✅ Policy loading <100ms
- ✅ ≥50% performance improvement with pip freeze caching
- ✅ Memory usage <10MB

### Usability
- ✅ Clear error messages
- ✅ Sufficient documentation
- ✅ Verified in real-world use cases

comfyui_manager/common/pip_util.py (new file, 629 lines)
@@ -0,0 +1,629 @@
"""
pip_util - Policy-based pip package management system

This module provides a policy-based approach to pip package installation
to minimize dependency conflicts and protect existing installed packages.

Usage:
    # Batch operations (policy auto-loaded)
    with PipBatch() as batch:
        batch.ensure_not_installed()
        batch.install("numpy>=1.20")
        batch.install("pandas>=2.0")
        batch.install("scipy>=1.7")
        batch.ensure_installed()
"""

import json
import logging
import platform
import re
import subprocess
from pathlib import Path
from typing import Dict, List, Optional, Tuple

from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import Version

from . import manager_util, context

logger = logging.getLogger(__name__)

# Global policy cache (lazy loaded on first access)
_pip_policy_cache: Optional[Dict] = None


def get_pip_policy() -> Dict:
    """
    Get pip policy with lazy loading.

    Returns the cached policy if available, otherwise loads it from files.
    This function automatically loads the policy on first access.

    Thread safety: This function is NOT thread-safe.
    Ensure single-threaded access during initialization.

    Returns:
        Dictionary of merged pip policies

    Example:
        >>> policy = get_pip_policy()
        >>> numpy_policy = policy.get("numpy", {})
    """
    global _pip_policy_cache

    # Return cached policy if already loaded
    if _pip_policy_cache is not None:
        logger.debug("Returning cached pip policy")
        return _pip_policy_cache

    logger.info("Loading pip policies...")

    # Load base policy
    base_policy = {}
    base_policy_path = Path(manager_util.comfyui_manager_path) / "pip-policy.json"

    try:
        if base_policy_path.exists():
            with open(base_policy_path, 'r', encoding='utf-8') as f:
                base_policy = json.load(f)
            logger.debug(f"Loaded base policy from {base_policy_path}")
        else:
            logger.warning(f"Base policy file not found: {base_policy_path}")
    except json.JSONDecodeError as e:
        logger.error(f"Failed to parse base policy JSON: {e}")
        base_policy = {}
    except Exception as e:
        logger.warning(f"Failed to read base policy file: {e}")
        base_policy = {}

    # Load user policy
    user_policy = {}
    user_policy_path = Path(context.manager_files_path) / "pip-policy.user.json"

    try:
        if user_policy_path.exists():
            with open(user_policy_path, 'r', encoding='utf-8') as f:
                user_policy = json.load(f)
            logger.debug(f"Loaded user policy from {user_policy_path}")
        else:
            # Create empty user policy file
            user_policy_path.parent.mkdir(parents=True, exist_ok=True)
            with open(user_policy_path, 'w', encoding='utf-8') as f:
                json.dump({"_comment": "User-specific pip policy overrides"}, f, indent=2)
            logger.info(f"Created empty user policy file: {user_policy_path}")
    except json.JSONDecodeError as e:
        logger.warning(f"Failed to parse user policy JSON: {e}")
        user_policy = {}
    except Exception as e:
        logger.warning(f"Failed to read user policy file: {e}")
        user_policy = {}

    # Merge policies (package-level override: user completely replaces base per package)
    merged_policy = base_policy.copy()
    for package_name, package_policy in user_policy.items():
        if package_name.startswith("_"):  # Skip metadata fields like _comment
            continue
        merged_policy[package_name] = package_policy  # Complete package replacement

    # Store in global cache
    _pip_policy_cache = merged_policy
    logger.info(f"Policy loaded successfully: {len(_pip_policy_cache)} package policies")

    return _pip_policy_cache


class PipBatch:
    """
    Pip package installation batch manager.

    Maintains pip freeze cache during a batch of operations for performance optimization.

    Usage pattern:
        # Batch operations (policy auto-loaded)
        with PipBatch() as batch:
            batch.ensure_not_installed()
            batch.install("numpy>=1.20")
            batch.install("pandas>=2.0")
            batch.install("scipy>=1.7")
            batch.ensure_installed()

    Attributes:
        _installed_cache: Cache of installed packages from pip freeze
    """

    def __init__(self):
        """Initialize PipBatch with empty cache."""
        self._installed_cache: Optional[Dict[str, str]] = None

    def __enter__(self):
        """Enter context manager."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Exit context manager and clear cache."""
        self._installed_cache = None
        return False

    def _refresh_installed_cache(self) -> None:
        """
        Refresh the installed packages cache by executing pip freeze.

        Parses pip freeze output into a dictionary of {package_name: version}.
        Ignores editable packages and comments.

        Raises:
            No exceptions raised - failures result in empty cache with warning log
        """
        try:
            cmd = manager_util.make_pip_cmd(["freeze"])
            result = subprocess.run(cmd, capture_output=True, text=True, check=True)

            packages = {}
            for line in result.stdout.strip().split('\n'):
                line = line.strip()

                # Skip empty lines
                if not line:
                    continue

                # Skip editable packages (-e /path/to/package or -e git+https://...)
                # Editable packages don't have version info and are typically development-only
                if line.startswith('-e '):
                    continue

                # Skip comments (defensive: pip freeze typically doesn't output comments,
                # but this handles manually edited requirements.txt or future pip changes)
                if line.startswith('#'):
                    continue

                # Parse package==version
                if '==' in line:
                    try:
                        package_name, version = line.split('==', 1)
                        packages[package_name.strip()] = version.strip()
                    except ValueError:
                        logger.warning(f"Failed to parse pip freeze line: {line}")
                        continue

            self._installed_cache = packages
            logger.debug(f"Refreshed installed packages cache: {len(packages)} packages")

        except subprocess.CalledProcessError as e:
            logger.warning(f"pip freeze failed: {e}")
            self._installed_cache = {}
        except Exception as e:
            logger.warning(f"Failed to refresh installed packages cache: {e}")
            self._installed_cache = {}

    def _get_installed_packages(self) -> Dict[str, str]:
        """
        Get cached installed packages, refresh if cache is None.

        Returns:
            Dictionary of {package_name: version}
        """
        if self._installed_cache is None:
            self._refresh_installed_cache()
        return self._installed_cache

    def _invalidate_cache(self) -> None:
        """
        Invalidate the installed packages cache.

        Should be called after install/uninstall operations.
        """
        self._installed_cache = None

    def _parse_package_spec(self, package_info: str) -> Tuple[str, Optional[str]]:
        """
        Parse package spec string into package name and version spec using PEP 508.

        Uses the packaging library to properly parse package specifications according to
        PEP 508 standard, which handles complex cases like extras and multiple version
        constraints that simple regex cannot handle correctly.

        Args:
            package_info: Package specification like "numpy", "numpy==1.26.0", "numpy>=1.20.0",
                          or complex specs like "package[extra]>=1.0,<2.0"

        Returns:
            Tuple of (package_name, version_spec)
            Examples: ("numpy", "==1.26.0"), ("pandas", ">=2.0.0"), ("scipy", None)
            Package names are normalized (e.g., "NumPy" -> "numpy")

        Raises:
            ValueError: If package_info cannot be parsed according to PEP 508

        Example:
            >>> batch._parse_package_spec("numpy>=1.20")
            ("numpy", ">=1.20")
            >>> batch._parse_package_spec("requests[security]>=2.0,<3.0")
            ("requests", ">=2.0,<3.0")
        """
        try:
            req = Requirement(package_info)
            package_name = req.name  # Normalized package name
            version_spec = str(req.specifier) if req.specifier else None
            return package_name, version_spec
        except Exception as e:
            raise ValueError(f"Invalid package spec: {package_info}") from e

    def _evaluate_condition(self, condition: Optional[Dict], package_name: str,
                            installed_packages: Dict[str, str]) -> bool:
        """
        Evaluate policy condition and return whether it's satisfied.

        Args:
            condition: Policy condition object (dict) or None
            package_name: Current package being processed
            installed_packages: Dictionary of {package_name: version}

        Returns:
            True if condition is satisfied, False otherwise
            None condition always returns True

        Example:
            >>> condition = {"type": "installed", "package": "numpy", "spec": ">=1.20"}
            >>> batch._evaluate_condition(condition, "numba", {"numpy": "1.26.0"})
            True
        """
        # No condition means always satisfied
        if condition is None:
            return True

        condition_type = condition.get("type")

        if condition_type == "installed":
            # Check if a package is installed with optional version spec
            target_package = condition.get("package", package_name)
            installed_version = installed_packages.get(target_package)

            # Package not installed
            if installed_version is None:
                return False

            # Check version spec if provided
            spec = condition.get("spec")
            if spec:
                try:
                    specifier = SpecifierSet(spec)
                    return Version(installed_version) in specifier
                except Exception as e:
                    logger.warning(f"Failed to compare version {installed_version} with spec {spec}: {e}")
                    return False

            # Package is installed (no spec check)
            return True

        elif condition_type == "platform":
            # Check platform conditions (os, has_gpu, comfyui_version)
            conditions_met = True

            # Check OS
            if "os" in condition:
                expected_os = condition["os"].lower()
                actual_os = platform.system().lower()
                if expected_os not in actual_os and actual_os not in expected_os:
                    conditions_met = False

            # Check GPU availability
            if "has_gpu" in condition:
                expected_gpu = condition["has_gpu"]
                try:
                    import torch
                    has_gpu = torch.cuda.is_available()
                except ImportError:
                    has_gpu = False

                if expected_gpu != has_gpu:
                    conditions_met = False

            # Check ComfyUI version
            if "comfyui_version" in condition:
                # TODO: Implement ComfyUI version check
                logger.warning("ComfyUI version condition not yet implemented")

            return conditions_met

        else:
            logger.warning(f"Unknown condition type: {condition_type}")
            return False

    def install(self, package_info: str, extra_index_url: Optional[str] = None,
                override_policy: bool = False) -> bool:
        """
        Install a pip package with policy-based modifications.

        Args:
            package_info: Package specification (e.g., "numpy", "numpy==1.26.0", "numpy>=1.20.0")
            extra_index_url: Additional package repository URL (optional)
            override_policy: If True, skip policy application and install directly (default: False)

        Returns:
            True if installation succeeded, False if skipped by policy

        Raises:
            ValueError: If package_info cannot be parsed
            subprocess.CalledProcessError: If installation fails (depending on policy on_failure settings)

        Example:
            >>> with PipBatch() as batch:
            ...     batch.install("numpy>=1.20")
            ...     batch.install("torch", override_policy=True)
        """
        # Parse package spec
        try:
            package_name, version_spec = self._parse_package_spec(package_info)
        except ValueError as e:
            logger.error(f"Invalid package spec: {e}")
            raise

        # Get installed packages cache
        installed_packages = self._get_installed_packages()

        # Override policy - skip to direct installation
        if override_policy:
            logger.info(f"Installing {package_info} (policy override)")
            cmd = manager_util.make_pip_cmd(["install", package_info])
            if extra_index_url:
                cmd.extend(["--extra-index-url", extra_index_url])

            try:
                subprocess.run(cmd, check=True)
                self._invalidate_cache()
                logger.info(f"Successfully installed {package_info}")
                return True
            except subprocess.CalledProcessError as e:
                logger.error(f"Failed to install {package_info}: {e}")
                raise

        # Get policy (lazy loading)
        pip_policy = get_pip_policy()
        policy = pip_policy.get(package_name, {})

        # If no policy, proceed with default installation
        if not policy:
            logger.debug(f"No policy found for {package_name}, proceeding with default installation")
            cmd = manager_util.make_pip_cmd(["install", package_info])
            if extra_index_url:
                cmd.extend(["--extra-index-url", extra_index_url])

            try:
                subprocess.run(cmd, check=True)
                self._invalidate_cache()
                logger.info(f"Successfully installed {package_info}")
                return True
            except subprocess.CalledProcessError as e:
                logger.error(f"Failed to install {package_info}: {e}")
                raise

        # Apply apply_first_match policies (exclusive - first match only)
        final_package_info = package_info
        final_extra_index_url = extra_index_url
        policy_reason = None

        apply_first_match = policy.get("apply_first_match", [])
        for policy_item in apply_first_match:
            condition = policy_item.get("condition")
            if self._evaluate_condition(condition, package_name, installed_packages):
                policy_type = policy_item.get("type")

                if policy_type == "skip":
                    reason = policy_item.get("reason", "No reason provided")
                    logger.info(f"Skipping installation of {package_name}: {reason}")
                    return False

                elif policy_type == "force_version":
                    forced_version = policy_item.get("version")
                    final_package_info = f"{package_name}=={forced_version}"
                    policy_reason = policy_item.get("reason")
                    if "extra_index_url" in policy_item:
                        final_extra_index_url = policy_item["extra_index_url"]
                    logger.info(f"Force version for {package_name}: {forced_version} ({policy_reason})")
                    break  # First match only

                elif policy_type == "replace":
                    replacement = policy_item.get("replacement")
                    replacement_version = policy_item.get("version", "")
                    if replacement_version:
                        final_package_info = f"{replacement}{replacement_version}"
                    else:
                        final_package_info = replacement
                    policy_reason = policy_item.get("reason")
                    if "extra_index_url" in policy_item:
                        final_extra_index_url = policy_item["extra_index_url"]
                    logger.info(f"Replacing {package_name} with {final_package_info}: {policy_reason}")
                    break  # First match only

        # Apply apply_all_matches policies (cumulative - all matches)
        additional_packages = []
        pinned_packages = []
        pin_on_failure = "fail"

        apply_all_matches = policy.get("apply_all_matches", [])
        for policy_item in apply_all_matches:
            condition = policy_item.get("condition")
            if self._evaluate_condition(condition, package_name, installed_packages):
                policy_type = policy_item.get("type")

                if policy_type == "pin_dependencies":
                    pin_list = policy_item.get("pinned_packages", [])
                    for pkg in pin_list:
                        installed_version = installed_packages.get(pkg)
                        if installed_version:
                            pinned_packages.append(f"{pkg}=={installed_version}")
                        else:
                            logger.warning(f"Cannot pin {pkg}: not currently installed")
                    pin_on_failure = policy_item.get("on_failure", "fail")
                    reason = policy_item.get("reason", "")
                    logger.info(f"Pinning dependencies: {pinned_packages} ({reason})")

                elif policy_type == "install_with":
                    additional = policy_item.get("additional_packages", [])
                    additional_packages.extend(additional)
                    reason = policy_item.get("reason", "")
                    logger.info(f"Installing additional packages: {additional} ({reason})")

                elif policy_type == "warn":
                    message = policy_item.get("message", "")
                    allow_continue = policy_item.get("allow_continue", True)
                    logger.warning(f"Policy warning for {package_name}: {message}")
                    if not allow_continue:
                        # TODO: Implement user confirmation
                        logger.info("User confirmation required (not implemented, continuing)")

        # Build final package list
        packages_to_install = [final_package_info] + pinned_packages + additional_packages

        # Execute installation
        cmd = manager_util.make_pip_cmd(["install"] + packages_to_install)
        if final_extra_index_url:
            cmd.extend(["--extra-index-url", final_extra_index_url])

        try:
            subprocess.run(cmd, check=True)
            self._invalidate_cache()
            if policy_reason:
                logger.info(f"Successfully installed {final_package_info}: {policy_reason}")
            else:
                logger.info(f"Successfully installed {final_package_info}")
            return True

        except subprocess.CalledProcessError as e:
            # Handle installation failure
            if pinned_packages and pin_on_failure == "retry_without_pin":
                logger.warning("Installation failed with pinned dependencies, retrying without pins")
                retry_cmd = manager_util.make_pip_cmd(["install", final_package_info])
                if final_extra_index_url:
                    retry_cmd.extend(["--extra-index-url", final_extra_index_url])

                try:
                    subprocess.run(retry_cmd, check=True)
                    self._invalidate_cache()
                    logger.info(f"Successfully installed {final_package_info} (without pins)")
                    return True
                except subprocess.CalledProcessError as retry_error:
                    logger.error(f"Retry installation also failed: {retry_error}")
                    raise

            elif pin_on_failure == "fail":
                logger.error(f"Installation failed: {e}")
                raise

            else:
                logger.warning(f"Installation failed, but continuing: {e}")
                return False

    def ensure_not_installed(self) -> List[str]:
        """
        Remove all packages matching uninstall policies (batch processing).

        Iterates through all package policies and executes uninstall actions
        where conditions are satisfied.

        Returns:
            List of removed package names

        Example:
            >>> with PipBatch() as batch:
            ...     removed = batch.ensure_not_installed()
            ...     print(f"Removed: {removed}")
        """
        # Get policy (lazy loading)
        pip_policy = get_pip_policy()

        installed_packages = self._get_installed_packages()
        removed_packages = []

        for package_name, policy in pip_policy.items():
            uninstall_policies = policy.get("uninstall", [])

            for uninstall_policy in uninstall_policies:
                condition = uninstall_policy.get("condition")

                if self._evaluate_condition(condition, package_name, installed_packages):
                    target = uninstall_policy.get("target")
                    reason = uninstall_policy.get("reason", "No reason provided")

                    # Check if target is installed
                    if target in installed_packages:
                        try:
                            cmd = manager_util.make_pip_cmd(["uninstall", "-y", target])
                            subprocess.run(cmd, check=True)

                            logger.info(f"Uninstalled {target}: {reason}")
                            removed_packages.append(target)

                            # Remove from cache
                            del installed_packages[target]

                        except subprocess.CalledProcessError as e:
                            logger.warning(f"Failed to uninstall {target}: {e}")

                    # First match only per package
                    break

        return removed_packages

    def ensure_installed(self) -> List[str]:
        """
        Restore all packages matching restore policies (batch processing).

        Iterates through all package policies and executes restore actions
        where conditions are satisfied.

        Returns:
            List of restored package names

        Example:
            >>> with PipBatch() as batch:
            ...     batch.install("numpy>=1.20")
            ...     restored = batch.ensure_installed()
            ...     print(f"Restored: {restored}")
        """
        # Get policy (lazy loading)
        pip_policy = get_pip_policy()

        installed_packages = self._get_installed_packages()
        restored_packages = []

        for package_name, policy in pip_policy.items():
            restore_policies = policy.get("restore", [])

            for restore_policy in restore_policies:
                condition = restore_policy.get("condition")

                if self._evaluate_condition(condition, package_name, installed_packages):
                    target = restore_policy.get("target")
                    version = restore_policy.get("version")
                    reason = restore_policy.get("reason", "No reason provided")
                    extra_index_url = restore_policy.get("extra_index_url")

                    # Check if target needs restoration
                    current_version = installed_packages.get(target)

                    if current_version is None or current_version != version:
                        try:
                            package_spec = f"{target}=={version}"
                            cmd = manager_util.make_pip_cmd(["install", package_spec])

                            if extra_index_url:
                                cmd.extend(["--extra-index-url", extra_index_url])

                            subprocess.run(cmd, check=True)

                            logger.info(f"Restored {package_spec}: {reason}")
                            restored_packages.append(target)

                            # Update cache
                            installed_packages[target] = version

                        except subprocess.CalledProcessError as e:
                            logger.warning(f"Failed to restore {target}: {e}")

                    # First match only per package
                    break

        return restored_packages

comfyui_manager/common/pip_util.test-design.md (new file, 2916 lines)
File diff suppressed because it is too large.

@@ -2,6 +2,8 @@ import sys
 import subprocess
 import os
 
+from . import manager_util
+
 
 def security_check():
     print("[START] Security scan")
@@ -66,18 +68,23 @@ https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situati
         "lolMiner": [os.path.join(comfyui_path, 'lolMiner')]
     }
 
-    installed_pips = subprocess.check_output([sys.executable, '-m', "pip", "freeze"], text=True)
+    installed_pips = subprocess.check_output(manager_util.make_pip_cmd(["freeze"]), text=True)
 
     detected = set()
     try:
-        anthropic_info = subprocess.check_output([sys.executable, '-m', "pip", "show", "anthropic"], text=True, stderr=subprocess.DEVNULL)
-        anthropic_reqs = [x for x in anthropic_info.split('\n') if x.startswith("Requires")][0].split(': ')[1]
-        if "pycrypto" in anthropic_reqs:
-            location = [x for x in anthropic_info.split('\n') if x.startswith("Location")][0].split(': ')[1]
-            for fi in os.listdir(location):
-                if fi.startswith("anthropic"):
-                    guide["ComfyUI_LLMVISION"] = f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"]
-                    detected.add("ComfyUI_LLMVISION")
+        anthropic_info = subprocess.check_output(manager_util.make_pip_cmd(["show", "anthropic"]), text=True, stderr=subprocess.DEVNULL)
+        requires_lines = [x for x in anthropic_info.split('\n') if x.startswith("Requires")]
+        if requires_lines:
+            anthropic_reqs = requires_lines[0].split(": ", 1)[1]
+            if "pycrypto" in anthropic_reqs:
+                location_lines = [x for x in anthropic_info.split('\n') if x.startswith("Location")]
+                if location_lines:
+                    location = location_lines[0].split(": ", 1)[1]
+                    for fi in os.listdir(location):
+                        if fi.startswith("anthropic"):
+                            guide["ComfyUI_LLMVISION"] = (f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"])
+                            detected.add("ComfyUI_LLMVISION")
+
 
     except subprocess.CalledProcessError:
         pass

custom-node-list.json → comfyui_manager/custom-node-list.json (6716 lines, Executable file → Normal file)
File diff suppressed because it is too large.

@@ -29,6 +29,7 @@ datamodel-codegen \
     --use-subclass-enum \
     --field-constraints \
     --strict-types bytes \
+    --use-double-quotes \
     --input openapi.yaml \
     --output comfyui_manager/data_models/generated_models.py \
     --output-model-type pydantic_v2.BaseModel

@@ -30,9 +30,15 @@ from .generated_models import (
    InstalledModelInfo,
    ComfyUIVersionInfo,

    # Import Fail Info Models
    ImportFailInfoBulkRequest,
    ImportFailInfoBulkResponse,
    ImportFailInfoItem,
    ImportFailInfoItem1,

    # Other models
    Kind,
    StatusStr,
    OperationType,
    OperationResult,
    ManagerPackInfo,
    ManagerPackInstalled,
    SelectedVersion,
@@ -49,6 +55,9 @@ from .generated_models import (
    UninstallPackParams,
    DisablePackParams,
    EnablePackParams,
    UpdateAllQueryParams,
    UpdateComfyUIQueryParams,
    ComfyUISwitchVersionQueryParams,
    QueueStatus,
    ManagerMappings,
    ModelMetadata,
@@ -59,8 +68,8 @@ from .generated_models import (
    HistoryResponse,
    HistoryListResponse,
    InstallType,
    OperationType,
    Result,
    SecurityLevel,
    RiskLevel,
)

__all__ = [
@@ -85,9 +94,15 @@ __all__ = [
    "InstalledModelInfo",
    "ComfyUIVersionInfo",

    # Import Fail Info Models
    "ImportFailInfoBulkRequest",
    "ImportFailInfoBulkResponse",
    "ImportFailInfoItem",
    "ImportFailInfoItem1",

    # Other models
    "Kind",
    "StatusStr",
    "OperationType",
    "OperationResult",
    "ManagerPackInfo",
    "ManagerPackInstalled",
    "SelectedVersion",
@@ -104,6 +119,9 @@ __all__ = [
    "UninstallPackParams",
    "DisablePackParams",
    "EnablePackParams",
    "UpdateAllQueryParams",
    "UpdateComfyUIQueryParams",
    "ComfyUISwitchVersionQueryParams",
    "QueueStatus",
    "ManagerMappings",
    "ModelMetadata",
@@ -114,6 +132,6 @@ __all__ = [
    "HistoryResponse",
    "HistoryListResponse",
    "InstallType",
    "OperationType",
    "Result",
    "SecurityLevel",
    "RiskLevel",
]
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename: openapi.yaml
-#   timestamp: 2025-06-14T01:44:21+00:00
+#   timestamp: 2025-07-31T04:52:26+00:00
 
 from __future__ import annotations
 
@@ -11,252 +11,298 @@ from typing import Any, Dict, List, Optional, Union
 from pydantic import BaseModel, Field, RootModel
 
 
-class Kind(str, Enum):
-    install = 'install'
-    uninstall = 'uninstall'
-    update = 'update'
-    update_all = 'update-all'
-    update_comfyui = 'update-comfyui'
-    fix = 'fix'
-    disable = 'disable'
-    enable = 'enable'
-    install_model = 'install-model'
+class OperationType(str, Enum):
+    install = "install"
+    uninstall = "uninstall"
+    update = "update"
+    update_comfyui = "update-comfyui"
+    fix = "fix"
+    disable = "disable"
+    enable = "enable"
+    install_model = "install-model"
 
 
-class StatusStr(str, Enum):
-    success = 'success'
-    error = 'error'
-    skip = 'skip'
+class OperationResult(str, Enum):
+    success = "success"
+    failed = "failed"
+    skipped = "skipped"
+    error = "error"
+    skip = "skip"
 
 
 class TaskExecutionStatus(BaseModel):
-    status_str: StatusStr = Field(..., description='Overall task execution status')
-    completed: bool = Field(..., description='Whether the task completed')
-    messages: List[str] = Field(..., description='Additional status messages')
+    status_str: OperationResult
+    completed: bool = Field(..., description="Whether the task completed")
+    messages: List[str] = Field(..., description="Additional status messages")
 
 
 class ManagerMessageName(str, Enum):
-    cm_task_completed = 'cm-task-completed'
-    cm_task_started = 'cm-task-started'
-    cm_queue_status = 'cm-queue-status'
+    cm_task_completed = "cm-task-completed"
+    cm_task_started = "cm-task-started"
+    cm_queue_status = "cm-queue-status"
 
 
 class ManagerPackInfo(BaseModel):
     id: str = Field(
         ...,
-        description='Either github-author/github-repo or name of pack from the registry',
+        description="Either github-author/github-repo or name of pack from the registry",
     )
-    version: str = Field(..., description='Semantic version or Git commit hash')
-    ui_id: Optional[str] = Field(None, description='Task ID - generated internally')
+    version: str = Field(..., description="Semantic version or Git commit hash")
+    ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
 
 
 class ManagerPackInstalled(BaseModel):
     ver: str = Field(
         ...,
-        description='The version of the pack that is installed (Git commit hash or semantic version)',
+        description="The version of the pack that is installed (Git commit hash or semantic version)",
     )
     cnr_id: Optional[str] = Field(
-        None, description='The name of the pack if installed from the registry'
+        None, description="The name of the pack if installed from the registry"
     )
     aux_id: Optional[str] = Field(
         None,
-        description='The name of the pack if installed from github (author/repo-name format)',
+        description="The name of the pack if installed from github (author/repo-name format)",
     )
-    enabled: bool = Field(..., description='Whether the pack is enabled')
+    enabled: bool = Field(..., description="Whether the pack is enabled")
 
 
 class SelectedVersion(str, Enum):
-    latest = 'latest'
-    nightly = 'nightly'
+    latest = "latest"
+    nightly = "nightly"
 
 
 class ManagerChannel(str, Enum):
-    default = 'default'
-    recent = 'recent'
-    legacy = 'legacy'
-    forked = 'forked'
-    dev = 'dev'
-    tutorial = 'tutorial'
+    default = "default"
+    recent = "recent"
+    legacy = "legacy"
+    forked = "forked"
+    dev = "dev"
+    tutorial = "tutorial"
 
 
 class ManagerDatabaseSource(str, Enum):
-    remote = 'remote'
-    local = 'local'
-    cache = 'cache'
+    remote = "remote"
+    local = "local"
+    cache = "cache"
 
 
 class ManagerPackState(str, Enum):
-    installed = 'installed'
-    disabled = 'disabled'
-    not_installed = 'not_installed'
-    import_failed = 'import_failed'
-    needs_update = 'needs_update'
+    installed = "installed"
+    disabled = "disabled"
+    not_installed = "not_installed"
+    import_failed = "import_failed"
+    needs_update = "needs_update"
 
 
 class ManagerPackInstallType(str, Enum):
-    git_clone = 'git-clone'
-    copy = 'copy'
-    cnr = 'cnr'
+    git_clone = "git-clone"
+    copy = "copy"
+    cnr = "cnr"
 
 
+class SecurityLevel(str, Enum):
+    strong = "strong"
+    normal = "normal"
+    normal_ = "normal-"
+    weak = "weak"
+
+
+class RiskLevel(str, Enum):
+    block = "block"
+    high_ = "high+"
+    high = "high"
+    middle_ = "middle+"
+    middle = "middle"
+
+
 class UpdateState(Enum):
-    false = 'false'
-    true = 'true'
+    false = "false"
+    true = "true"
 
 
 class ManagerPack(ManagerPackInfo):
     author: Optional[str] = Field(
         None, description="Pack author name or 'Unclaimed' if added via GitHub crawl"
     )
-    files: Optional[List[str]] = Field(None, description='Files included in the pack')
     reference: Optional[str] = Field(
-        None, description='The type of installation reference'
|
||||
files: Optional[List[str]] = Field(
|
||||
None,
|
||||
description="Repository URLs for installation (typically contains one GitHub URL)",
|
||||
)
|
||||
title: Optional[str] = Field(None, description='The display name of the pack')
|
||||
reference: Optional[str] = Field(
|
||||
None, description="The type of installation reference"
|
||||
)
|
||||
title: Optional[str] = Field(None, description="The display name of the pack")
|
||||
cnr_latest: Optional[SelectedVersion] = None
|
||||
repository: Optional[str] = Field(None, description='GitHub repository URL')
|
||||
repository: Optional[str] = Field(None, description="GitHub repository URL")
|
||||
state: Optional[ManagerPackState] = None
|
||||
update_state: Optional[UpdateState] = Field(
|
||||
None, alias='update-state', description='Update availability status'
|
||||
None, alias="update-state", description="Update availability status"
|
||||
)
|
||||
stars: Optional[int] = Field(None, description='GitHub stars count')
|
||||
last_update: Optional[datetime] = Field(None, description='Last update timestamp')
|
||||
health: Optional[str] = Field(None, description='Health status of the pack')
|
||||
description: Optional[str] = Field(None, description='Pack description')
|
||||
trust: Optional[bool] = Field(None, description='Whether the pack is trusted')
|
||||
stars: Optional[int] = Field(None, description="GitHub stars count")
|
||||
last_update: Optional[datetime] = Field(None, description="Last update timestamp")
|
||||
health: Optional[str] = Field(None, description="Health status of the pack")
|
||||
description: Optional[str] = Field(None, description="Pack description")
|
||||
trust: Optional[bool] = Field(None, description="Whether the pack is trusted")
|
||||
install_type: Optional[ManagerPackInstallType] = None
|
||||
|
||||
|
||||
class InstallPackParams(ManagerPackInfo):
|
||||
selected_version: Union[str, SelectedVersion] = Field(
|
||||
..., description='Semantic version, Git commit hash, latest, or nightly'
|
||||
..., description="Semantic version, Git commit hash, latest, or nightly"
|
||||
)
|
||||
repository: Optional[str] = Field(
|
||||
None,
|
||||
description='GitHub repository URL (required if selected_version is nightly)',
|
||||
description="GitHub repository URL (required if selected_version is nightly)",
|
||||
)
|
||||
pip: Optional[List[str]] = Field(None, description='PyPi dependency names')
|
||||
pip: Optional[List[str]] = Field(None, description="PyPi dependency names")
|
||||
mode: ManagerDatabaseSource
|
||||
channel: ManagerChannel
|
||||
skip_post_install: Optional[bool] = Field(
|
||||
None, description='Whether to skip post-installation steps'
|
||||
None, description="Whether to skip post-installation steps"
|
||||
)
|
||||
|
||||
|
||||
class UpdateAllPacksParams(BaseModel):
|
||||
mode: Optional[ManagerDatabaseSource] = None
|
||||
ui_id: Optional[str] = Field(None, description='Task ID - generated internally')
|
||||
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
|
||||
|
||||
|
||||
class UpdatePackParams(BaseModel):
|
||||
node_name: str = Field(..., description='Name of the node package to update')
|
||||
node_name: str = Field(..., description="Name of the node package to update")
|
||||
node_ver: Optional[str] = Field(
|
||||
None, description='Current version of the node package'
|
||||
None, description="Current version of the node package"
|
||||
)
|
||||
|
||||
|
||||
class UpdateComfyUIParams(BaseModel):
|
||||
is_stable: Optional[bool] = Field(
|
||||
True,
|
||||
description='Whether to update to stable version (true) or nightly (false)',
|
||||
description="Whether to update to stable version (true) or nightly (false)",
|
||||
)
|
||||
target_version: Optional[str] = Field(
|
||||
None,
|
||||
description='Specific version to switch to (for version switching operations)',
|
||||
description="Specific version to switch to (for version switching operations)",
|
||||
)
|
||||
|
||||
|
||||
class FixPackParams(BaseModel):
|
||||
node_name: str = Field(..., description='Name of the node package to fix')
|
||||
node_ver: str = Field(..., description='Version of the node package')
|
||||
node_name: str = Field(..., description="Name of the node package to fix")
|
||||
node_ver: str = Field(..., description="Version of the node package")
|
||||
|
||||
|
||||
class UninstallPackParams(BaseModel):
|
||||
node_name: str = Field(..., description='Name of the node package to uninstall')
|
||||
node_name: str = Field(..., description="Name of the node package to uninstall")
|
||||
is_unknown: Optional[bool] = Field(
|
||||
False, description='Whether this is an unknown/unregistered package'
|
||||
False, description="Whether this is an unknown/unregistered package"
|
||||
)
|
||||
|
||||
|
||||
class DisablePackParams(BaseModel):
|
||||
node_name: str = Field(..., description='Name of the node package to disable')
|
||||
node_name: str = Field(..., description="Name of the node package to disable")
|
||||
is_unknown: Optional[bool] = Field(
|
||||
False, description='Whether this is an unknown/unregistered package'
|
||||
False, description="Whether this is an unknown/unregistered package"
|
||||
)
|
||||
|
||||
|
||||
class EnablePackParams(BaseModel):
|
||||
cnr_id: str = Field(
|
||||
..., description='ComfyUI Node Registry ID of the package to enable'
|
||||
..., description="ComfyUI Node Registry ID of the package to enable"
|
||||
)
|
||||
|
||||
|
||||
class UpdateAllQueryParams(BaseModel):
|
||||
client_id: str = Field(
|
||||
..., description="Client identifier that initiated the request"
|
||||
)
|
||||
ui_id: str = Field(..., description="Base UI identifier for task tracking")
|
||||
mode: Optional[ManagerDatabaseSource] = None
|
||||
|
||||
|
||||
class UpdateComfyUIQueryParams(BaseModel):
|
||||
client_id: str = Field(
|
||||
..., description="Client identifier that initiated the request"
|
||||
)
|
||||
ui_id: str = Field(..., description="UI identifier for task tracking")
|
||||
stable: Optional[bool] = Field(
|
||||
True,
|
||||
description="Whether to update to stable version (true) or nightly (false)",
|
||||
)
|
||||
|
||||
|
||||
class ComfyUISwitchVersionQueryParams(BaseModel):
|
||||
ver: str = Field(..., description="Version to switch to")
|
||||
client_id: str = Field(
|
||||
..., description="Client identifier that initiated the request"
|
||||
)
|
||||
ui_id: str = Field(..., description="UI identifier for task tracking")
|
||||
|
||||
|
||||
class QueueStatus(BaseModel):
|
||||
total_count: int = Field(
|
||||
..., description='Total number of tasks (pending + running)'
|
||||
..., description="Total number of tasks (pending + running)"
|
||||
)
|
||||
done_count: int = Field(..., description='Number of completed tasks')
|
||||
in_progress_count: int = Field(..., description='Number of tasks currently running')
|
||||
done_count: int = Field(..., description="Number of completed tasks")
|
||||
in_progress_count: int = Field(..., description="Number of tasks currently running")
|
||||
pending_count: Optional[int] = Field(
|
||||
None, description='Number of tasks waiting to be executed'
|
||||
None, description="Number of tasks waiting to be executed"
|
||||
)
|
||||
is_processing: bool = Field(..., description='Whether the task worker is active')
|
||||
is_processing: bool = Field(..., description="Whether the task worker is active")
|
||||
client_id: Optional[str] = Field(
|
||||
None, description='Client ID (when filtered by client)'
|
||||
None, description="Client ID (when filtered by client)"
|
||||
)
|
||||
|
||||
|
||||
class ManagerMappings1(BaseModel):
|
||||
title_aux: Optional[str] = Field(None, description='The display name of the pack')
|
||||
title_aux: Optional[str] = Field(None, description="The display name of the pack")
|
||||
|
||||
|
||||
class ManagerMappings(
|
||||
RootModel[Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]]]
|
||||
):
|
||||
root: Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]] = Field(
|
||||
None, description='Tuple of [node_names, metadata]'
|
||||
None, description="Tuple of [node_names, metadata]"
|
||||
)
|
||||
|
||||
|
||||
class ModelMetadata(BaseModel):
|
||||
name: str = Field(..., description='Name of the model')
|
||||
type: str = Field(..., description='Type of model')
|
||||
base: Optional[str] = Field(None, description='Base model type')
|
||||
save_path: Optional[str] = Field(None, description='Path for saving the model')
|
||||
url: str = Field(..., description='Download URL')
|
||||
filename: str = Field(..., description='Target filename')
|
||||
ui_id: Optional[str] = Field(None, description='ID for UI reference')
|
||||
name: str = Field(..., description="Name of the model")
|
||||
type: str = Field(..., description="Type of model")
|
||||
base: Optional[str] = Field(None, description="Base model type")
|
||||
save_path: Optional[str] = Field(None, description="Path for saving the model")
|
||||
url: str = Field(..., description="Download URL")
|
||||
filename: str = Field(..., description="Target filename")
|
||||
ui_id: Optional[str] = Field(None, description="ID for UI reference")
|
||||
|
||||
|
||||
class InstallType(str, Enum):
|
||||
git = 'git'
|
||||
copy = 'copy'
|
||||
pip = 'pip'
|
||||
git = "git"
|
||||
copy = "copy"
|
||||
pip = "pip"
|
||||
|
||||
|
||||
class NodePackageMetadata(BaseModel):
|
||||
title: Optional[str] = Field(None, description='Display name of the node package')
|
||||
name: Optional[str] = Field(None, description='Repository/package name')
|
||||
files: Optional[List[str]] = Field(None, description='Source URLs for the package')
|
||||
title: Optional[str] = Field(None, description="Display name of the node package")
|
||||
name: Optional[str] = Field(None, description="Repository/package name")
|
||||
files: Optional[List[str]] = Field(None, description="Source URLs for the package")
|
||||
description: Optional[str] = Field(
|
||||
None, description='Description of the node package functionality'
|
||||
None, description="Description of the node package functionality"
|
||||
)
|
||||
install_type: Optional[InstallType] = Field(None, description='Installation method')
|
||||
version: Optional[str] = Field(None, description='Version identifier')
|
||||
install_type: Optional[InstallType] = Field(None, description="Installation method")
|
||||
version: Optional[str] = Field(None, description="Version identifier")
|
||||
id: Optional[str] = Field(
|
||||
None, description='Unique identifier for the node package'
|
||||
None, description="Unique identifier for the node package"
|
||||
)
|
||||
ui_id: Optional[str] = Field(None, description='ID for UI reference')
|
||||
channel: Optional[str] = Field(None, description='Source channel')
|
||||
mode: Optional[str] = Field(None, description='Source mode')
|
||||
ui_id: Optional[str] = Field(None, description="ID for UI reference")
|
||||
channel: Optional[str] = Field(None, description="Source channel")
|
||||
mode: Optional[str] = Field(None, description="Source mode")
|
||||
|
||||
|
||||
class SnapshotItem(RootModel[str]):
|
||||
root: str = Field(..., description='Name of the snapshot')
|
||||
root: str = Field(..., description="Name of the snapshot")
|
||||
|
||||
|
||||
class Error(BaseModel):
|
||||
error: str = Field(..., description='Error message')
|
||||
error: str = Field(..., description="Error message")
|
||||
|
||||
|
||||
class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]]):
|
||||
@@ -265,142 +311,171 @@ class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]
|
||||
|
||||
class HistoryListResponse(BaseModel):
|
||||
ids: Optional[List[str]] = Field(
|
||||
None, description='List of available batch history IDs'
|
||||
None, description="List of available batch history IDs"
|
||||
)
|
||||
|
||||
|
||||
class InstalledNodeInfo(BaseModel):
|
||||
name: str = Field(..., description='Node package name')
|
||||
version: str = Field(..., description='Installed version')
|
||||
repository_url: Optional[str] = Field(None, description='Git repository URL')
|
||||
name: str = Field(..., description="Node package name")
|
||||
version: str = Field(..., description="Installed version")
|
||||
repository_url: Optional[str] = Field(None, description="Git repository URL")
|
||||
install_method: str = Field(
|
||||
..., description='Installation method (cnr, git, pip, etc.)'
|
||||
..., description="Installation method (cnr, git, pip, etc.)"
|
||||
)
|
||||
enabled: Optional[bool] = Field(
|
||||
True, description='Whether the node is currently enabled'
|
||||
True, description="Whether the node is currently enabled"
|
||||
)
|
||||
install_date: Optional[datetime] = Field(
|
||||
None, description='ISO timestamp of installation'
|
||||
None, description="ISO timestamp of installation"
|
||||
)
|
||||
|
||||
|
||||
class InstalledModelInfo(BaseModel):
|
||||
name: str = Field(..., description='Model filename')
|
||||
path: str = Field(..., description='Full path to model file')
|
||||
type: str = Field(..., description='Model type (checkpoint, lora, vae, etc.)')
|
||||
size_bytes: Optional[int] = Field(None, description='File size in bytes', ge=0)
|
||||
hash: Optional[str] = Field(None, description='Model file hash for verification')
|
||||
name: str = Field(..., description="Model filename")
|
||||
path: str = Field(..., description="Full path to model file")
|
||||
type: str = Field(..., description="Model type (checkpoint, lora, vae, etc.)")
|
||||
size_bytes: Optional[int] = Field(None, description="File size in bytes", ge=0)
|
||||
hash: Optional[str] = Field(None, description="Model file hash for verification")
|
||||
install_date: Optional[datetime] = Field(
|
||||
None, description='ISO timestamp when added'
|
||||
None, description="ISO timestamp when added"
|
||||
)
|
||||
|
||||
|
||||
class ComfyUIVersionInfo(BaseModel):
|
||||
version: str = Field(..., description='ComfyUI version string')
|
||||
commit_hash: Optional[str] = Field(None, description='Git commit hash')
|
||||
branch: Optional[str] = Field(None, description='Git branch name')
|
||||
version: str = Field(..., description="ComfyUI version string")
|
||||
commit_hash: Optional[str] = Field(None, description="Git commit hash")
|
||||
branch: Optional[str] = Field(None, description="Git branch name")
|
||||
is_stable: Optional[bool] = Field(
|
||||
False, description='Whether this is a stable release'
|
||||
False, description="Whether this is a stable release"
|
||||
)
|
||||
last_updated: Optional[datetime] = Field(
|
||||
None, description='ISO timestamp of last update'
|
||||
None, description="ISO timestamp of last update"
|
||||
)
|
||||
|
||||
|
||||
class OperationType(str, Enum):
|
||||
install = 'install'
|
||||
update = 'update'
|
||||
uninstall = 'uninstall'
|
||||
fix = 'fix'
|
||||
disable = 'disable'
|
||||
enable = 'enable'
|
||||
install_model = 'install-model'
|
||||
|
||||
|
||||
class Result(str, Enum):
|
||||
success = 'success'
|
||||
failed = 'failed'
|
||||
skipped = 'skipped'
|
||||
|
||||
|
||||
class BatchOperation(BaseModel):
|
||||
operation_id: str = Field(..., description='Unique operation identifier')
|
||||
operation_type: OperationType = Field(..., description='Type of operation')
|
||||
operation_id: str = Field(..., description="Unique operation identifier")
|
||||
operation_type: OperationType
|
||||
target: str = Field(
|
||||
..., description='Target of the operation (node name, model name, etc.)'
|
||||
..., description="Target of the operation (node name, model name, etc.)"
|
||||
)
|
||||
target_version: Optional[str] = Field(
|
||||
None, description='Target version for the operation'
|
||||
None, description="Target version for the operation"
|
||||
)
|
||||
result: Result = Field(..., description='Operation result')
|
||||
result: OperationResult
|
||||
error_message: Optional[str] = Field(
|
||||
None, description='Error message if operation failed'
|
||||
None, description="Error message if operation failed"
|
||||
)
|
||||
start_time: datetime = Field(
|
||||
..., description='ISO timestamp when operation started'
|
||||
..., description="ISO timestamp when operation started"
|
||||
)
|
||||
end_time: Optional[datetime] = Field(
|
||||
None, description='ISO timestamp when operation completed'
|
||||
None, description="ISO timestamp when operation completed"
|
||||
)
|
||||
client_id: Optional[str] = Field(
|
||||
None, description='Client that initiated the operation'
|
||||
None, description="Client that initiated the operation"
|
||||
)
|
||||
|
||||
|
||||
class ComfyUISystemState(BaseModel):
|
||||
snapshot_time: datetime = Field(
|
||||
..., description='ISO timestamp when snapshot was taken'
|
||||
..., description="ISO timestamp when snapshot was taken"
|
||||
)
|
||||
comfyui_version: ComfyUIVersionInfo
|
||||
frontend_version: Optional[str] = Field(
|
||||
None, description='ComfyUI frontend version if available'
|
||||
None, description="ComfyUI frontend version if available"
|
||||
)
|
||||
python_version: str = Field(..., description='Python interpreter version')
|
||||
python_version: str = Field(..., description="Python interpreter version")
|
||||
platform_info: str = Field(
|
||||
..., description='Operating system and platform information'
|
||||
..., description="Operating system and platform information"
|
||||
)
|
||||
installed_nodes: Optional[Dict[str, InstalledNodeInfo]] = Field(
|
||||
None, description='Map of installed node packages by name'
|
||||
None, description="Map of installed node packages by name"
|
||||
)
|
||||
installed_models: Optional[Dict[str, InstalledModelInfo]] = Field(
|
||||
None, description='Map of installed models by name'
|
||||
None, description="Map of installed models by name"
|
||||
)
|
||||
manager_config: Optional[Dict[str, Any]] = Field(
|
||||
None, description='ComfyUI Manager configuration settings'
|
||||
None, description="ComfyUI Manager configuration settings"
|
||||
)
|
||||
comfyui_root_path: Optional[str] = Field(
|
||||
None, description="ComfyUI root installation directory"
|
||||
)
|
||||
model_paths: Optional[Dict[str, List[str]]] = Field(
|
||||
None, description="Map of model types to their configured paths"
|
||||
)
|
||||
manager_version: Optional[str] = Field(None, description="ComfyUI Manager version")
|
||||
security_level: Optional[SecurityLevel] = None
|
||||
network_mode: Optional[str] = Field(
|
||||
None, description="Network mode (online, offline, private)"
|
||||
)
|
||||
cli_args: Optional[Dict[str, Any]] = Field(
|
||||
None, description="Selected ComfyUI CLI arguments"
|
||||
)
|
||||
custom_nodes_count: Optional[int] = Field(
|
||||
None, description="Total number of custom node packages", ge=0
|
||||
)
|
||||
failed_imports: Optional[List[str]] = Field(
|
||||
None, description="List of custom nodes that failed to import"
|
||||
)
|
||||
pip_packages: Optional[Dict[str, str]] = Field(
|
||||
None, description="Map of installed pip packages to their versions"
|
||||
)
|
||||
embedded_python: Optional[bool] = Field(
|
||||
None,
|
||||
description="Whether ComfyUI is running from an embedded Python distribution",
|
||||
)
|
||||
|
||||
|
||||
class BatchExecutionRecord(BaseModel):
|
||||
batch_id: str = Field(..., description='Unique batch identifier')
|
||||
start_time: datetime = Field(..., description='ISO timestamp when batch started')
|
||||
batch_id: str = Field(..., description="Unique batch identifier")
|
||||
start_time: datetime = Field(..., description="ISO timestamp when batch started")
|
||||
end_time: Optional[datetime] = Field(
|
||||
None, description='ISO timestamp when batch completed'
|
||||
None, description="ISO timestamp when batch completed"
|
||||
)
|
||||
state_before: ComfyUISystemState
|
||||
state_after: Optional[ComfyUISystemState] = Field(
|
||||
None, description='System state after batch execution'
|
||||
None, description="System state after batch execution"
|
||||
)
|
||||
operations: Optional[List[BatchOperation]] = Field(
|
||||
None, description='List of operations performed in this batch'
|
||||
None, description="List of operations performed in this batch"
|
||||
)
|
||||
total_operations: Optional[int] = Field(
|
||||
0, description='Total number of operations in batch', ge=0
|
||||
0, description="Total number of operations in batch", ge=0
|
||||
)
|
||||
successful_operations: Optional[int] = Field(
|
||||
0, description='Number of successful operations', ge=0
|
||||
0, description="Number of successful operations", ge=0
|
||||
)
|
||||
failed_operations: Optional[int] = Field(
|
||||
0, description='Number of failed operations', ge=0
|
||||
0, description="Number of failed operations", ge=0
|
||||
)
|
||||
skipped_operations: Optional[int] = Field(
|
||||
0, description='Number of skipped operations', ge=0
|
||||
0, description="Number of skipped operations", ge=0
|
||||
)
|
||||
|
||||
|
||||
class ImportFailInfoBulkRequest(BaseModel):
|
||||
cnr_ids: Optional[List[str]] = Field(
|
||||
None, description="A list of CNR IDs to check."
|
||||
)
|
||||
urls: Optional[List[str]] = Field(
|
||||
None, description="A list of repository URLs to check."
|
||||
)
|
||||
|
||||
|
||||
class ImportFailInfoItem1(BaseModel):
|
||||
error: Optional[str] = None
|
||||
traceback: Optional[str] = None
|
||||
|
||||
|
||||
class ImportFailInfoItem(RootModel[Optional[ImportFailInfoItem1]]):
|
||||
root: Optional[ImportFailInfoItem1]
|
||||
|
||||
|
||||
class QueueTaskItem(BaseModel):
|
||||
ui_id: str = Field(..., description='Unique identifier for the task')
|
||||
client_id: str = Field(..., description='Client identifier that initiated the task')
|
||||
kind: Kind = Field(..., description='Type of task being performed')
|
||||
ui_id: str = Field(..., description="Unique identifier for the task")
|
||||
client_id: str = Field(..., description="Client identifier that initiated the task")
|
||||
kind: OperationType
|
||||
params: Union[
|
||||
InstallPackParams,
|
||||
UpdatePackParams,
|
||||
@@ -415,50 +490,56 @@ class QueueTaskItem(BaseModel):
|
||||
|
||||
|
||||
class TaskHistoryItem(BaseModel):
|
||||
ui_id: str = Field(..., description='Unique identifier for the task')
|
||||
client_id: str = Field(..., description='Client identifier that initiated the task')
|
||||
kind: str = Field(..., description='Type of task that was performed')
|
||||
timestamp: datetime = Field(..., description='ISO timestamp when task completed')
|
||||
result: str = Field(..., description='Task result message or details')
|
||||
ui_id: str = Field(..., description="Unique identifier for the task")
|
||||
client_id: str = Field(..., description="Client identifier that initiated the task")
|
||||
kind: str = Field(..., description="Type of task that was performed")
|
||||
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
|
||||
result: str = Field(..., description="Task result message or details")
|
||||
status: Optional[TaskExecutionStatus] = None
|
||||
batch_id: Optional[str] = Field(
|
||||
None, description="ID of the batch this task belongs to"
|
||||
)
|
||||
end_time: Optional[datetime] = Field(
|
||||
None, description="ISO timestamp when task execution ended"
|
||||
)
|
||||
|
||||
|
||||
class TaskStateMessage(BaseModel):
|
||||
history: Dict[str, TaskHistoryItem] = Field(
|
||||
..., description='Map of task IDs to their history items'
|
||||
..., description="Map of task IDs to their history items"
|
||||
)
|
||||
running_queue: List[QueueTaskItem] = Field(
|
||||
..., description='Currently executing tasks'
|
||||
..., description="Currently executing tasks"
|
||||
)
|
||||
pending_queue: List[QueueTaskItem] = Field(
|
||||
..., description='Tasks waiting to be executed'
|
||||
..., description="Tasks waiting to be executed"
|
||||
)
|
||||
installed_packs: Dict[str, ManagerPackInstalled] = Field(
|
||||
..., description='Map of currently installed node packages by name'
|
||||
..., description="Map of currently installed node packages by name"
|
||||
)
|
||||
|
||||
|
||||
class MessageTaskDone(BaseModel):
|
||||
ui_id: str = Field(..., description='Task identifier')
|
||||
result: str = Field(..., description='Task result message')
|
||||
kind: str = Field(..., description='Type of task')
|
||||
ui_id: str = Field(..., description="Task identifier")
|
||||
result: str = Field(..., description="Task result message")
|
||||
kind: str = Field(..., description="Type of task")
|
||||
status: Optional[TaskExecutionStatus] = None
|
||||
timestamp: datetime = Field(..., description='ISO timestamp when task completed')
|
||||
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
|
||||
state: TaskStateMessage
|
||||
|
||||
|
||||
class MessageTaskStarted(BaseModel):
|
||||
ui_id: str = Field(..., description='Task identifier')
|
||||
kind: str = Field(..., description='Type of task')
|
||||
timestamp: datetime = Field(..., description='ISO timestamp when task started')
|
||||
ui_id: str = Field(..., description="Task identifier")
|
||||
kind: str = Field(..., description="Type of task")
|
||||
timestamp: datetime = Field(..., description="ISO timestamp when task started")
|
||||
state: TaskStateMessage
|
||||
|
||||
|
||||
class MessageTaskFailed(BaseModel):
|
||||
ui_id: str = Field(..., description='Task identifier')
|
||||
error: str = Field(..., description='Error message')
|
||||
kind: str = Field(..., description='Type of task')
|
||||
timestamp: datetime = Field(..., description='ISO timestamp when task failed')
|
||||
ui_id: str = Field(..., description="Task identifier")
|
||||
error: str = Field(..., description="Error message")
|
||||
kind: str = Field(..., description="Type of task")
|
||||
timestamp: datetime = Field(..., description="ISO timestamp when task failed")
|
||||
state: TaskStateMessage
|
||||
|
||||
|
||||
@@ -466,11 +547,15 @@ class MessageUpdate(
|
||||
RootModel[Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed]]
|
||||
):
|
||||
root: Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed] = Field(
|
||||
..., description='Union type for all possible WebSocket message updates'
|
||||
..., description="Union type for all possible WebSocket message updates"
|
||||
)
|
||||
|
||||
|
||||
class HistoryResponse(BaseModel):
|
||||
history: Optional[Dict[str, TaskHistoryItem]] = Field(
|
||||
None, description='Map of task IDs to their history items'
|
||||
None, description="Map of task IDs to their history items"
|
||||
)
|
||||
|
||||
|
||||
class ImportFailInfoBulkResponse(RootModel[Optional[Dict[str, ImportFailInfoItem]]]):
|
||||
root: Optional[Dict[str, ImportFailInfoItem]] = None
|
||||
|
||||
[Two file diffs suppressed because they are too large.]
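As a quick orientation on the regenerated data models above, a minimal usage sketch (illustrative only; it assumes the models are exported from comfyui_manager.data_models, as imports elsewhere in this diff suggest):

# --- illustrative example, not part of the diff ---
from comfyui_manager.data_models import ManagerPackInstalled, OperationResult, QueueStatus

status = QueueStatus(
    total_count=2,
    done_count=1,
    in_progress_count=1,
    pending_count=0,
    is_processing=True,
)
assert status.client_id is None  # optional fields default to None

pack = ManagerPackInstalled(ver="1.2.0", cnr_id="comfyui-impact-pack", enabled=True)
print(pack.model_dump(exclude_none=True))  # pydantic v2 serialization

# OperationResult is a str-Enum, so raw strings round-trip cleanly:
assert OperationResult("success") is OperationResult.success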
@@ -8,3 +8,4 @@
7. Adjust the `__init__.py` files in the `data_models` directory to match/export the new data model (see the sketch below)
8. Only then, make the changes to the rest of the codebase
9. Run the CI tests to verify that the changes are working
- comfyui_manager is a Python package used to manage the ComfyUI server. It has two sub-packages, `glob` (the current major version) and `legacy` (the previous one); common utilities and data models live outside both. Development happens in the `glob` package. Ignore the `legacy` package entirely unless you have a very good reason to research how things were done in earlier major versions, and even then look only for the sake of knowledge or reflection, not to change code (unless explicitly asked to do so).
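For step 7 above, the export wiring typically amounts to one import plus one `__all__` entry; a hypothetical sketch (`NewModelParams` is an invented name, not a real model in this diff):

# --- hypothetical sketch for step 7; `NewModelParams` is an invented name ---
# comfyui_manager/data_models/__init__.py
from .generated_models import NewModelParams  # re-export the generated model

__all__ = [
    # ... existing exports ...
    "NewModelParams",  # advertise it as part of the public API
]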
@@ -1,6 +1,6 @@
from comfy.cli_args import args

SECURITY_MESSAGE_MIDDLE_OR_BELOW = "ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_MIDDLE = "ERROR: To use this action, a security_level of `normal or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_MIDDLE_P = "ERROR: To use this action, security_level must be `normal or below`, and network_mode must be set to `personal_cloud`. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
@@ -15,9 +15,6 @@ def is_loopback(address):
    return False


is_local_mode = is_loopback(args.listen)


model_dir_name_map = {
    "checkpoints": "checkpoints",
    "checkpoint": "checkpoints",
@@ -37,3 +34,22 @@ model_dir_name_map = {
    "unet": "diffusion_models",
    "diffusion_model": "diffusion_models",
}

# List of all model directory names used for checking installed models
MODEL_DIR_NAMES = [
    "checkpoints",
    "loras",
    "vae",
    "text_encoders",
    "diffusion_models",
    "clip_vision",
    "embeddings",
    "diffusers",
    "vae_approx",
    "controlnet",
    "gligen",
    "upscale_models",
    "hypernetworks",
    "photomaker",
    "classifiers",
]
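The new MODEL_DIR_NAMES list feeds the installed-model scan added later in this changeset; the lookup pattern it supports looks roughly like this (a sketch assuming a running ComfyUI, where folder_paths is importable):

# --- illustrative example, not part of the diff ---
import folder_paths

dir_name = model_dir_name_map.get("checkpoint")   # normalizes type aliases -> "checkpoints"
for name in folder_paths.get_filename_list(dir_name):
    print(name)                                   # every checkpoint file ComfyUI can see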
@@ -41,11 +41,12 @@ from ..common.enums import NetworkMode, SecurityLevel, DBMode
from ..common import context


version_code = [4, 0]
version_code = [4, 0, 2]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')


DEFAULT_CHANNEL = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"
DEFAULT_CHANNEL_LEGACY = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"


default_custom_nodes_path = None
@@ -153,14 +154,8 @@ def check_invalid_nodes():
cached_config = None
js_path = None

comfy_ui_required_revision = 1930
comfy_ui_required_commit_datetime = datetime(2024, 1, 24, 0, 0, 0)

comfy_ui_revision = "Unknown"
comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0)

channel_dict = None
valid_channels = {'default', 'local'}
valid_channels = {'default', 'local', DEFAULT_CHANNEL, DEFAULT_CHANNEL_LEGACY}
channel_list = None
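The version bump exercises the conditional patch suffix in version_str for the first time; a quick check of the expression:

# --- illustrative example, not part of the diff ---
version_code = [4, 0, 2]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
assert version_str == "V4.0.2"   # with version_code == [4, 0] this was "V4.0"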
@@ -304,18 +299,86 @@ class ManagedResult:
        return self


class NormalizedKeyDict:
    """Dict-like store that matches string keys case-insensitively (and ignores
    surrounding whitespace) while preserving the originally inserted key."""

    _MISSING = object()  # sentinel so pop() can distinguish "no default" from default=None

    def __init__(self):
        self._store = {}
        self._key_map = {}

    def _normalize_key(self, key):
        if isinstance(key, str):
            return key.strip().lower()
        return key

    def __setitem__(self, key, value):
        norm_key = self._normalize_key(key)
        self._key_map[norm_key] = key
        self._store[key] = value

    def __getitem__(self, key):
        norm_key = self._normalize_key(key)
        original_key = self._key_map[norm_key]
        return self._store[original_key]

    def __delitem__(self, key):
        norm_key = self._normalize_key(key)
        original_key = self._key_map.pop(norm_key)
        del self._store[original_key]

    def __contains__(self, key):
        return self._normalize_key(key) in self._key_map

    def get(self, key, default=None):
        return self[key] if key in self else default

    def setdefault(self, key, default=None):
        if key in self:
            return self[key]
        self[key] = default
        return default

    def pop(self, key, default=_MISSING):
        if key in self:
            val = self[key]
            del self[key]
            return val
        if default is not self._MISSING:
            return default
        raise KeyError(key)

    def keys(self):
        return self._store.keys()

    def values(self):
        return self._store.values()

    def items(self):
        return self._store.items()

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

    def __repr__(self):
        return repr(self._store)

    def to_dict(self):
        return dict(self._store)


class UnifiedManager:
    def __init__(self):
        self.installed_node_packages: dict[str, InstalledNodePackage] = {}

        self.cnr_inactive_nodes = {}      # node_id -> node_version -> fullpath
        self.nightly_inactive_nodes = {}  # node_id -> fullpath
        self.unknown_inactive_nodes = {}  # node_id -> repo url * fullpath
        self.active_nodes = {}            # node_id -> node_version * fullpath
        self.unknown_active_nodes = {}    # node_id -> repo url * fullpath
        self.cnr_map = {}                 # node_id -> cnr info
        self.repo_cnr_map = {}            # repo_url -> cnr info
        self.custom_node_map_cache = {}   # (channel, mode) -> augmented custom node list json
        self.cnr_inactive_nodes = NormalizedKeyDict()      # node_id -> node_version -> fullpath
        self.nightly_inactive_nodes = NormalizedKeyDict()  # node_id -> fullpath
        self.unknown_inactive_nodes = {}                   # node_id -> repo url * fullpath
        self.active_nodes = NormalizedKeyDict()            # node_id -> node_version * fullpath
        self.unknown_active_nodes = {}                     # node_id -> repo url * fullpath
        self.cnr_map = NormalizedKeyDict()                 # node_id -> cnr info
        self.repo_cnr_map = {}                             # repo_url -> cnr info
        self.custom_node_map_cache = {}                    # (channel, mode) -> augmented custom node list json
        self.processed_install = set()

    def get_module_name(self, x):
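Why the manager swaps plain dicts for NormalizedKeyDict in these maps, in one short sketch (assuming the class defined above; the pack name is invented):

# --- illustrative example, not part of the diff ---
packs = NormalizedKeyDict()
packs["ComfyUI-Impact-Pack"] = "3.2.1"

assert "comfyui-impact-pack" in packs                   # case-insensitive membership
assert packs.get("  COMFYUI-IMPACT-PACK  ") == "3.2.1"  # surrounding whitespace ignored
assert list(packs) == ["ComfyUI-Impact-Pack"]           # original spelling preserved on iteration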
@@ -721,7 +784,7 @@ class UnifiedManager:
        channel = normalize_channel(channel)
        nodes = await self.load_nightly(channel, mode)

        res = {}
        res = NormalizedKeyDict()
        added_cnr = set()
        for v in nodes.values():
            v = v[0]
@@ -1411,7 +1474,7 @@ def identify_node_pack_from_path(fullpath):
        # cnr
        cnr = cnr_utils.read_cnr_info(fullpath)
        if cnr is not None:
            return module_name, cnr['version'], cnr['id'], None
            return module_name, cnr['version'], cnr['original_name'], None

        return None
    else:
@@ -1461,7 +1524,10 @@ def get_installed_node_packs():
        if info is None:
            continue

        res[info[0]] = { 'ver': info[1], 'cnr_id': info[2], 'aux_id': info[3], 'enabled': False }
        # NOTE: don't add disabled nodepack if there is enabled nodepack
        original_name = info[0].split('@')[0]
        if original_name not in res:
            res[info[0]] = { 'ver': info[1], 'cnr_id': info[2], 'aux_id': info[3], 'enabled': False }

    return res
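The new guard in get_installed_node_packs keys on the name before any @version suffix, so a disabled copy no longer shadows an enabled one; a sketch of the effect (pack name and versions are invented):

# --- illustrative example, not part of the diff ---
res = {"comfyui-impact-pack": {"ver": "3.2.1", "enabled": True}}

info = ("comfyui-impact-pack@3.1.0", "3.1.0", "comfyui-impact-pack", None)
original_name = info[0].split('@')[0]   # -> "comfyui-impact-pack"
if original_name not in res:            # already present and enabled, so skip
    res[info[0]] = {"ver": info[1], "enabled": False}

assert "comfyui-impact-pack@3.1.0" not in res  # disabled duplicate was not added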
@@ -1558,16 +1624,18 @@ def read_config():
        config = configparser.ConfigParser(strict=False)
        config.read(context.manager_config_path)
        default_conf = config['default']
        manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False

        def get_bool(key, default_value):
            return default_conf[key].lower() == 'true' if key in default_conf else default_value

        manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
        manager_util.bypass_ssl = get_bool('bypass_ssl', False)

        return {
            'http_channel_enabled': get_bool('http_channel_enabled', False),
            'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
            'git_exe': default_conf.get('git_exe', ''),
            'use_uv': get_bool('use_uv', False),
            'use_uv': get_bool('use_uv', True),
            'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
            'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
            'share_option': default_conf.get('share_option', 'all').lower(),
@@ -1585,16 +1653,20 @@ def read_config():
        }

    except Exception:
        manager_util.use_uv = False
        import importlib.util
        # temporarily disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
        manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
        manager_util.bypass_ssl = False

        return {
            'http_channel_enabled': False,
            'preview_method': manager_funcs.get_current_preview_method(),
            'git_exe': '',
            'use_uv': False,
            'use_uv': manager_util.use_uv,
            'channel_url': DEFAULT_CHANNEL,
            'default_cache_as_channel_url': False,
            'share_option': 'all',
            'bypass_ssl': False,
            'bypass_ssl': manager_util.bypass_ssl,
            'file_logging': True,
            'component_policy': 'workflow',
            'update_policy': 'stable-comfyui',
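The get_bool helper reduces to a case-insensitive 'true' test with a caller-supplied fallback; a standalone sketch of the behaviour, using configparser directly:

# --- illustrative example, not part of the diff ---
import configparser

config = configparser.ConfigParser(strict=False)
config.read_string("""
[default]
use_uv = True
bypass_ssl = false
""")
default_conf = config["default"]

def get_bool(key, default_value):
    # case-insensitive 'true' check, falling back to the caller's default
    return default_conf[key].lower() == "true" if key in default_conf else default_value

assert get_bool("use_uv", False) is True       # 'True'  -> True
assert get_bool("bypass_ssl", True) is False   # 'false' -> False
assert get_bool("missing_key", True) is True   # absent  -> caller's default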
@@ -1712,16 +1784,6 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
    print(f"\n## ComfyUI-Manager: EXECUTE => {install_cmd}")
    code = manager_funcs.run_script(install_cmd, cwd=repo_path)

    if platform.system() != "Windows":
        try:
            if not os.environ.get('__COMFYUI_DESKTOP_VERSION__') and comfy_ui_commit_datetime.date() < comfy_ui_required_commit_datetime.date():
                print("\n\n###################################################################")
                print(f"[WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version.")
                print("[WARN] The extension installation feature may not work properly in the current installed ComfyUI version on Windows environment.")
                print("###################################################################\n\n")
        except Exception:
            pass

    if code != 0:
        if url is None:
            url = os.path.dirname(repo_path)
@@ -2791,7 +2853,7 @@ async def get_unified_total_nodes(channel, mode, regsitry_cache_mode='cache'):

    if cnr_id is not None:
        # cnr or nightly version
        cnr_ids.remove(cnr_id)
        cnr_ids.discard(cnr_id)
        updatable = False
        cnr = unified_manager.cnr_map[cnr_id]

@@ -2955,6 +3017,11 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
        info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
        info = info['custom_nodes']

    if 'pips' in info and info['pips']:
        pips = info['pips']
    else:
        pips = {}

    # for cnr restore
    cnr_info = info.get('cnr_custom_nodes')
    if cnr_info is not None:
@@ -3161,6 +3228,8 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
            unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
            cloned_repos.append(repo_name)

    manager_util.restore_pip_snapshot(pips, git_helper_extras)

    # print summary
    for x in cloned_repos:
        print(f"[ INSTALLED ] {x}")
[File diff suppressed because it is too large.]
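On the cnr_ids.remove → cnr_ids.discard change a few hunks up: discard is the idempotent variant, which matters if the same cnr_id is encountered twice during the scan (standard-library behaviour):

# --- illustrative example, not part of the diff ---
cnr_ids = {"comfyui-impact-pack"}
cnr_ids.discard("comfyui-impact-pack")  # removes the entry
cnr_ids.discard("comfyui-impact-pack")  # second call is a silent no-op
# cnr_ids.remove("comfyui-impact-pack") would raise KeyError here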
@@ -11,6 +11,15 @@ import hashlib
import folder_paths
from server import PromptServer
import logging
import sys


try:
    from nio import AsyncClient, LoginResponse, UploadResponse
    matrix_nio_is_available = True
except Exception:
    logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
    matrix_nio_is_available = False


def extract_model_file_names(json_data):
@@ -193,6 +202,14 @@ async def get_esheep_workflow_and_images(request):
    return web.Response(status=200, text=json.dumps(data))


@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
async def get_matrix_dep_status(request):
    if matrix_nio_is_available:
        return web.Response(status=200, text='available')
    else:
        return web.Response(status=200, text='unavailable')


def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
@@ -332,15 +349,12 @@ async def share_art(request):
    workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if "matrix" in share_destinations:
    if matrix_nio_is_available and "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            from matrix_client.api import MatrixHttpApi
            from matrix_client.client import MatrixClient

            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
@@ -348,20 +362,35 @@ async def share_art(request):
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = MatrixClient(homeserver)
            try:
                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
                if not token:
                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
            except Exception:
            client = AsyncClient(homeserver, matrix_auth['username'])

            # Login
            login_resp = await client.login(matrix_auth['password'])
            if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
                await client.close()
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            matrix = MatrixHttpApi(homeserver, token=token)
            # Upload asset
            with open(asset_filepath, 'rb') as f:
                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']
                upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
                asset_data = f.seek(0) or f.read()  # get size for info below
                if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
                    await client.close()
                    return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
                mxc_url = upload_resp.content_uri

            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']
            # Upload workflow JSON
            import io
            workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
            workflow_io = io.BytesIO(workflow_json_bytes)
            upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
            workflow_io.seek(0)
            if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
                await client.close()
                return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
            workflow_json_mxc_url = upload_workflow_resp.content_uri

            # Send text message
            text_content = ""
            if title:
                text_content += f"{title}\n"
@@ -369,11 +398,47 @@ async def share_art(request):
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            matrix.send_message(comfyui_share_room_id, text_content)
            matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
            matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
        except Exception:
            logging.exception("An error occurred")
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={"msgtype": "m.text", "body": text_content}
            )

            # Send image
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={
                    "msgtype": "m.image",
                    "body": filename,
                    "url": mxc_url,
                    "info": {
                        "mimetype": content_type,
                        "size": len(asset_data)
                    }
                }
            )

            # Send workflow JSON file
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={
                    "msgtype": "m.file",
                    "body": "workflow.json",
                    "url": workflow_json_mxc_url,
                    "info": {
                        "mimetype": "application/json",
                        "size": len(workflow_json_bytes)
                    }
                }
            )

            await client.close()

        except:
            import traceback
            traceback.print_exc()
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)

    return web.json_response({
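The migration above replaces the synchronous matrix_client API with matrix-nio's AsyncClient. Reduced to its skeleton, the call sequence is login, upload, room_send, close (a sketch assuming matrix-nio is installed; the homeserver, user, password, and room are placeholders):

# --- illustrative skeleton of the matrix-nio flow; credentials are placeholders ---
import asyncio
from nio import AsyncClient, LoginResponse, UploadResponse

async def minimal_share(path):
    client = AsyncClient("https://matrix.org", "@user:matrix.org")
    try:
        resp = await client.login("password")
        if not isinstance(resp, LoginResponse):
            return  # bad credentials
        with open(path, "rb") as f:
            upload, _keys = await client.upload(f, content_type="image/png", filename="art.png")
        if isinstance(upload, UploadResponse):
            await client.room_send(
                room_id="!room:matrix.org",
                message_type="m.room.message",
                content={"msgtype": "m.image", "body": "art.png", "url": upload.content_uri},
            )
    finally:
        await client.close()

# asyncio.run(minimal_share("art.png"))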
@@ -1,9 +1,10 @@
import os
import logging
import concurrent.futures
import folder_paths

from comfyui_manager.glob import manager_core as core
from comfyui_manager.glob.constants import model_dir_name_map
from comfyui_manager.glob.constants import model_dir_name_map, MODEL_DIR_NAMES


def get_model_dir(data, show_log=False):
@@ -72,3 +73,89 @@ def get_model_path(data, show_log=False):
        return os.path.join(base_model, os.path.basename(data["url"]))
    else:
        return os.path.join(base_model, data["filename"])


def check_model_installed(json_obj):
    def is_exists(model_dir_name, filename, url):
        if filename == "<huggingface>":
            filename = os.path.basename(url)

        dirs = folder_paths.get_folder_paths(model_dir_name)

        for x in dirs:
            if os.path.exists(os.path.join(x, filename)):
                return True

        return False

    total_models_files = set()
    for x in MODEL_DIR_NAMES:
        for y in folder_paths.get_filename_list(x):
            total_models_files.add(y)

    def process_model_phase(item):
        if (
            "diffusion" not in item["filename"]
            and "pytorch" not in item["filename"]
            and "model" not in item["filename"]
        ):
            # non-general name case
            if item["filename"] in total_models_files:
                item["installed"] = "True"
                return

        if item["save_path"] == "default":
            model_dir_name = model_dir_name_map.get(item["type"].lower())
            if model_dir_name is not None:
                item["installed"] = str(
                    is_exists(model_dir_name, item["filename"], item["url"])
                )
            else:
                item["installed"] = "False"
        else:
            model_dir_name = item["save_path"].split("/")[0]
            if model_dir_name in folder_paths.folder_names_and_paths:
                if is_exists(model_dir_name, item["filename"], item["url"]):
                    item["installed"] = "True"

        if "installed" not in item:
            if item["filename"] == "<huggingface>":
                filename = os.path.basename(item["url"])
            else:
                filename = item["filename"]

            fullpath = os.path.join(
                folder_paths.models_dir, item["save_path"], filename
            )

            item["installed"] = "True" if os.path.exists(fullpath) else "False"

    with concurrent.futures.ThreadPoolExecutor(8) as executor:
        for item in json_obj["models"]:
            executor.submit(process_model_phase, item)


async def check_whitelist_for_model(item):
    from comfyui_manager.data_models import ManagerDatabaseSource

    json_obj = await core.get_data_by_mode(ManagerDatabaseSource.cache.value, "model-list.json")

    for x in json_obj.get("models", []):
        if (
            x["save_path"] == item["save_path"]
            and x["base"] == item["base"]
            and x["filename"] == item["filename"]
        ):
            return True

    json_obj = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "model-list.json")

    for x in json_obj.get("models", []):
        if (
            x["save_path"] == item["save_path"]
            and x["base"] == item["base"]
            and x["filename"] == item["filename"]
        ):
            return True

    return False
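A sketch of how check_model_installed annotates a model list in place (the two entries are fabricated for illustration; this requires a running ComfyUI environment so folder_paths resolves real model directories):

# --- illustrative example, not part of the diff ---
json_obj = {
    "models": [
        {"type": "checkpoint", "save_path": "default",
         "filename": "sd_xl_base_1.0.safetensors",
         "url": "https://example.com/sd_xl_base_1.0.safetensors"},
        {"type": "lora", "save_path": "loras/style",
         "filename": "<huggingface>",
         "url": "https://example.com/style_lora.safetensors"},
    ]
}

check_model_installed(json_obj)  # fills in item["installed"] for each entry
for item in json_obj["models"]:
    print(item["filename"], item["installed"])  # "True" / "False" strings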
@@ -1,5 +1,6 @@
from comfyui_manager.glob import manager_core as core
from comfy.cli_args import args
from comfyui_manager.data_models import SecurityLevel, RiskLevel, ManagerDatabaseSource


def is_loopback(address):
@@ -12,24 +13,37 @@ def is_loopback(address):

def is_allowed_security_level(level):
    is_local_mode = is_loopback(args.listen)

    if level == "block":
    is_personal_cloud = core.get_config()['network_mode'].lower() == 'personal_cloud'

    if level == RiskLevel.block.value:
        return False
    elif level == "high":
    elif level == RiskLevel.high_.value:
        if is_local_mode:
            return core.get_config()["security_level"] in ["weak", "normal-"]
            return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
        elif is_personal_cloud:
            return core.get_config()['security_level'] == SecurityLevel.weak.value
        else:
            return core.get_config()["security_level"] == "weak"
    elif level == "middle":
        return core.get_config()["security_level"] in ["weak", "normal", "normal-"]
            return False
    elif level == RiskLevel.high.value:
        if is_local_mode:
            return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
        else:
            return core.get_config()['security_level'] == SecurityLevel.weak.value
    elif level == RiskLevel.middle_.value:
        if is_local_mode or is_personal_cloud:
            return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
        else:
            return False
    elif level == RiskLevel.middle.value:
        return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
    else:
        return True


async def get_risky_level(files, pip_packages):
    json_data1 = await core.get_data_by_mode("local", "custom-node-list.json")
    json_data1 = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "custom-node-list.json")
    json_data2 = await core.get_data_by_mode(
        "cache",
        ManagerDatabaseSource.cache.value,
        "custom-node-list.json",
        channel_url="https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main",
    )
@@ -40,7 +54,7 @@ async def get_risky_level(files, pip_packages):

    for x in files:
        if x not in all_urls:
            return "high"
            return RiskLevel.high_.value

    all_pip_packages = set()
    for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
@@ -48,6 +62,6 @@ async def get_risky_level(files, pip_packages):

    for p in pip_packages:
        if p not in all_pip_packages:
            return "block"
            return RiskLevel.block.value

    return "middle"
    return RiskLevel.middle_.value
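Because SecurityLevel and RiskLevel subclass str, the .value comparisons above interoperate with the raw strings still used elsewhere; a quick sketch:

# --- illustrative example, not part of the diff ---
from comfyui_manager.data_models import RiskLevel, SecurityLevel

assert RiskLevel.high_.value == "high+"
assert SecurityLevel.normal_.value == "normal-"
# str-Enum members also compare equal to their raw values directly:
assert RiskLevel.block == "block"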
@@ -222,9 +222,6 @@ function isBeforeFrontendVersion(compareVersion) {
    }
}

const is_legacy_front = () => isBeforeFrontendVersion('1.2.49');
const isNotNewManagerUI = () => isBeforeFrontendVersion('1.16.4');

document.head.appendChild(docStyle);

var update_comfyui_button = null;
@@ -1517,11 +1514,6 @@ app.registerExtension({
                tooltip: "Share"
            }).element
        );

        const shouldShowLegacyMenuItems = isNotNewManagerUI();
        if (shouldShowLegacyMenuItems) {
            app.menu?.settingsGroup.element.before(cmGroup.element);
        }
    }
    catch(exception) {
        console.log('ComfyUI is outdated. New style menu based features are disabled.');
@@ -552,6 +552,20 @@ export class ShareDialog extends ComfyDialog {
        this.matrix_destination_checkbox.style.color = "var(--fg-color)";
        this.matrix_destination_checkbox.checked = this.share_option === 'matrix'; //true;

        try {
            api.fetchApi(`/v2/manager/get_matrix_dep_status`)
                .then(response => response.text())
                .then(data => {
                    if(data == 'unavailable') {
                        matrix_destination_checkbox_text.style.textDecoration = "line-through";
                        this.matrix_destination_checkbox.disabled = true;
                        this.matrix_destination_checkbox.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
                        matrix_destination_checkbox_text.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
                    }
                })
                .catch(error => {});
        } catch (error) {}

        this.comfyworkflows_destination_checkbox = $el("input", { type: 'checkbox', id: "comfyworkflows_destination" }, [])
        const comfyworkflows_destination_checkbox_text = $el("label", {}, [" ComfyWorkflows.com"])
        this.comfyworkflows_destination_checkbox.style.color = "var(--fg-color)";
@@ -71,7 +71,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
this.allFiles = [];
|
||||
this.titleNum = 0;
|
||||
}
|
||||
|
||||
|
||||
createButtons() {
|
||||
const inputStyle = {
|
||||
display: "block",
|
||||
@@ -201,13 +201,15 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
});
|
||||
this.LockInput = $el("input", {
|
||||
type: "text",
|
||||
placeholder: "",
|
||||
style: {
|
||||
placeholder: "0",
|
||||
style: {
|
||||
width: "100px",
|
||||
padding: "7px",
|
||||
paddingLeft: "30px",
|
||||
borderRadius: "4px",
|
||||
border: "1px solid #ddd",
|
||||
boxSizing: "border-box",
|
||||
position: "relative",
|
||||
},
|
||||
oninput: (event) => {
|
||||
let input = event.target.value;
|
||||
@@ -301,7 +303,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
|
||||
const titleNumDom = $el(
|
||||
"label",
|
||||
{
|
||||
@@ -342,15 +344,11 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
["0/70"]
|
||||
);
|
||||
// Additional Inputs Section
|
||||
const additionalInputsSection = $el(
|
||||
"div",
|
||||
{ style: { ...sectionStyle, } },
|
||||
[
|
||||
$el("label", { style: labelStyle }, ["3️⃣ Title "]),
|
||||
this.TitleInput,
|
||||
titleNumDom,
|
||||
]
|
||||
);
|
||||
const additionalInputsSection = $el("div", { style: { ...sectionStyle } }, [
|
||||
$el("label", { style: labelStyle }, ["3️⃣ Title "]),
|
||||
this.TitleInput,
|
||||
titleNumDom,
|
||||
]);
|
||||
const SubtitleSection = $el("div", { style: sectionStyle }, [
|
||||
$el("label", { style: labelStyle }, ["4️⃣ Subtitle "]),
|
||||
this.SubTitleInput,
|
||||
@@ -379,7 +377,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
});
|
||||
|
||||
const blockChainSection_lock = $el("div", { style: sectionStyle }, [
|
||||
$el("label", { style: labelStyle }, ["6️⃣ Pay to download"]),
|
||||
$el("label", { style: labelStyle }, ["6️⃣ Download threshold"]),
|
||||
$el(
|
||||
"label",
|
||||
{
|
||||
@@ -392,11 +390,42 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
},
|
||||
[
|
||||
this.radioButtonsCheck_lock,
|
||||
$el("div", { style: { marginLeft: "5px" ,display:'flex',alignItems:'center'} }, [
|
||||
$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
|
||||
$el("span", { style: { marginLeft: "20px",marginRight:'10px' ,color:'#fff'} }, ["Price US$"]),
|
||||
this.LockInput
|
||||
]),
|
||||
$el(
|
||||
"div",
|
||||
{
|
||||
style: {
|
||||
marginLeft: "5px",
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
position: "relative",
|
||||
},
|
||||
},
|
||||
[
|
||||
$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
|
||||
$el(
|
||||
"span",
|
||||
{
|
||||
style: {
|
||||
marginLeft: "20px",
|
||||
marginRight: "10px",
|
||||
color: "#fff",
|
||||
},
|
||||
},
|
||||
["Unlock with"]
|
||||
),
|
||||
$el("img", {
|
||||
style: {
|
||||
width: "16px",
|
||||
height: "16px",
|
||||
position: "absolute",
|
||||
right: "75px",
|
||||
zIndex: "100",
|
||||
},
|
||||
src: "https://static.copus.io/images/admin/202507/prod/e2919a1d8f3c2d99d3b8fe27ff94b841.png",
|
||||
}),
|
||||
this.LockInput,
|
||||
]
|
||||
),
|
||||
]
|
||||
),
|
||||
$el(
|
||||
@@ -404,14 +433,25 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||
[
|
||||
this.radioButtonsCheckOff_lock,
|
||||
$el("span", { style: { marginLeft: "5px" } }, ["OFF"]),
|
||||
$el(
|
||||
"div",
|
||||
{
|
||||
style: {
|
||||
marginLeft: "5px",
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
},
|
||||
},
|
||||
[$el("span", { style: { marginLeft: "5px" } }, ["OFF"])]
|
||||
),
|
||||
]
|
||||
),
|
||||
|
||||
|
||||
$el(
|
||||
"p",
|
||||
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
|
||||
["Get paid from your workflow. You can change the price and withdraw your earnings on Copus."]
|
||||
[
|
||||
]
|
||||
),
|
||||
]);
|
||||
|
||||
@@ -432,7 +472,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
});
|
||||
|
||||
const blockChainSection = $el("div", { style: sectionStyle }, [
|
||||
$el("label", { style: labelStyle }, ["7️⃣ Store on blockchain "]),
|
||||
$el("label", { style: labelStyle }, ["8️⃣ Store on blockchain "]),
|
||||
$el(
|
||||
"label",
|
||||
{
|
||||
@@ -463,6 +503,139 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
),
|
||||
]);
|
||||
|
||||
this.ratingRadioButtonsCheck0 = $el("input", {
|
||||
type: "radio",
|
||||
name: "content_rating",
|
||||
value: "0",
|
||||
id: "content_rating0",
|
||||
});
|
||||
this.ratingRadioButtonsCheck1 = $el("input", {
|
||||
type: "radio",
|
||||
name: "content_rating",
|
||||
value: "1",
|
||||
id: "content_rating1",
|
||||
});
|
||||
this.ratingRadioButtonsCheck2 = $el("input", {
|
||||
type: "radio",
|
||||
name: "content_rating",
|
||||
value: "2",
|
||||
id: "content_rating2",
|
||||
});
|
||||
this.ratingRadioButtonsCheck_1 = $el("input", {
|
||||
type: "radio",
|
||||
name: "content_rating",
|
||||
value: "-1",
|
||||
id: "content_rating_1",
|
||||
checked: true,
|
||||
});
|
||||
|
||||
// content rating
|
||||
const contentRatingSection = $el("div", { style: sectionStyle }, [
|
||||
$el("label", { style: labelStyle }, ["7️⃣ Content rating "]),
|
||||
$el(
|
||||
"label",
|
||||
{
|
||||
style: {
|
||||
marginTop: "10px",
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
cursor: "pointer",
|
||||
},
|
||||
},
|
||||
[
|
||||
this.ratingRadioButtonsCheck0,
|
||||
$el("img", {
|
||||
style: {
|
||||
width: "12px",
|
||||
height: "12px",
|
||||
marginLeft: "5px",
|
||||
},
|
||||
src: "https://static.copus.io/images/client/202507/test/b9f17da83b054d53cd0cb4508c2c30dc.png",
|
||||
}),
|
||||
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||
"All ages",
|
||||
]),
|
||||
]
|
||||
),
|
||||
$el(
|
||||
"p",
|
||||
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||
["Safe for all viewers; no profanity, violence, or mature themes."]
|
||||
),
|
||||
$el(
|
||||
"label",
|
||||
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||
[
|
||||
this.ratingRadioButtonsCheck1,
|
||||
$el("img", {
|
||||
style: {
|
||||
width: "12px",
|
||||
height: "12px",
|
||||
marginLeft: "5px",
|
||||
},
|
||||
src: "https://static.copus.io/images/client/202507/test/7848bc0d3690671df21c7cf00c4cfc81.png",
|
||||
}),
|
||||
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||
"13+ (Teen)",
|
||||
]),
|
||||
]
|
||||
),
|
||||
$el(
|
||||
"p",
|
||||
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||
[
|
||||
"Mild language, light themes, or cartoon violence; no explicit content. ",
|
||||
]
|
||||
),
|
||||
$el(
|
||||
"label",
|
||||
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||
[
|
||||
this.ratingRadioButtonsCheck2,
|
||||
$el("img", {
|
||||
style: {
|
||||
width: "12px",
|
||||
height: "12px",
|
||||
marginLeft: "5px",
|
||||
},
|
||||
src: "https://static.copus.io/images/client/202507/test/bc51839c208d68d91173e43c23bff039.png",
|
||||
}),
|
||||
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||
"18+ (Explicit)",
|
||||
]),
|
||||
]
|
||||
),
|
||||
$el(
|
||||
"p",
|
||||
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||
[
|
||||
"Explicit content, including sexual content, strong violence, or intense themes. ",
|
||||
]
|
||||
),
|
||||
$el(
|
||||
"label",
|
||||
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||
[
|
||||
this.ratingRadioButtonsCheck_1,
|
||||
$el("img", {
|
||||
style: {
|
||||
width: "12px",
|
||||
height: "12px",
|
||||
marginLeft: "5px",
|
||||
},
|
||||
src: "https://static.copus.io/images/client/202507/test/5c802fdcaaea4e7bbed37393eec0d5ba.png",
|
||||
}),
|
||||
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
|
||||
"Not Rated",
|
||||
]),
|
||||
]
|
||||
),
|
||||
$el(
|
||||
"p",
|
||||
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
|
||||
["No age rating provided."]
|
||||
),
|
||||
]);
|
||||
|
||||
// Message Section
|
||||
this.message = $el(
|
||||
@@ -526,6 +699,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
DescriptionSection,
|
||||
// contestSection,
|
||||
blockChainSection_lock,
|
||||
contentRatingSection,
|
||||
blockChainSection,
|
||||
this.message,
|
||||
buttonsSection,
|
||||
@@ -534,7 +708,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
return layout;
|
||||
}
|
||||
/**
|
||||
* api
|
||||
* api
|
||||
* @param {url} path
|
||||
* @param {params} options
|
||||
* @param {statusText} statusText
|
||||
@@ -587,7 +761,9 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
url: data,
|
||||
});
|
||||
} else {
|
||||
throw new Error("make sure your API key is correct and try again later");
|
||||
throw new Error(
|
||||
"make sure your API key is correct and try again later"
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
if (e?.response?.status === 413) {
|
||||
@@ -628,8 +804,15 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
subTitle: this.SubTitleInput.value,
|
||||
content: this.descriptionInput.value,
|
||||
storeOnChain: this.radioButtonsCheck.checked ? true : false,
|
||||
lockState:this.radioButtonsCheck_lock.checked ? 2 : 0,
|
||||
unlockPrice:this.LockInput.value,
|
||||
lockState: this.radioButtonsCheck_lock.checked ? 2 : 0,
|
||||
unlockPrice: this.LockInput.value,
|
||||
rating: this.ratingRadioButtonsCheck0.checked
|
||||
? 0
|
||||
: this.ratingRadioButtonsCheck1.checked
|
||||
? 1
|
||||
: this.ratingRadioButtonsCheck2.checked
|
||||
? 2
|
||||
: -1,
|
||||
};
|
||||
|
||||
if (!this.keyInput.value) {
|
||||
@@ -644,8 +827,8 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
throw new Error("Title is required");
|
||||
}
|
||||
|
||||
if(this.radioButtonsCheck_lock.checked){
|
||||
if (!this.LockInput.value){
|
||||
if (this.radioButtonsCheck_lock.checked) {
|
||||
if (!this.LockInput.value) {
|
||||
throw new Error("Price is required");
|
||||
}
|
||||
}
|
||||
@@ -695,23 +878,23 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
"Uploading workflow..."
|
||||
);
|
||||
|
||||
if (res.status && res.data.status && res.data) {
|
||||
localStorage.setItem("copus_token",this.keyInput.value);
|
||||
const { data } = res.data;
|
||||
if (data) {
|
||||
const url = `${DEFAULT_HOMEPAGE_URL}/work/${data}`;
|
||||
this.message.innerHTML = `Workflow has been shared successfully. <a href="${url}" target="_blank">Click here to view it.</a>`;
|
||||
this.previewImage.src = "";
|
||||
this.previewImage.style.display = "none";
|
||||
this.uploadedImages = [];
|
||||
this.allFilesImages = [];
|
||||
this.allFiles = [];
|
||||
this.TitleInput.value = "";
|
||||
this.SubTitleInput.value = "";
|
||||
this.descriptionInput.value = "";
|
||||
this.selectedFile = null;
|
||||
}
|
||||
}
|
||||
if (res.status && res.data.status && res.data) {
|
||||
localStorage.setItem("copus_token", this.keyInput.value);
|
||||
const { data } = res.data;
|
||||
if (data) {
|
||||
const url = `${DEFAULT_HOMEPAGE_URL}/work/${data}`;
|
||||
this.message.innerHTML = `Workflow has been shared successfully. <a href="${url}" target="_blank">Click here to view it.</a>`;
|
||||
this.previewImage.src = "";
|
||||
this.previewImage.style.display = "none";
|
||||
this.uploadedImages = [];
|
||||
this.allFilesImages = [];
|
||||
this.allFiles = [];
|
||||
this.TitleInput.value = "";
|
||||
this.SubTitleInput.value = "";
|
||||
this.descriptionInput.value = "";
|
||||
this.selectedFile = null;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error("Error sharing workflow: " + e.message);
|
||||
}
|
||||
@@ -757,7 +940,7 @@ export class CopusShareDialog extends ComfyDialog {
|
||||
this.element.style.display = "block";
|
||||
this.previewImage.src = "";
|
||||
this.previewImage.style.display = "none";
|
||||
this.keyInput.value = apiToken!=null?apiToken:"";
|
||||
this.keyInput.value = apiToken != null ? apiToken : "";
|
||||
this.uploadedImages = [];
|
||||
this.allFilesImages = [];
|
||||
this.allFiles = [];
|
||||
|
||||
@@ -714,6 +714,7 @@ export class CustomNodesManager {
        link.href = rowItem.reference;
        link.target = '_blank';
        link.innerHTML = `<b>${title}</b>`;
        link.title = rowItem.originalData.id;
        container.appendChild(link);

        return container;
@@ -1625,17 +1626,35 @@ export class CustomNodesManager {
    getNodesInWorkflow() {
        let usedGroupNodes = new Set();
        let allUsedNodes = {};
        const visitedGraphs = new Set();

        for(let k in app.graph._nodes) {
            let node = app.graph._nodes[k];
        const visitGraph = (graph) => {
            if (!graph || visitedGraphs.has(graph)) return;
            visitedGraphs.add(graph);

            if(node.type.startsWith('workflow>')) {
                usedGroupNodes.add(node.type.slice(9));
                continue;
            const nodes = graph._nodes || graph.nodes || [];
            for(let k in nodes) {
                let node = nodes[k];
                if (!node) continue;

                // If it's a SubgraphNode, recurse into its graph and continue searching
                if (node.isSubgraphNode?.() && node.subgraph) {
                    visitGraph(node.subgraph);
                }

                if (!node.type) continue;

                // Group nodes / components
                if(typeof node.type === 'string' && node.type.startsWith('workflow>')) {
                    usedGroupNodes.add(node.type.slice(9));
                    continue;
                }

                allUsedNodes[node.type] = node;
            }
        };

            allUsedNodes[node.type] = node;
        }
        visitGraph(app.graph);

        for(let k of usedGroupNodes) {
            let subnodes = app.graph.extra.groupNodes[k]?.nodes;
@@ -41,11 +41,12 @@ from ..common.enums import NetworkMode, SecurityLevel, DBMode
from ..common import context


version_code = [4, 0]
version_code = [4, 0, 2]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')


DEFAULT_CHANNEL = "https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main"
DEFAULT_CHANNEL_LEGACY = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"


default_custom_nodes_path = None
@@ -160,7 +161,7 @@ comfy_ui_revision = "Unknown"
comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0)

channel_dict = None
valid_channels = {'default', 'local'}
valid_channels = {'default', 'local', DEFAULT_CHANNEL, DEFAULT_CHANNEL_LEGACY}
channel_list = None
@@ -304,18 +305,86 @@ class ManagedResult:
        return self


class NormalizedKeyDict:
    def __init__(self):
        self._store = {}
        self._key_map = {}

    def _normalize_key(self, key):
        if isinstance(key, str):
            return key.strip().lower()
        return key

    def __setitem__(self, key, value):
        norm_key = self._normalize_key(key)
        self._key_map[norm_key] = key
        self._store[key] = value

    def __getitem__(self, key):
        norm_key = self._normalize_key(key)
        original_key = self._key_map[norm_key]
        return self._store[original_key]

    def __delitem__(self, key):
        norm_key = self._normalize_key(key)
        original_key = self._key_map.pop(norm_key)
        del self._store[original_key]

    def __contains__(self, key):
        return self._normalize_key(key) in self._key_map

    def get(self, key, default=None):
        return self[key] if key in self else default

    def setdefault(self, key, default=None):
        if key in self:
            return self[key]
        self[key] = default
        return default

    def pop(self, key, default=None):
        if key in self:
            val = self[key]
            del self[key]
            return val
        if default is not None:
            return default
        raise KeyError(key)

    def keys(self):
        return self._store.keys()

    def values(self):
        return self._store.values()

    def items(self):
        return self._store.items()

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

    def __repr__(self):
        return repr(self._store)

    def to_dict(self):
        return dict(self._store)

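A minimal usage sketch for the `NormalizedKeyDict` above (editor-added, not from the repo): lookups become case- and whitespace-insensitive, while iteration reports the key exactly as it was first inserted.

```python
# Usage sketch for NormalizedKeyDict; the node id is a placeholder.
d = NormalizedKeyDict()
d["ComfyUI-Impact-Pack"] = "/custom_nodes/ComfyUI-Impact-Pack"

assert "comfyui-impact-pack" in d                 # __contains__ normalizes
assert d[" COMFYUI-IMPACT-PACK "] == "/custom_nodes/ComfyUI-Impact-Pack"
assert list(d.keys()) == ["ComfyUI-Impact-Pack"]  # original casing preserved

# Caveat kept by the diff: re-inserting under a different casing updates
# _key_map but leaves the earlier entry in _store, so len() can exceed the
# number of distinct normalized keys.
```

Presumably the motivation is that registry node ids compare case-insensitively; `cnr_map`, `active_nodes`, and the inactive-node maps are the ones converted in the `UnifiedManager.__init__` that follows.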
class UnifiedManager:
    def __init__(self):
        self.installed_node_packages: dict[str, InstalledNodePackage] = {}

        self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath
        self.nightly_inactive_nodes = {} # node_id -> fullpath
        self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
        self.active_nodes = {} # node_id -> node_version * fullpath
        self.unknown_active_nodes = {} # node_id -> repo url * fullpath
        self.cnr_map = {} # node_id -> cnr info
        self.repo_cnr_map = {} # repo_url -> cnr info
        self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
        self.cnr_inactive_nodes = NormalizedKeyDict() # node_id -> node_version -> fullpath
        self.nightly_inactive_nodes = NormalizedKeyDict() # node_id -> fullpath
        self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath
        self.active_nodes = NormalizedKeyDict() # node_id -> node_version * fullpath
        self.unknown_active_nodes = {} # node_id -> repo url * fullpath
        self.cnr_map = NormalizedKeyDict() # node_id -> cnr info
        self.repo_cnr_map = {} # repo_url -> cnr info
        self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json
        self.processed_install = set()

    def get_module_name(self, x):
@@ -721,7 +790,7 @@ class UnifiedManager:
        channel = normalize_channel(channel)
        nodes = await self.load_nightly(channel, mode)

        res = {}
        res = NormalizedKeyDict()
        added_cnr = set()
        for v in nodes.values():
            v = v[0]
@@ -1322,6 +1391,7 @@ class UnifiedManager:
            return ManagedResult('skip')
        elif self.is_disabled(node_id):
            return self.unified_enable(node_id)

        else:
            version_spec = self.resolve_unspecified_version(node_id)
@@ -1557,16 +1627,18 @@ def read_config():
        config = configparser.ConfigParser(strict=False)
        config.read(context.manager_config_path)
        default_conf = config['default']
        manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False

        def get_bool(key, default_value):
            return default_conf[key].lower() == 'true' if key in default_conf else False

        manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
        manager_util.bypass_ssl = get_bool('bypass_ssl', False)

        return {
            'http_channel_enabled': get_bool('http_channel_enabled', False),
            'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
            'git_exe': default_conf.get('git_exe', ''),
            'use_uv': get_bool('use_uv', False),
            'use_uv': get_bool('use_uv', True),
            'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
            'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
            'share_option': default_conf.get('share_option', 'all').lower(),
@@ -1585,15 +1657,17 @@ def read_config():

    except Exception:
        manager_util.use_uv = False
        manager_util.bypass_ssl = False

        return {
            'http_channel_enabled': False,
            'preview_method': manager_funcs.get_current_preview_method(),
            'git_exe': '',
            'use_uv': False,
            'use_uv': True,
            'channel_url': DEFAULT_CHANNEL,
            'default_cache_as_channel_url': False,
            'share_option': 'all',
            'bypass_ssl': False,
            'bypass_ssl': manager_util.bypass_ssl,
            'file_logging': True,
            'component_policy': 'workflow',
            'update_policy': 'stable-comfyui',
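One catch in this hunk: `get_bool` never reads its `default_value` parameter, so `get_bool('use_uv', True)` still evaluates to `False` when the key is absent from the config file; the `True` default only materializes on the exception path above. A suggested correction (editor sketch, not part of the diff):

```python
# Suggested fix: honor default_value when the key is missing from the config.
def get_bool(key, default_value):
    if key in default_conf:
        return default_conf[key].lower() == 'true'
    return default_value
```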
@@ -2776,7 +2850,7 @@ async def get_unified_total_nodes(channel, mode, regsitry_cache_mode='cache'):

        if cnr_id is not None:
            # cnr or nightly version
            cnr_ids.remove(cnr_id)
            cnr_ids.discard(cnr_id)
            updatable = False
            cnr = unified_manager.cnr_map[cnr_id]
@@ -2940,6 +3014,11 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
        info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
        info = info['custom_nodes']

    if 'pips' in info and info['pips']:
        pips = info['pips']
    else:
        pips = {}

    # for cnr restore
    cnr_info = info.get('cnr_custom_nodes')
    if cnr_info is not None:
@@ -3146,6 +3225,8 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):
            unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False)
            cloned_repos.append(repo_name)

    manager_util.restore_pip_snapshot(pips, git_helper_extras)

    # print summary
    for x in cloned_repos:
        print(f"[ INSTALLED ] {x}")
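As the code reads, `pips` and `cnr_custom_nodes` are looked up inside the `custom_nodes` section of the snapshot, after the `info = info['custom_nodes']` reassignment. An inferred shape (keys taken from the lookups above, values are placeholders):

```python
# Inferred snapshot layout implied by the lookups above; not a real file.
info = {
    'custom_nodes': {
        'pips': {'numpy': 'numpy==1.26.4'},                    # optional
        'cnr_custom_nodes': {'comfyui-impact-pack': '8.9.0'},  # optional
        # ... other node entries handled elsewhere in restore_snapshot ...
    },
}
```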
@@ -23,6 +23,7 @@ from ..common import manager_util
from ..common import cm_global
from ..common import manager_downloader
from ..common import context
from ..common import manager_security


logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
@@ -36,7 +37,8 @@ logging.info("[ComfyUI-Manager] network_mode: " + network_mode_description)
comfy_ui_hash = "-"
comfyui_tag = None

SECURITY_MESSAGE_MIDDLE_OR_BELOW = "ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_MIDDLE = "ERROR: To use this action, a security_level of `normal or below` is required. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_MIDDLE_P = "ERROR: To use this action, security_level must be `normal or below`, and network_mode must be set to `personal_cloud`. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/Comfy-Org/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
@@ -93,13 +95,27 @@ model_dir_name_map = {


def is_allowed_security_level(level):
    is_personal_cloud = core.get_config()['network_mode'].lower() == 'personal_cloud'

    if level == 'block':
        return False
    elif level == 'high+':
        if is_local_mode:
            return core.get_config()['security_level'] in ['weak', 'normal-']
        elif is_personal_cloud:
            return core.get_config()['security_level'] == 'weak'
        else:
            return False
    elif level == 'high':
        if is_local_mode:
            return core.get_config()['security_level'] in ['weak', 'normal-']
        else:
            return core.get_config()['security_level'] == 'weak'
    elif level == 'middle+':
        if is_local_mode or is_personal_cloud:
            return core.get_config()['security_level'] in ['weak', 'normal', 'normal-']
        else:
            return False
    elif level == 'middle':
        return core.get_config()['security_level'] in ['weak', 'normal', 'normal-']
    else:
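The practical effect of the new `'high+'` tier is that remote (non-loopback) listeners only pass when `network_mode` is `personal_cloud` and the security level is `weak`. A hand-derived condensed truth table for that branch (illustrative only, not an executable excerpt):

```python
# (is_local_mode, is_personal_cloud, security_level) -> allowed for 'high+'.
cases = {
    (True,  False, 'weak'):    True,   # local listen: weak/normal- pass
    (True,  False, 'normal-'): True,
    (True,  False, 'normal'):  False,
    (False, True,  'weak'):    True,   # personal_cloud remote: only weak
    (False, True,  'normal-'): False,
    (False, False, 'weak'):    False,  # plain remote listen: never
}
```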
@@ -116,7 +132,7 @@ async def get_risky_level(files, pip_packages):

    for x in files:
        if x not in all_urls:
            return "high"
            return "high+"

    all_pip_packages = set()
    for x in json_data1['custom_nodes'] + json_data2['custom_nodes']:
@@ -126,7 +142,7 @@ async def get_risky_level(files, pip_packages):
        if p not in all_pip_packages:
            return "block"

    return "middle"
    return "middle+"


class ManagerFuncsInComfyUI(core.ManagerFuncs):
@@ -650,7 +666,7 @@ async def task_worker():
            return 'success'

        except Exception as e:
            logging.error(f"[ComfyUI-Manager] ERROR: {e}", file=sys.stderr)
            logging.error(f"[ComfyUI-Manager] ERROR: {e}")

            return f"Model installation error: {model_url}"
@@ -758,29 +774,29 @@ async def queue_batch(request):
                for x in v:
                    res = await _uninstall_custom_node(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])
                else:
                    res = await _install_custom_node(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])

            elif k == 'install':
                for x in v:
                    res = await _install_custom_node(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])

            elif k == 'uninstall':
                for x in v:
                    res = await _uninstall_custom_node(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])

            elif k == 'update':
                for x in v:
                    res = await _update_custom_node(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])

            elif k == 'update_comfyui':
                await update_comfyui(None)
@@ -793,13 +809,13 @@ async def queue_batch(request):
                for x in v:
                    res = await _install_model(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])

            elif k == 'fix':
                for x in v:
                    res = await _fix_custom_node(x)
                    if res.status != 200:
                        failed.add(x[0])
                        failed.add(x['id'])

    with task_worker_lock:
        finalize_temp_queue_batch(json_data, failed)
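The `x[0]` to `x['id']` change implies the queued batch entries are now objects rather than tuples. A plausible shape for the payload handled by `queue_batch` is sketched below; only the `id` key is confirmed by this diff, everything else is assumption.

```python
# Hypothetical batch payload; only 'id' is confirmed by the diff above.
json_data = {
    "install": [
        {"id": "comfyui-impact-pack", "version": "latest"},  # 'version' assumed
    ],
    "uninstall": [
        {"id": "some-old-node"},
    ],
}
# Per-entry failures are now collected by id: failed.add(x['id'])
```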
@@ -910,8 +926,8 @@ async def update_all(request):


async def _update_all(json_data):
    if not is_allowed_security_level('middle'):
        logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
    if not is_allowed_security_level('middle+'):
        logging.error(SECURITY_MESSAGE_MIDDLE_P)
        return web.Response(status=403)

    with task_worker_lock:
@@ -1056,14 +1072,17 @@ async def fetch_customnode_list(request):
    if channel != 'local':
        found = 'custom'

        for name, url in core.get_channel_dict().items():
            if url == channel:
                found = name
                break
        if channel == core.DEFAULT_CHANNEL or channel == core.DEFAULT_CHANNEL_LEGACY:
            channel = 'default'
        else:
            for name, url in core.get_channel_dict().items():
                if url == channel:
                    found = name
                    break

        channel = found
            channel = found

    result = dict(channel=channel, node_packs=node_packs)
    result = dict(channel=channel, node_packs=node_packs.to_dict())

    return web.json_response(result, content_type='application/json')
@@ -1162,7 +1181,7 @@ async def get_snapshot_list(request):
@routes.get("/v2/snapshot/remove")
async def remove_snapshot(request):
    if not is_allowed_security_level('middle'):
        logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
        logging.error(SECURITY_MESSAGE_MIDDLE)
        return web.Response(status=403)

    try:
@@ -1179,8 +1198,8 @@ async def remove_snapshot(request):

@routes.get("/v2/snapshot/restore")
async def restore_snapshot(request):
    if not is_allowed_security_level('middle'):
        logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
    if not is_allowed_security_level('middle+'):
        logging.error(SECURITY_MESSAGE_MIDDLE_P)
        return web.Response(status=403)

    try:
@@ -1292,6 +1311,65 @@ async def import_fail_info(request):
        return web.Response(status=400)

@routes.post("/v2/customnode/import_fail_info_bulk")
|
||||
async def import_fail_info_bulk(request):
|
||||
try:
|
||||
json_data = await request.json()
|
||||
|
||||
# Basic validation - ensure we have either cnr_ids or urls
|
||||
if not isinstance(json_data, dict):
|
||||
return web.Response(status=400, text="Request body must be a JSON object")
|
||||
|
||||
if "cnr_ids" not in json_data and "urls" not in json_data:
|
||||
return web.Response(
|
||||
status=400, text="Either 'cnr_ids' or 'urls' field is required"
|
||||
)
|
||||
|
||||
await core.unified_manager.reload('cache')
|
||||
await core.unified_manager.get_custom_nodes('default', 'cache')
|
||||
|
||||
results = {}
|
||||
|
||||
if "cnr_ids" in json_data:
|
||||
if not isinstance(json_data["cnr_ids"], list):
|
||||
return web.Response(status=400, text="'cnr_ids' must be an array")
|
||||
for cnr_id in json_data["cnr_ids"]:
|
||||
if not isinstance(cnr_id, str):
|
||||
results[cnr_id] = {"error": "cnr_id must be a string"}
|
||||
continue
|
||||
module_name = core.unified_manager.get_module_name(cnr_id)
|
||||
if module_name is not None:
|
||||
info = cm_global.error_dict.get(module_name)
|
||||
if info is not None:
|
||||
results[cnr_id] = info
|
||||
else:
|
||||
results[cnr_id] = None
|
||||
else:
|
||||
results[cnr_id] = None
|
||||
|
||||
if "urls" in json_data:
|
||||
if not isinstance(json_data["urls"], list):
|
||||
return web.Response(status=400, text="'urls' must be an array")
|
||||
for url in json_data["urls"]:
|
||||
if not isinstance(url, str):
|
||||
results[url] = {"error": "url must be a string"}
|
||||
continue
|
||||
module_name = core.unified_manager.get_module_name(url)
|
||||
if module_name is not None:
|
||||
info = cm_global.error_dict.get(module_name)
|
||||
if info is not None:
|
||||
results[url] = info
|
||||
else:
|
||||
results[url] = None
|
||||
else:
|
||||
results[url] = None
|
||||
|
||||
return web.json_response(results)
|
||||
except Exception as e:
|
||||
logging.error(f"[ComfyUI-Manager] Error processing bulk import fail info: {e}")
|
||||
return web.Response(status=500, text="Internal server error")
|
||||
|
||||
|
||||
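Since `/v2/customnode/import_fail_info_bulk` is new in this compared range, here is a hedged client-side sketch; the ids are placeholders and the host/port assume a default local ComfyUI instance. The response maps each requested id or URL to either `null` (no recorded import failure) or the failure info held in `cm_global.error_dict`:

```python
# Editor sketch of a bulk query against the new endpoint; ids are
# placeholders and 127.0.0.1:8188 assumes a default local ComfyUI.
import json
import urllib.request

payload = {
    "cnr_ids": ["comfyui-impact-pack"],
    "urls": ["https://github.com/user/some-node"],
}
req = urllib.request.Request(
    "http://127.0.0.1:8188/v2/customnode/import_fail_info_bulk",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    # e.g. {"comfyui-impact-pack": null, "https://github.com/user/some-node": {...}}
    print(json.load(resp))
```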
@routes.post("/v2/manager/queue/reinstall")
|
||||
async def reinstall_custom_node(request):
|
||||
await uninstall_custom_node(request)
|
||||
@@ -1356,8 +1434,8 @@ async def install_custom_node(request):
|
||||
|
||||
|
||||
async def _install_custom_node(json_data):
|
||||
if not is_allowed_security_level('middle'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
||||
if not is_allowed_security_level('middle+'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_P)
|
||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||
|
||||
# non-nightly cnr is safe
|
||||
@@ -1462,7 +1540,7 @@ async def _fix_custom_node(json_data):
|
||||
|
||||
@routes.post("/v2/customnode/install/git_url")
|
||||
async def install_custom_node_git_url(request):
|
||||
if not is_allowed_security_level('high'):
|
||||
if not is_allowed_security_level('high+'):
|
||||
logging.error(SECURITY_MESSAGE_NORMAL_MINUS)
|
||||
return web.Response(status=403)
|
||||
|
||||
@@ -1482,7 +1560,7 @@ async def install_custom_node_git_url(request):
|
||||
|
||||
@routes.post("/v2/customnode/install/pip")
|
||||
async def install_custom_node_pip(request):
|
||||
if not is_allowed_security_level('high'):
|
||||
if not is_allowed_security_level('high+'):
|
||||
logging.error(SECURITY_MESSAGE_NORMAL_MINUS)
|
||||
return web.Response(status=403)
|
||||
|
||||
@@ -1500,7 +1578,7 @@ async def uninstall_custom_node(request):
|
||||
|
||||
async def _uninstall_custom_node(json_data):
|
||||
if not is_allowed_security_level('middle'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||
|
||||
node_id = json_data.get('id')
|
||||
@@ -1526,7 +1604,7 @@ async def update_custom_node(request):
|
||||
|
||||
async def _update_custom_node(json_data):
|
||||
if not is_allowed_security_level('middle'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||
|
||||
node_id = json_data.get('id')
|
||||
@@ -1617,8 +1695,8 @@ async def install_model(request):
|
||||
|
||||
|
||||
async def _install_model(json_data):
|
||||
if not is_allowed_security_level('middle'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
||||
if not is_allowed_security_level('middle+'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_P)
|
||||
return web.Response(status=403, text="A security error has occurred. Please check the terminal logs")
|
||||
|
||||
# validate request
|
||||
@@ -1626,7 +1704,7 @@ async def _install_model(json_data):
|
||||
logging.error(f"[ComfyUI-Manager] Invalid model install request is detected: {json_data}")
|
||||
return web.Response(status=400, text="Invalid model install request is detected")
|
||||
|
||||
if not json_data['filename'].endswith('.safetensors') and not is_allowed_security_level('high'):
|
||||
if not json_data['filename'].endswith('.safetensors') and not is_allowed_security_level('high+'):
|
||||
models_json = await core.get_data_by_mode('cache', 'model-list.json', 'default')
|
||||
|
||||
is_belongs_to_whitelist = False
|
||||
@@ -1783,7 +1861,7 @@ async def get_notice_legacy(request):
|
||||
@routes.get("/v2/manager/reboot")
|
||||
def restart(self):
|
||||
if not is_allowed_security_level('middle'):
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
|
||||
logging.error(SECURITY_MESSAGE_MIDDLE)
|
||||
return web.Response(status=403)
|
||||
|
||||
try:
|
||||
@@ -1949,9 +2027,10 @@ if not os.path.exists(context.manager_config_path):
|
||||
core.write_config()
|
||||
|
||||
|
||||
cm_global.register_extension('ComfyUI-Manager',
|
||||
{'version': core.version,
|
||||
'name': 'ComfyUI Manager',
|
||||
'nodes': {},
|
||||
'description': 'This extension provides the ability to manage custom nodes in ComfyUI.', })
|
||||
|
||||
# policy setup
|
||||
manager_security.add_handler_policy(reinstall_custom_node, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||
manager_security.add_handler_policy(install_custom_node, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||
manager_security.add_handler_policy(fix_custom_node, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||
manager_security.add_handler_policy(install_custom_node_git_url, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||
manager_security.add_handler_policy(install_custom_node_pip, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||
manager_security.add_handler_policy(install_model, manager_security.HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD)
|
||||
|
||||
@@ -10,6 +10,16 @@ import hashlib

import folder_paths
from server import PromptServer
import logging
import sys


try:
    from nio import AsyncClient, LoginResponse, UploadResponse
    matrix_nio_is_available = True
except Exception:
    logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
    matrix_nio_is_available = False


def extract_model_file_names(json_data):
@@ -192,6 +202,14 @@ async def get_esheep_workflow_and_images(request):
    return web.Response(status=200, text=json.dumps(data))


@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
async def get_matrix_dep_status(request):
    if matrix_nio_is_available:
        return web.Response(status=200, text='available')
    else:
        return web.Response(status=200, text='unavailable')


def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
@@ -331,15 +349,12 @@ async def share_art(request):
    workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if "matrix" in share_destinations:
    if matrix_nio_is_available and "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            from matrix_client.api import MatrixHttpApi
            from matrix_client.client import MatrixClient

            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
@@ -347,20 +362,35 @@ async def share_art(request):
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = MatrixClient(homeserver)
            try:
                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
                if not token:
                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
            except Exception:
            client = AsyncClient(homeserver, matrix_auth['username'])

            # Login
            login_resp = await client.login(matrix_auth['password'])
            if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
                await client.close()
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            matrix = MatrixHttpApi(homeserver, token=token)
            # Upload asset
            with open(asset_filepath, 'rb') as f:
                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']
                upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
                asset_data = f.seek(0) or f.read() # get size for info below
            if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
                await client.close()
                return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
            mxc_url = upload_resp.content_uri

            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']
            # Upload workflow JSON
            import io
            workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
            workflow_io = io.BytesIO(workflow_json_bytes)
            upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
            workflow_io.seek(0)
            if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
                await client.close()
                return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
            workflow_json_mxc_url = upload_workflow_resp.content_uri

            # Send text message
            text_content = ""
            if title:
                text_content += f"{title}\n"
@@ -368,10 +398,45 @@ async def share_art(request):
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            matrix.send_message(comfyui_share_room_id, text_content)
            matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
            matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
        except Exception:
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={"msgtype": "m.text", "body": text_content}
            )

            # Send image
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={
                    "msgtype": "m.image",
                    "body": filename,
                    "url": mxc_url,
                    "info": {
                        "mimetype": content_type,
                        "size": len(asset_data)
                    }
                }
            )

            # Send workflow JSON file
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={
                    "msgtype": "m.file",
                    "body": "workflow.json",
                    "url": workflow_json_mxc_url,
                    "info": {
                        "mimetype": "application/json",
                        "size": len(workflow_json_bytes)
                    }
                }
            )

            await client.close()

        except:
            import traceback
            traceback.print_exc()
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)
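The hunk above swaps the synchronous `matrix_client` API for the async `matrix-nio` client, which avoids blocking the aiohttp event loop during uploads; note also the `f.seek(0) or f.read()` idiom, which re-reads the already-uploaded file purely to report its byte size in the message `info`. A condensed standalone sketch of the same nio flow (homeserver, credentials, room id, and file name are placeholders):

```python
# Standalone matrix-nio sketch mirroring the handler above; all values here
# are placeholders, not values from the repo.
import asyncio
from nio import AsyncClient, LoginResponse, UploadResponse

async def share_image(path):
    client = AsyncClient("https://matrix.org", "@user:matrix.org")
    try:
        login = await client.login("password")
        if not isinstance(login, LoginResponse):
            raise RuntimeError("login failed")
        with open(path, "rb") as f:
            upload, _keys = await client.upload(f, content_type="image/png", filename=path)
        if not isinstance(upload, UploadResponse):
            raise RuntimeError("upload failed")
        await client.room_send(
            room_id="!example:matrix.org",
            message_type="m.room.message",
            content={"msgtype": "m.image", "body": path, "url": upload.content_uri},
        )
    finally:
        await client.close()

asyncio.run(share_image("art.png"))
```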
@@ -1973,6 +1973,97 @@
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
|
||||
"size": "375.0MB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "sam2.1_hiera_tiny.pt",
|
||||
"type": "sam2.1",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2.1 hiera model (tiny)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2.1_hiera_tiny.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt",
|
||||
"size": "149.0MB"
|
||||
},
|
||||
{
|
||||
"name": "sam2.1_hiera_small.pt",
|
||||
"type": "sam2.1",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2.1 hiera model (small)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2.1_hiera_small.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt",
|
||||
"size": "176.0MB"
|
||||
},
|
||||
{
|
||||
"name": "sam2.1_hiera_base_plus.pt",
|
||||
"type": "sam2.1",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2.1 hiera model (base+)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2.1_hiera_base_plus.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt",
|
||||
"size": "309.0MB"
|
||||
},
|
||||
{
|
||||
"name": "sam2.1_hiera_large.pt",
|
||||
"type": "sam2.1",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2.1 hiera model (large)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2.1_hiera_large.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
|
||||
"size": "857.0MB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "sam2_hiera_tiny.pt",
|
||||
"type": "sam2",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2 hiera model (tiny)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2_hiera_tiny.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_tiny.pt",
|
||||
"size": "149.0MB"
|
||||
},
|
||||
{
|
||||
"name": "sam2_hiera_small.pt",
|
||||
"type": "sam2",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2 hiera model (small)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2_hiera_small.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_small.pt",
|
||||
"size": "176.0MB"
|
||||
},
|
||||
{
|
||||
"name": "sam2_hiera_base_plus.pt",
|
||||
"type": "sam2",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2 hiera model (base+)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2_hiera_base_plus.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_base_plus.pt",
|
||||
"size": "309.0MB"
|
||||
},
|
||||
{
|
||||
"name": "sam2_hiera_large.pt",
|
||||
"type": "sam2",
|
||||
"base": "SAM",
|
||||
"save_path": "sams",
|
||||
"description": "Segmenty Anything SAM 2 hiera model (large)",
|
||||
"reference": "https://github.com/facebookresearch/sam2#model-description",
|
||||
"filename": "sam2_hiera_large.pt",
|
||||
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
|
||||
"size": "857.0MB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "seecoder v1.0",
|
||||
"type": "seecoder",
|
||||
@@ -4006,6 +4097,29 @@
|
||||
"size": "649MB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "Comfy-Org/omnigen2_fp16.safetensors",
|
||||
"type": "diffusion_model",
|
||||
"base": "OmniGen2",
|
||||
"save_path": "default",
|
||||
"description": "OmniGen2 diffusion model. This is required for using OmniGen2.",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||
"filename": "omnigen2_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors",
|
||||
"size": "7.93GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/qwen_2.5_vl_fp16.safetensors",
|
||||
"type": "clip",
|
||||
"base": "qwen-2.5",
|
||||
"save_path": "default",
|
||||
"description": "text encoder for OmniGen2",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
|
||||
"filename": "qwen_2.5_vl_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
|
||||
"size": "7.51GB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "FLUX.1 [Schnell] Diffusion model",
|
||||
"type": "diffusion_model",
|
||||
@@ -4023,7 +4137,7 @@
|
||||
"type": "VAE",
|
||||
"base": "FLUX.1",
|
||||
"save_path": "vae/FLUX1",
|
||||
"description": "FLUX.1 VAE model",
|
||||
"description": "FLUX.1 VAE model\nNOTE: This VAE model can also be used for image generation with OmniGen2.",
|
||||
"reference": "https://huggingface.co/black-forest-labs/FLUX.1-schnell",
|
||||
"filename": "ae.safetensors",
|
||||
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors",
|
||||
@@ -4931,6 +5045,105 @@
|
||||
"size": "1.26GB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||
"size": "28.6GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||
"size": "14.3GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||
"size": "28.6GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||
"size": "14.3GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||
"size": "28.6GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||
"size": "14.3GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||
"size": "28.6GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||
"size": "14.3GB"
|
||||
},
|
||||
{
|
||||
"name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
|
||||
"type": "diffusion_model",
|
||||
"base": "Wan2.2",
|
||||
"save_path": "diffusion_models/Wan2.2",
|
||||
"description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
|
||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||
"filename": "wan2.2_ti2v_5B_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
|
||||
"size": "10.0GB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "Comfy-Org/umt5_xxl_fp16.safetensors",
|
||||
@@ -5033,6 +5246,50 @@
|
||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-fp8.safetensors",
|
||||
"size": "15.7GB"
|
||||
},
|
||||
{
|
||||
"name": "LTX-Video 2B Distilled v0.9.8",
|
||||
"type": "checkpoint",
|
||||
"base": "LTX-Video",
|
||||
"save_path": "checkpoints/LTXV",
|
||||
"description": "LTX-Video 2B distilled model v0.9.8 with improved prompt understanding and detail generation.",
|
||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||
"filename": "ltxv-2b-0.9.8-distilled.safetensors",
|
||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled.safetensors",
|
||||
"size": "6.34GB"
|
||||
},
|
||||
{
|
||||
"name": "LTX-Video 2B Distilled FP8 v0.9.8",
|
||||
"type": "checkpoint",
|
||||
"base": "LTX-Video",
|
||||
"save_path": "checkpoints/LTXV",
|
||||
"description": "Quantized LTX-Video 2B distilled model v0.9.8 with improved prompt understanding and detail generation, optimized for lower VRAM usage.",
|
||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||
"filename": "ltxv-2b-0.9.8-distilled-fp8.safetensors",
|
||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled-fp8.safetensors",
|
||||
"size": "4.46GB"
|
||||
},
|
||||
{
|
||||
"name": "LTX-Video 13B Distilled v0.9.8",
|
||||
"type": "checkpoint",
|
||||
"base": "LTX-Video",
|
||||
"save_path": "checkpoints/LTXV",
|
||||
"description": "LTX-Video 13B distilled model v0.9.8 with improved prompt understanding and detail generation.",
|
||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||
"filename": "ltxv-13b-0.9.8-distilled.safetensors",
|
||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.8-distilled.safetensors",
|
||||
"size": "28.6GB"
|
||||
},
|
||||
{
|
||||
"name": "LTX-Video 13B Distilled FP8 v0.9.8",
|
||||
"type": "checkpoint",
|
||||
"base": "LTX-Video",
|
||||
"save_path": "checkpoints/LTXV",
|
||||
"description": "Quantized LTX-Video 13B distilled model v0.9.8 with improved prompt understanding and detail generation, optimized for lower VRAM usage.",
|
||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
||||
"filename": "ltxv-13b-0.9.8-distilled-fp8.safetensors",
|
||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.8-distilled-fp8.safetensors",
|
||||
"size": "15.7GB"
|
||||
},
|
||||
{
|
||||
"name": "LTX-Video 13B Distilled LoRA v0.9.7",
|
||||
"type": "lora",
|
||||
@@ -5044,6 +5301,50 @@
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-13b-0.9.7-distilled-lora128.safetensors",
"size": "1.33GB"
},
{
"name": "LTX-Video ICLoRA Depth 13B v0.9.7",
"type": "lora",
"base": "LTX-Video",
"save_path": "loras",
"description": "In-Context LoRA (IC LoRA) for depth-controlled video-to-video generation with precise depth conditioning.",
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-depth-13b-0.9.7",
"filename": "ltxv-097-ic-lora-depth-control-comfyui.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-depth-13b-0.9.7/resolve/main/ltxv-097-ic-lora-depth-control-comfyui.safetensors",
"size": "81.9MB"
},
{
"name": "LTX-Video ICLoRA Pose 13B v0.9.7",
"type": "lora",
"base": "LTX-Video",
"save_path": "loras",
"description": "In-Context LoRA (IC LoRA) for pose-controlled video-to-video generation with precise pose conditioning.",
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-pose-13b-0.9.7",
"filename": "ltxv-097-ic-lora-pose-control-comfyui.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-pose-13b-0.9.7/resolve/main/ltxv-097-ic-lora-pose-control-comfyui.safetensors",
"size": "151MB"
},
{
"name": "LTX-Video ICLoRA Canny 13B v0.9.7",
"type": "lora",
"base": "LTX-Video",
"save_path": "loras",
"description": "In-Context LoRA (IC LoRA) for canny edge-controlled video-to-video generation with precise edge conditioning.",
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-canny-13b-0.9.7",
"filename": "ltxv-097-ic-lora-canny-control-comfyui.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-canny-13b-0.9.7/resolve/main/ltxv-097-ic-lora-canny-control-comfyui.safetensors",
"size": "81.9MB"
},
{
"name": "LTX-Video ICLoRA Detailer 13B v0.9.8",
"type": "lora",
"base": "LTX-Video",
"save_path": "loras",
"description": "A video detailer model on top of LTXV_13B_098_DEV, trained on custom data using the In-Context LoRA (IC LoRA) method.",
"reference": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-detailer-13b-0.9.8",
"filename": "ltxv-098-ic-lora-detailer-comfyui.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video-ICLoRA-detailer-13b-0.9.8/resolve/main/ltxv-098-ic-lora-detailer-comfyui.safetensors",
"size": "1.31GB"
},
{
"name": "Latent Bridge Matching for Image Relighting",
"type": "diffusion_model",
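The model entries above all share one record shape: "save_path" (relative to the ComfyUI models directory), "filename", a direct "url", and a human-readable "size". As a minimal sketch of how a downloader could consume one such record — assuming a models_dir root, and ignoring the special "default" save_path that the real manager resolves per model type — in Python:

import os
import urllib.request

def download_model(entry, models_dir):
    # 'save_path' is relative to the models directory, 'filename' is the
    # target name, and 'url' is the direct download link.
    target_dir = os.path.join(models_dir, entry["save_path"])
    os.makedirs(target_dir, exist_ok=True)
    target = os.path.join(target_dir, entry["filename"])
    if not os.path.exists(target):
        urllib.request.urlretrieve(entry["url"], target)
    return target

# Example with one of the entries above:
entry = {
    "save_path": "checkpoints/LTXV",
    "filename": "ltxv-2b-0.9.8-distilled.safetensors",
    "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltxv-2b-0.9.8-distilled.safetensors",
}
# download_model(entry, "ComfyUI/models")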
@@ -35,7 +35,6 @@ else:
def current_timestamp():
    return str(time.time()).split('.')[0]

security_check.security_check()

cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']

@@ -111,19 +110,14 @@ def check_file_logging():

read_config()
read_uv_mode()
security_check.security_check()
check_file_logging()

if sys.version_info < (3, 13):
    cm_global.pip_overrides = {'numpy': 'numpy<2'}
else:
    cm_global.pip_overrides = {}
cm_global.pip_overrides = {}

if os.path.exists(manager_pip_overrides_path):
    with open(manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
        cm_global.pip_overrides = json.load(json_file)

if sys.version_info < (3, 13):
    cm_global.pip_overrides['numpy'] = 'numpy<2'


if os.path.exists(manager_pip_blacklist_path):
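This hunk shows both the old and the new lines (the extraction dropped the leading +/- diff markers). Read against the hunk header, the new ordering appears to start from an empty override table, apply the user's pip_overrides file, and only then force the numpy<2 pin on Python < 3.13, so a user override file can no longer drop that pin. A minimal sketch of that resolution order, with resolve_pip_overrides as a hypothetical helper name (the real code assigns into cm_global directly):

import json
import os
import sys

def resolve_pip_overrides(overrides_path):
    # Start empty, let the user's overrides file take effect, then force
    # the numpy<2 pin last on Python < 3.13 so it always wins.
    overrides = {}
    if os.path.exists(overrides_path):
        with open(overrides_path, 'r', encoding="UTF-8", errors="ignore") as f:
            overrides = json.load(f)
    if sys.version_info < (3, 13):
        overrides['numpy'] = 'numpy<2'
    return overrides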
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,3 +1,3 @@
#!/bin/bash
rm ~/.tmp/dev/*.py > /dev/null 2>&1
python ../../scanner.py ~/.tmp/dev
python ../../scanner.py ~/.tmp/dev $*
@@ -1,5 +1,25 @@
{
"custom_nodes": [
{
"author": "synchronicity-labs",
"title": "ComfyUI Sync Lipsync Node",
"reference": "https://github.com/synchronicity-labs/sync-comfyui",
"files": [
"https://github.com/synchronicity-labs/sync-comfyui"
],
"install_type": "git-clone",
"description": "This custom node allows you to perform audio-video lip synchronization inside ComfyUI using a simple interface."
},
{
"author": "joaomede",
"title": "ComfyUI-Unload-Model-Fork",
"reference": "https://github.com/joaomede/ComfyUI-Unload-Model-Fork",
"files": [
"https://github.com/joaomede/ComfyUI-Unload-Model-Fork"
],
"install_type": "git-clone",
"description": "For unloading a model or all models, using the memory management that is already present in ComfyUI. Copied from [a/https://github.com/willblaschko/ComfyUI-Unload-Models](https://github.com/willblaschko/ComfyUI-Unload-Models) but without the unnecessary extra stuff."
},
{
"author": "SanDiegoDude",
"title": "ComfyUI-HiDream-Sampler [WIP]",
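Entries with "install_type": "git-clone", like those above, list one or more repository URLs under "files". A minimal sketch of how such an entry could be installed — not the manager's actual installer; install_git_clone and the directory layout are assumptions:

import os
import subprocess

def install_git_clone(entry, custom_nodes_dir):
    # For "install_type": "git-clone" entries, each URL in "files" is a
    # repository to clone into the ComfyUI custom_nodes directory.
    for repo_url in entry["files"]:
        name = repo_url.rstrip('/').split('/')[-1]
        dest = os.path.join(custom_nodes_dir, name)
        if not os.path.exists(dest):
            subprocess.run(["git", "clone", repo_url, dest], check=True)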
@@ -1,5 +1,778 @@
{
"custom_nodes": [
{
"author": "aistudynow",
"title": "comfyui-HunyuanImage-2.1 [REMOVED]",
"reference": "https://github.com/aistudynow/comfyui-HunyuanImage-2.1",
"files": [
"https://github.com/aistudynow/comfyui-HunyuanImage-2.1"
],
"install_type": "git-clone",
"description": "NODES: Load HunyuanImage DiT, Load HunyuanImage VAE, Load HunyuanImage Dual Text Encoder, HunyuanImage Sampler, HunyuanImage VAE Decode, HunyuanImage CLIP Text Encode, Empty HunyuanImage Latent Image"
},
{
"author": "SlackinJack",
"title": "distrifuser_comfyui [DEPRECATED]",
"reference": "https://github.com/SlackinJack/distrifuser_comfyui",
"files": [
"https://github.com/SlackinJack/distrifuser_comfyui"
],
"install_type": "git-clone",
"description": "[a/Distrifuser](https://github.com/mit-han-lab/distrifuser) sampler node for ComfyUI"
},
{
"author": "SlackinJack",
"title": "asyncdiff_comfyui [DEPRECATED]",
"reference": "https://github.com/SlackinJack/asyncdiff_comfyui",
"files": [
"https://github.com/SlackinJack/asyncdiff_comfyui"
],
"install_type": "git-clone",
"description": "AsyncDiff node for ComfyUI"
},
{
"author": "TheBill2001",
"title": "Save Images with Captions [REMOVED]",
"reference": "https://github.com/TheBill2001/ComfyUI-Save-Image-Caption",
"files": [
"https://github.com/TheBill2001/ComfyUI-Save-Image-Caption"
],
"install_type": "git-clone",
"description": "Provides two custom nodes to load and save images with captions as separate files."
},
{
"author": "ShmuelRonen",
"title": "ComfyUI Flux 1.1 Ultra & Raw Node [REMOVED]",
"reference": "https://github.com/ShmuelRonen/ComfyUI_Flux_1.1_RAW_API",
"files": [
"https://github.com/ShmuelRonen/ComfyUI_Flux_1.1_RAW_API"
],
"install_type": "git-clone",
"description": "A ComfyUI custom node for Black Forest Labs' FLUX 1.1 [pro] API, supporting both regular and Ultra modes with optional Raw mode."
},
{
"author": "mattwilliamson",
"title": "ComfyUI AI GameDev Nodes [UNSAFE/REMOVED]",
"reference": "https://github.com/mattwilliamson/comfyui-ai-gamedev",
"files": [
"https://github.com/mattwilliamson/comfyui-ai-gamedev"
],
"install_type": "git-clone",
"description": "Custom ComfyUI nodes for AI-powered game asset generation, providing a comprehensive toolkit for game developers to create 3D models, animations, and audio assets using state-of-the-art AI models.[w/This node pack has an implementation that dynamically generates scripts.]"
},
{
"author": "manifestations",
"title": "ComfyUI Outfit Nodes [DEPRECATED]",
"reference": "https://github.com/manifestations/comfyui-outfit",
"files": [
"https://github.com/manifestations/comfyui-outfit"
],
"install_type": "git-clone",
"description": "Advanced, professional outfit and makeup generation nodes for ComfyUI, with dynamic UI and AI-powered prompt formatting."
},
{
"author": "Poukpalaova",
"title": "ComfyUI-FRED-Nodes [DEPRECATED]",
"reference": "https://github.com/Poukpalaova/ComfyUI-FRED-Nodes",
"files": [
"https://github.com/Poukpalaova/ComfyUI-FRED-Nodes"
],
"install_type": "git-clone",
"description": "Multiple nodes that ease the process.\nNOTE: The files in the repo are not organized."
},
{
"author": "cwebbi1",
"title": "VoidCustomNodes [REMOVED]",
"reference": "https://github.com/cwebbi1/VoidCustomNodes",
"files": [
"https://github.com/cwebbi1/VoidCustomNodes"
],
"install_type": "git-clone",
"description": "NODES: Prompt Parser, String Combiner"
},
{
"author": "Shellishack",
"title": "ComfyUI Remote Media Loaders [REMOVED]",
"reference": "https://github.com/Shellishack/comfyui-remote-media-loaders",
"files": [
"https://github.com/Shellishack/comfyui-remote-media-loaders"
],
"install_type": "git-clone",
"description": "Load media (image/video/audio) from a remote URL"
},
{
"author": "D3lUX3I",
"title": "VideoPromptEnhancer [REMOVED]",
"reference": "https://github.com/D3lUX3I/ComfyUI-VideoPromptEnhancer",
"files": [
"https://github.com/D3lUX3I/ComfyUI-VideoPromptEnhancer"
],
"install_type": "git-clone",
"description": "This node generates a professional prompt from an input text for modern video AI models (e.g., Alibaba Wan 2.2) via the OpenRouter API."
},
{
"author": "perilli",
"title": "apw_nodes [REMOVED]",
"reference": "https://github.com/alessandroperilli/APW_Nodes",
"files": [
"https://github.com/alessandroperilli/APW_Nodes"
],
"install_type": "git-clone",
"description": "A custom node suite to augment the capabilities of the [a/AP Workflows for ComfyUI](https://perilli.com/ai/comfyui/)\nNOTE: See [a/Open Creative Studio Nodes](https://github.com/alessandroperilli/OCS_Nodes)"
},
{
"author": "greengerong",
"title": "ComfyUI-Lumina-Video [REMOVED]",
"reference": "https://github.com/greengerong/ComfyUI-Lumina-Video",
"files": [
"https://github.com/greengerong/ComfyUI-Lumina-Video"
],
"install_type": "git-clone",
"description": "This is a video generation plugin implementation for ComfyUI based on the Lumina Video model."
},
{
"author": "SatadalAI",
"title": "Combined Upscale Node for ComfyUI [REMOVED]",
"reference": "https://github.com/SatadalAI/SATA_UtilityNode",
"files": [
"https://github.com/SatadalAI/SATA_UtilityNode"
],
"install_type": "git-clone",
"description": "Combined_Upscale is a custom ComfyUI node designed for high-quality image enhancement workflows. It intelligently combines model-based upscaling with efficient CPU-based resizing, offering granular control over output dimensions and quality. Ideal for asset pipelines, UI prototyping, and generative workflows.\nNOTE: The files in the repo are not organized."
},
{
"author": "netroxin",
"title": "Netro_wildcards [REMOVED]",
"reference": "https://github.com/netroxin/comfyui_netro_wildcards",
"files": [
"https://github.com/netroxin/comfyui_netro_wildcards"
],
"install_type": "git-clone",
"description": "Since I used 'simple wildcards' from Vanilla and it no longer works with the new ComfyUI version for me, I created an alternative. This CustomNode takes the entire contents of your wildcards folder (comfyui wildcards) and creates a node for each one."
},
{
"author": "takoyaki1118",
"title": "ComfyUI-MangaTools [REMOVED]",
"reference": "https://github.com/takoyaki1118/ComfyUI-MangaTools",
"files": [
"https://github.com/takoyaki1118/ComfyUI-MangaTools"
],
"install_type": "git-clone",
"description": "NODES: Manga Panel Detector, Manga Panel Dispatcher, GateImage, MangaPageAssembler"
},
{
"author": "lucasgattas",
"title": "comfyui-egregora-regional [REMOVED]",
"reference": "https://github.com/lucasgattas/comfyui-egregora-regional",
"files": [
"https://github.com/lucasgattas/comfyui-egregora-regional"
],
"install_type": "git-clone",
"description": "Image Tile Split with Region-Aware Prompting for ComfyUI"
},
{
"author": "lucasgattas",
"title": "comfyui-egregora-tiled [REMOVED]",
"reference": "https://github.com/lucasgattas/comfyui-egregora-tiled",
"files": [
"https://github.com/lucasgattas/comfyui-egregora-tiled"
],
"install_type": "git-clone",
"description": "Tiled regional prompting + tiled VAE decode with seam-free blending for ComfyUI"
},
{
"author": "Seedsa",
"title": "ComfyUI Fooocus Nodes [REMOVED]",
"id": "fooocus-nodes",
"reference": "https://github.com/Seedsa/Fooocus_Nodes",
"files": [
"https://github.com/Seedsa/Fooocus_Nodes"
],
"install_type": "git-clone",
"description": "This extension provides image generation features based on Fooocus."
},
{
"author": "zhilemann",
"title": "ComfyUI-moondream2 [REMOVED]",
"reference": "https://github.com/zhilemann/ComfyUI-moondream2",
"files": [
"https://github.com/zhilemann/ComfyUI-moondream2"
],
"install_type": "git-clone",
"description": "Nodes for nightly moondream2 VLM inference.\nSupports only captioning and visual queries at the moment."
},
{
"author": "shinich39",
"title": "comfyui-textarea-is-shit [REMOVED]",
"reference": "https://github.com/shinich39/comfyui-textarea-is-shit",
"files": [
"https://github.com/shinich39/comfyui-textarea-is-shit"
],
"description": "HTML gives me a textarea like piece of shit.",
"install_type": "git-clone"
},
{
"author": "shinich39",
"title": "comfyui-poor-textarea [REMOVED]",
"reference": "https://github.com/shinich39/comfyui-poor-textarea",
"files": [
"https://github.com/shinich39/comfyui-poor-textarea"
],
"install_type": "git-clone",
"description": "Add commentify, indentation, auto-close brackets in textarea."
},
{
"author": "InfiniNode",
"title": "Comfyui-InfiniNode-Model-Suite [UNSAFE/REMOVED]",
"reference": "https://github.com/InfiniNode/Comfyui-InfiniNode-Model-Suite",
"files": [
"https://github.com/InfiniNode/Comfyui-InfiniNode-Model-Suite"
],
"install_type": "git-clone",
"description": "Welcome to the InfiniNode Model Suite, a custom node pack for ComfyUI that transforms the process of manipulating generative AI models. Our suite is a direct implementation of the 'GUI-Based Key Converter Development Plan,' designed to remove technical barriers for advanced AI practitioners and integrate seamlessly with existing image generation pipelines.[w/This node pack contains a node that has a vulnerability allowing write to arbitrary file paths.]"
},
{
"author": "Avalre",
"title": "ComfyUI-avaNodes [REMOVED]",
"reference": "https://github.com/Avalre/ComfyUI-avaNodes",
"files": [
"https://github.com/Avalre/ComfyUI-avaNodes"
],
"install_type": "git-clone",
"description": "These nodes were created to personalize/optimize several ComfyUI nodes for my own use. You can replicate the functionality of most of my nodes by some combination of default ComfyUI nodes and custom nodes from other developers."
},
{
"author": "Alectriciti",
"title": "comfyui-creativeprompts [REMOVED]",
"reference": "https://github.com/Alectriciti/comfyui-creativeprompts",
"files": [
"https://github.com/Alectriciti/comfyui-creativeprompts"
],
"install_type": "git-clone",
"description": "A creative alternative to dynamicprompts"
},
{
"author": "flybirdxx",
"title": "ComfyUI Sliding Window [REMOVED]",
"reference": "https://github.com/PixWizardry/ComfyUI_Sliding_Window",
"files": [
"https://github.com/PixWizardry/ComfyUI_Sliding_Window"
],
"install_type": "git-clone",
"description": "This set of nodes provides a powerful sliding window or 'tiling' technique for processing long videos and animations in ComfyUI. It allows you to work on animations that are longer than your VRAM would typically allow by breaking the job into smaller, overlapping chunks and seamlessly blending them back together."
},
{
"author": "SykkoAtHome",
"title": "Sykko Tools for ComfyUI [REMOVED]",
"reference": "https://github.com/SykkoAtHome/ComfyUI_SykkoTools",
"files": [
"https://github.com/SykkoAtHome/ComfyUI_SykkoTools"
],
"install_type": "git-clone",
"description": "Utilities for working with camera animations inside ComfyUI. The repository currently provides a node for loading camera motion from ASCII FBX files and a corresponding command line helper for debugging."
},
{
"author": "hananbeer",
"title": "node_dev - ComfyUI Node Development Helper [REMOVED]",
"reference": "https://github.com/hananbeer/node_dev",
"files": [
"https://github.com/hananbeer/node_dev"
],
"install_type": "git-clone",
"description": "Browse to this endpoint to reload custom nodes for more streamlined development:\nhttp://127.0.0.1:8188/node_dev/reload/<module_name>"
},
{
"author": "Charonartist",
"title": "Comfyui_gemini_tts_node [REMOVED]",
"reference": "https://github.com/Charonartist/Comfyui_gemini_tts_node",
"files": [
"https://github.com/Charonartist/Comfyui_gemini_tts_node"
],
"install_type": "git-clone",
"description": "This custom node is a ComfyUI node for generating speech from text using the Gemini 2.5 Flash Preview TTS API."
},
{
"author": "squirrel765",
"title": "lorasubdirectory [REMOVED]",
"reference": "https://github.com/andrewsthomasj/lorasubdirectory",
"files": [
"https://github.com/andrewsthomasj/lorasubdirectory"
],
"install_type": "git-clone",
"description": "Only show a dropdown of loras in a given subdirectory"
},
{
"author": "shingo1228",
"title": "ComfyUI-send-Eagle(slim) [REMOVED]",
"id": "send-eagle",
"reference": "https://github.com/shingo1228/ComfyUI-send-eagle-slim",
"files": [
"https://github.com/shingo1228/ComfyUI-send-eagle-slim"
],
"install_type": "git-clone",
"description": "Nodes: Send Webp Image to Eagle. This is an extension node for ComfyUI that allows you to send generated images in webp format to Eagle. This extension node is a re-implementation of the Eagle linkage functions of the previous ComfyUI-send-Eagle node, focusing on the functions required for this node."
},
{
"author": "shingo1228",
"title": "ComfyUI-SDXL-EmptyLatentImage [REMOVED]",
"id": "sdxl-emptylatent",
"reference": "https://github.com/shingo1228/ComfyUI-SDXL-EmptyLatentImage",
"files": [
"https://github.com/shingo1228/ComfyUI-SDXL-EmptyLatentImage"
],
"install_type": "git-clone",
"description": "Nodes: SDXL Empty Latent Image. An extension node for ComfyUI that allows you to select a resolution from the pre-defined json files and output a Latent Image."
},
{
"author": "chaunceyyann",
"title": "ComfyUI Image Processing Nodes [REMOVED]",
"reference": "https://github.com/chaunceyyann/comfyui-image-processing-nodes",
"files": [
"https://github.com/chaunceyyann/comfyui-image-processing-nodes"
],
"install_type": "git-clone",
"description": "A collection of custom nodes for ComfyUI focused on image processing operations."
},
{
"author": "OgreLemonSoup",
"title": "Gallery&Tabs [DEPRECATED]",
"id": "LoadImageGallery",
"reference": "https://github.com/OgreLemonSoup/ComfyUI-Load-Image-Gallery",
"files": [
"https://github.com/OgreLemonSoup/ComfyUI-Load-Image-Gallery"
],
"install_type": "git-clone",
"description": "Adds a gallery to the Load Image node and tabs for Load Checkpoint/Lora/etc nodes"
},
{
"author": "11dogzi",
"title": "Qwen-Image ComfyUI [REMOVED]",
"reference": "https://github.com/11dogzi/Comfyui-Qwen-Image",
"files": [
"https://github.com/11dogzi/Comfyui-Qwen-Image"
],
"install_type": "git-clone",
"description": "This is a custom node package that integrates the Qwen-Image model into ComfyUI."
},
{
"author": "BAIS1C",
"title": "ComfyUI-AudioDuration [REMOVED]",
"reference": "https://github.com/BAIS1C/ComfyUI_BASICDancePoser",
"files": [
"https://github.com/BAIS1C/ComfyUI_BASICDancePoser"
],
"install_type": "git-clone",
"description": "Node to extract Dance poses from Music to control Video Generations.\nNOTE: The files in the repo are not organized."
},
{
"author": "BAIS1C",
"title": "ComfyUI_BASICSAdvancedDancePoser [REMOVED]",
"reference": "https://github.com/BAIS1C/ComfyUI_BASICSAdvancedDancePoser",
"files": [
"https://github.com/BAIS1C/ComfyUI_BASICSAdvancedDancePoser"
],
"install_type": "git-clone",
"description": "Professional COCO-WholeBody 133-keypoint dance animation system for ComfyUI"
},
{
"author": "fablestudio",
"title": "ComfyUI-Showrunner-Utils [REMOVED]",
"reference": "https://github.com/fablestudio/ComfyUI-Showrunner-Utils",
"files": [
"https://github.com/fablestudio/ComfyUI-Showrunner-Utils"
],
"install_type": "git-clone",
"description": "NODES: Align Face, Generate Timestamp, GetMostCommonColors, Alpha Crop and Position Image, Shrink Image"
},
{
"author": "skayka",
"title": "ComfyUI-DreamFit [REMOVED]",
"reference": "https://github.com/skayka/ComfyUI-DreamFit",
"files": [
"https://github.com/skayka/ComfyUI-DreamFit"
],
"install_type": "git-clone",
"description": "Garment-centric human generation nodes for ComfyUI using DreamFit with Flux.\nDreamFit is a powerful adapter system that enhances Flux models with garment-aware generation capabilities, enabling high-quality fashion and clothing generation."
},
{
"author": "domenecmiralles",
"title": "obobo_nodes [REMOVED]",
"reference": "https://github.com/domenecmiralles/obobo_nodes",
"files": [
"https://github.com/domenecmiralles/obobo_nodes"
],
"install_type": "git-clone",
"description": "A collection of custom nodes for ComfyUI that provide various input and output capabilities."
},
{
"author": "NicholasKao1029",
"title": "comfyui-pixxio [REMOVED]",
"reference": "https://github.com/NicholasKao1029/comfyui-pixxio",
"files": [
"https://github.com/NicholasKao1029/comfyui-pixxio"
],
"install_type": "git-clone",
"description": "NODES: Auto-Upload Image to Pixxio Collection, Load Image from Pixx.io"
},
{
"author": "ComfyUI-Workflow",
"title": "ComfyUI OpenAI Nodes [REMOVED]",
"reference": "https://github.com/ComfyUI-Workflow/ComfyUI-OpenAI",
"files": [
"https://github.com/ComfyUI-Workflow/ComfyUI-OpenAI"
],
"install_type": "git-clone",
"description": "By utilizing OpenAI's powerful vision models, this node enables you to incorporate state-of-the-art image understanding into your ComfyUI projects with minimal setup."
},
{
"author": "dionren",
"title": "Export Workflow With Cyuai Api Available Nodes [REMOVED]",
"id": "comfyUI-Pro-Export-Tool",
"reference": "https://github.com/dionren/ComfyUI-Pro-Export-Tool",
"files": [
"https://github.com/dionren/ComfyUI-Pro-Export-Tool"
],
"install_type": "git-clone",
"description": "This is a node to convert workflows to cyuai api available nodes."
},
{
"author": "1H-hobit",
"title": "ComfyUI_InternVL3 [REMOVED]",
"reference": "https://github.com/1H-hobit/ComfyUI_InternVL3",
"files": [
"https://github.com/1H-hobit/ComfyUI_InternVL3"
],
"install_type": "git-clone",
"description": "ComfyUI for [a/InternVL](https://github.com/OpenGVLab/InternVL)"
},
{
"author": "spacepxl",
"title": "ComfyUI-Florence-2 [DEPRECATED]",
"id": "florence2-spacepxl",
"reference": "https://github.com/spacepxl/ComfyUI-Florence-2",
"files": [
"https://github.com/spacepxl/ComfyUI-Florence-2"
],
"install_type": "git-clone",
"description": "[a/https://huggingface.co/microsoft/Florence-2-large-ft](https://huggingface.co/microsoft/Florence-2-large-ft)\nLarge or base model, support for captioning and bbox task modes, more coming soon."
},
{
"author": "xxxxxxxxxxxc",
"title": "flux-kontext-diff-merge [REMOVED]",
"reference": "https://github.com/xxxxxxxxxxxc/flux-kontext-diff-merge",
"files": [
"https://github.com/xxxxxxxxxxxc/flux-kontext-diff-merge"
],
"install_type": "git-clone",
"description": "Preserve image quality with flux-kontext-diff-merge. This ComfyUI node merges only changed areas from AI edits, ensuring clarity and detail."
},
{
"author": "TechnoByteJS",
"title": "TechNodes [REMOVED]",
"id": "technodes",
"reference": "https://github.com/TechnoByteJS/ComfyUI-TechNodes",
"files": [
"https://github.com/TechnoByteJS/ComfyUI-TechNodes"
],
"install_type": "git-clone",
"description": "ComfyUI nodes for merging, testing and more.\nNOTE: SDNext Merge, VAE Merge, MBW Layers, Repeat VAE, Quantization."
},
{
"author": "DDDDEEP",
"title": "ComfyUI-DDDDEEP [REMOVED]",
"reference": "https://github.com/DDDDEEP/ComfyUI-DDDDEEP",
"files": [
"https://github.com/DDDDEEP/ComfyUI-DDDDEEP"
],
"install_type": "git-clone",
"description": "NODES: AutoWidthHeight, ReturnIntSeed, OppositeBool, PromptItemCollection"
},
{
"author": "manifestations",
"title": "ComfyUI Ethnic Outfits Custom Nodes [REMOVED]",
"reference": "https://github.com/manifestations/comfyui-outfits",
"files": [
"https://github.com/manifestations/comfyui-outfits"
],
"install_type": "git-clone",
"description": "Custom ComfyUI nodes for generating outfit prompts representing diverse ethnicities, cultures, and regions. Uses extensible JSON data for clothing, accessories, and poses, with “random/disabled” dropdowns for flexibility. Advanced prompt engineering is supported via Ollama LLM integration. Easily add new regions, ethnicities, or cultures by updating data files and creating lightweight node wrappers. Designed for artists, researchers, and developers seeking culturally rich, customizable prompt generation in ComfyUI workflows."
},
{
"author": "MitoshiroPJ",
"title": "ComfyUI Slothful Attention [REMOVED]",
"reference": "https://github.com/MitoshiroPJ/comfyui_slothful_attention",
"files": [
"https://github.com/MitoshiroPJ/comfyui_slothful_attention"
],
"install_type": "git-clone",
"description": "This custom node allows controlling output without training. The reducing method is similar to [a/Spatial-Reduction Attention](https://paperswithcode.com/method/spatial-reduction-attention)."
},
{
"author": "MitoshiroPJ",
"title": "comfyui_focal_sampler [REMOVED]",
"reference": "https://github.com/MitoshiroPJ/comfyui_focal_sampler",
"files": [
"https://github.com/MitoshiroPJ/comfyui_focal_sampler"
],
"install_type": "git-clone",
"description": "Apply additional sampling to a specific area"
},
{
"author": "manifestations",
"title": "ComfyUI Ethnic Outfit & Prompt Enhancer Nodes [REMOVED]",
"reference": "https://github.com/manifestations/comfyui-indian-outfit",
"files": [
"https://github.com/manifestations/comfyui-indian-outfit"
],
"install_type": "git-clone",
"description": "Features:\n* Extensive options for Indian, Indonesian, and international clothing, jewelry, accessories, and styles\n* Multiple jewelry and accessory fields (with material support: gold, diamond, silver, leather, beads, etc.)\n* Support for tattoos, henna, hair styles, poses, shot types, lighting, and photography genres\n* Seamless prompt expansion using your own Ollama LLM instance\n* Modular, extensible JSON data files for easy customization"
},
{
"author": "coVISIONSld",
"title": "ComfyUI-OmniGen2 [REMOVED]",
"reference": "https://github.com/coVISIONSld/ComfyUI-OmniGen2",
"files": [
"https://github.com/coVISIONSld/ComfyUI-OmniGen2"
],
"install_type": "git-clone",
"description": "ComfyUI-OmniGen2 is a custom node package for the OmniGen2 model, enabling advanced text-to-image generation and visual understanding."
},
{
"author": "S4MUEL-404",
"title": "ComfyUI-S4Tool-Image-Overlay [REMOVED]",
"reference": "https://github.com/S4MUEL-404/ComfyUI-S4Tool-Image-Overlay",
"files": [
"https://github.com/S4MUEL-404/ComfyUI-S4Tool-Image-Overlay"
],
"install_type": "git-clone",
"description": "Quickly set up image overlay effects"
},
{
"author": "akspa0",
"title": "ComfyUI-FapMixPlus [REMOVED]",
"reference": "https://github.com/akspa0/ComfyUI-FapMixPlus",
"files": [
"https://github.com/akspa0/ComfyUI-FapMixPlus"
],
"install_type": "git-clone",
"description": "This is an audio processing script that applies soft limiting, optional loudness normalization, and optional slicing for transcription. It can also produce stereo-mixed outputs with optional audio appended to the end. The script organizes processed files into structured folders with sanitized filenames and retains original timestamps for continuity."
},
{
"author": "RedmondAI",
"title": "comfyui-tools [UNSAFE]",
"reference": "https://github.com/RedmondAI/comfyui-tools",
"files": [
"https://github.com/RedmondAI/comfyui-tools"
],
"install_type": "git-clone",
"description": "Custom extensions for ComfyUI used by the Redmond3D VFX team.[w/This node pack has a vulnerability that allows it to create files at arbitrary paths.]"
},
{
"author": "S4MUEL-404",
"title": "Image Position Blend [REMOVED]",
"id": "ComfyUI-Image-Position-Blend",
"version": "1.1",
"reference": "https://github.com/S4MUEL-404/ComfyUI-Image-Position-Blend",
"files": [
"https://github.com/S4MUEL-404/ComfyUI-Image-Position-Blend"
],
"install_type": "git-clone",
"description": "A custom node for conveniently adjusting the overlay position of two images."
},
{
"author": "S4MUEL-404",
"title": "ComfyUI-Text-On-Image [REMOVED]",
"id": "ComfyUI-Text-On-Image",
"reference": "https://github.com/S4MUEL-404/ComfyUI-Text-On-Image",
"files": [
"https://github.com/S4MUEL-404/ComfyUI-Text-On-Image"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI that allows users to add text overlays to images with customizable size, font, position, and shadow."
},
{
"author": "S4MUEL-404",
"title": "ComfyUI-Prompts-Selector [REMOVED]",
"reference": "https://github.com/S4MUEL-404/ComfyUI-Prompts-Selector",
"files": [
"https://github.com/S4MUEL-404/ComfyUI-Prompts-Selector"
],
"install_type": "git-clone",
"description": "Quickly select preset prompts and merge them"
},
{
"author": "juntaosun",
"title": "ComfyUI_open_nodes [REMOVED]",
"reference": "https://github.com/juntaosun/ComfyUI_open_nodes",
"files": [
"https://github.com/juntaosun/ComfyUI_open_nodes"
],
"install_type": "git-clone",
"description": "ComfyUI open nodes by juntaosun."
},
{
"author": "perilli",
"title": "apw_nodes [DEPRECATED]",
"reference": "https://github.com/alessandroperilli/apw_nodes",
"files": [
"https://github.com/alessandroperilli/apw_nodes"
],
"install_type": "git-clone",
"description": "A custom node suite to augment the capabilities of the [a/AP Workflows for ComfyUI](https://perilli.com/ai/comfyui/)[w/'APW_Nodes' has been newly added in place of 'apw_nodes'.]"
},
{
"author": "markuryy",
"title": "ComfyUI Spiritparticle Nodes [REMOVED]",
"reference": "https://github.com/markuryy/comfyui-spiritparticle",
"files": [
"https://github.com/markuryy/comfyui-spiritparticle"
],
"install_type": "git-clone",
"description": "A node pack by spiritparticle."
},
{
"author": "SpaceKendo",
"title": "Text to video for Stable Video Diffusion in ComfyUI [REMOVED]",
"id": "svd-txt2vid",
"reference": "https://github.com/SpaceKendo/ComfyUI-svd_txt2vid",
"files": [
"https://github.com/SpaceKendo/ComfyUI-svd_txt2vid"
],
"install_type": "git-clone",
"description": "This node replaces the init_image conditioning for the [a/Stable Video Diffusion](https://github.com/Stability-AI/generative-models) image to video model with text embeds, together with a conditioning frame. The conditioning frame is a set of latents."
},
{
"author": "vovler",
"title": "ComfyUI Civitai Helper Extension [REMOVED]",
"reference": "https://github.com/vovler/comfyui-civitaihelper",
"files": [
"https://github.com/vovler/comfyui-civitaihelper"
],
"install_type": "git-clone",
"description": "ComfyUI extension for parsing Civitai PNG workflows and automatically downloading missing models"
},
{
"author": "DriftJohnson",
"title": "DJZ-Nodes [REMOVED]",
"id": "DJZ-Nodes",
"reference": "https://github.com/MushroomFleet/DJZ-Nodes",
"files": [
"https://github.com/MushroomFleet/DJZ-Nodes"
],
"install_type": "git-clone",
"description": "AspectSize and other nodes"
},
{
"author": "DriftJohnson",
"title": "KokoroTTS Node [REMOVED]",
"reference": "https://github.com/MushroomFleet/DJZ-KokoroTTS",
"files": [
"https://github.com/MushroomFleet/DJZ-KokoroTTS"
],
"install_type": "git-clone",
"description": "This node provides advanced text-to-speech functionality powered by KokoroTTS. Follow the instructions below to install, configure, and use the node within your portable ComfyUI installation."
},
{
"author": "MushroomFleet",
"title": "DJZ-Pedalboard [REMOVED]",
"reference": "https://github.com/MushroomFleet/DJZ-Pedalboard",
"files": [
"https://github.com/MushroomFleet/DJZ-Pedalboard"
],
"install_type": "git-clone",
"description": "This project provides a collection of custom nodes designed for enhanced audio effects in ComfyUI. With an intuitive pedalboard interface, users can easily integrate and manipulate various audio effects within their workflows."
},
{
"author": "MushroomFleet",
"title": "SVG Suite for ComfyUI [REMOVED]",
"reference": "https://github.com/MushroomFleet/svg-suite",
"files": [
"https://github.com/MushroomFleet/svg-suite"
],
"install_type": "git-clone",
"description": "SVG Suite is an advanced set of nodes for converting images to SVG in ComfyUI, expanding upon the functionality of ComfyUI-ToSVG."
},
{
"author": "joeriben",
"title": "AI4ArtsEd Ollama Prompt Node [DEPRECATED]",
"reference": "https://github.com/joeriben/ai4artsed_comfyui",
"files": [
"https://github.com/joeriben/ai4artsed_comfyui"
],
"install_type": "git-clone",
"description": "Experimental nodes for ComfyUI. For more, see [a/https://kubi-meta.de/ai4artsed](https://kubi-meta.de/ai4artsed) A custom ComfyUI node for stylistic and cultural transformation of input text using local LLMs served via Ollama. This node allows you to combine a free-form prompt (e.g. translation, poetic recoding, genre shift) with externally supplied text in the ComfyUI graph. The result is processed via an Ollama-hosted model and returned as plain text."
},
{
"author": "bento234",
"title": "ComfyUI-bento-toolbox [REMOVED]",
"reference": "https://github.com/bento234/ComfyUI-bento-toolbox",
"files": [
"https://github.com/bento234/ComfyUI-bento-toolbox"
],
"install_type": "git-clone",
"description": "NODES: Tile Prompt Distributor"
},
{
"author": "yichengup",
"title": "ComfyUI-VideoBlender [REMOVED]",
"reference": "https://github.com/yichengup/ComfyUI-VideoBlender",
"files": [
"https://github.com/yichengup/ComfyUI-VideoBlender"
],
"install_type": "git-clone",
"description": "Video clip mixing"
},
{
"author": "xl0",
"title": "latent-tools [REMOVED]",
"reference": "https://github.com/xl0/latent-tools",
"files": [
"https://github.com/xl0/latent-tools"
],
"install_type": "git-clone",
"description": "Visualize and manipulate the latent space in ComfyUI"
},
{
"author": "Conor-Collins",
"title": "ComfyUI-CoCoTools [REMOVED]",
"reference": "https://github.com/Conor-Collins/coco_tools",
"files": [
"https://github.com/Conor-Collins/coco_tools"
],
"install_type": "git-clone",
"description": "A set of custom nodes for ComfyUI providing advanced image processing, file handling, and utility functions."
},
{
"author": "theUpsider",
"title": "ComfyUI-Logic [DEPRECATED]",
"id": "comfy-logic",
"reference": "https://github.com/theUpsider/ComfyUI-Logic",
"files": [
"https://github.com/theUpsider/ComfyUI-Logic"
],
"install_type": "git-clone",
"description": "An extension to ComfyUI that introduces logic nodes and conditional rendering capabilities."
},
{
"author": "Malloc-pix",
"title": "comfyui_qwen2.4_vl_node [REMOVED]",
"reference": "https://github.com/Malloc-pix/comfyui_qwen2.4_vl_node",
"files": [
"https://github.com/Malloc-pix/comfyui_qwen2.4_vl_node"
],
"install_type": "git-clone",
"description": "NODES: CogVLM2 Captioner, CLIP Dynamic Text Encode(cy)"
},
{
"author": "inyourdreams-studio",
"title": "ComfyUI-RBLM [REMOVED]",
"reference": "https://github.com/inyourdreams-studio/comfyui-rblm",
"files": [
"https://github.com/inyourdreams-studio/comfyui-rblm"
],
"install_type": "git-clone",
"description": "A custom node pack for ComfyUI that provides text manipulation nodes."
},
{
"author": "dream-computing",
"title": "SyntaxNodes - Image Processing Effects for ComfyUI [REMOVED]",
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,5 +1,219 @@
{
"models": [

{
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
"size": "28.6GB"
},
{
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
"size": "14.3GB"
},
{
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
"size": "28.6GB"
},
{
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
"size": "14.3GB"
},
{
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
"size": "28.6GB"
},
{
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
"size": "14.3GB"
},
{
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
"size": "28.6GB"
},
{
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
"size": "14.3GB"
},
{
"name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
"type": "diffusion_model",
"base": "Wan2.2",
"save_path": "diffusion_models/Wan2.2",
"description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
"filename": "wan2.2_ti2v_5B_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
"size": "10.0GB"
},

{
"name": "sam2.1_hiera_tiny.pt",
"type": "sam2.1",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2.1 hiera model (tiny)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2.1_hiera_tiny.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt",
"size": "149.0MB"
},
{
"name": "sam2.1_hiera_small.pt",
"type": "sam2.1",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2.1 hiera model (small)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2.1_hiera_small.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt",
"size": "176.0MB"
},
{
"name": "sam2.1_hiera_base_plus.pt",
"type": "sam2.1",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2.1 hiera model (base+)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2.1_hiera_base_plus.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt",
"size": "309.0MB"
},
{
"name": "sam2.1_hiera_large.pt",
"type": "sam2.1",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2.1 hiera model (large)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2.1_hiera_large.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
"size": "857.0MB"
},

{
"name": "sam2_hiera_tiny.pt",
"type": "sam2",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2 hiera model (tiny)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2_hiera_tiny.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_tiny.pt",
"size": "149.0MB"
},
{
"name": "sam2_hiera_small.pt",
"type": "sam2",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2 hiera model (small)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2_hiera_small.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_small.pt",
"size": "176.0MB"
},
{
"name": "sam2_hiera_base_plus.pt",
"type": "sam2",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2 hiera model (base+)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2_hiera_base_plus.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_base_plus.pt",
"size": "309.0MB"
},
{
"name": "sam2_hiera_large.pt",
"type": "sam2",
"base": "SAM",
"save_path": "sams",
"description": "Segment Anything SAM 2 hiera model (large)",
"reference": "https://github.com/facebookresearch/sam2#model-description",
"filename": "sam2_hiera_large.pt",
"url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
"size": "857.0MB"
},

{
"name": "Comfy-Org/omnigen2_fp16.safetensors",
"type": "diffusion_model",
"base": "OmniGen2",
"save_path": "default",
"description": "OmniGen2 diffusion model. This is required for using OmniGen2.",
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
"filename": "omnigen2_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors",
"size": "7.93GB"
},
{
"name": "Comfy-Org/qwen_2.5_vl_fp16.safetensors",
"type": "clip",
"base": "qwen-2.5",
"save_path": "default",
"description": "Text encoder for OmniGen2",
"reference": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged",
"filename": "qwen_2.5_vl_fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
"size": "7.51GB"
},

{
"name": "Latent Bridge Matching for Image Relighting",
"type": "diffusion_model",
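One detail worth noting in these records: "size" is a display string ("14.3GB", "857.0MB", "493MB"), not a byte count. A small sketch for turning it into bytes for free-disk-space checks, assuming decimal units since the listings do not say whether GB means 10^9 or 2^30:

import re

_UNITS = {"KB": 10**3, "MB": 10**6, "GB": 10**9}

def size_to_bytes(size):
    # Parse strings such as "14.3GB" or "493MB" into an integer byte count.
    m = re.fullmatch(r'([0-9.]+)\s*(KB|MB|GB)', size)
    if m is None:
        raise ValueError(f"unrecognized size: {size!r}")
    return int(float(m.group(1)) * _UNITS[m.group(2)])

assert size_to_bytes("10.0GB") == 10_000_000_000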
@@ -473,224 +687,6 @@
|
||||
"filename": "llava_llama3_fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp16.safetensors",
|
||||
"size": "16.1GB"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "PixArt-Sigma-XL-2-512-MS.safetensors (diffusion)",
|
||||
"type": "diffusion_model",
|
||||
"base": "pixart-sigma",
|
||||
"save_path": "diffusion_models/PixArt-Sigma",
|
||||
"description": "PixArt-Sigma Diffusion model",
|
||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS",
|
||||
"filename": "PixArt-Sigma-XL-2-512-MS.safetensors",
|
||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
||||
"size": "2.44GB"
|
||||
},
|
||||
{
|
||||
"name": "PixArt-Sigma-XL-2-1024-MS.safetensors (diffusion)",
|
||||
"type": "diffusion_model",
|
||||
"base": "pixart-sigma",
|
||||
"save_path": "diffusion_models/PixArt-Sigma",
|
||||
"description": "PixArt-Sigma Diffusion model",
|
||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS",
|
||||
"filename": "PixArt-Sigma-XL-2-1024-MS.safetensors",
|
||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
||||
"size": "2.44GB"
|
||||
},
|
||||
{
|
||||
"name": "PixArt-XL-2-1024-MS.safetensors (diffusion)",
|
||||
"type": "diffusion_model",
|
||||
"base": "pixart-alpha",
|
||||
"save_path": "diffusion_models/PixArt-Alpha",
|
||||
"description": "PixArt-Alpha Diffusion model",
|
||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS",
|
||||
"filename": "PixArt-XL-2-1024-MS.safetensors",
|
||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
||||
"size": "2.45GB"
|
||||
},
|
||||
|
        {
            "name": "Comfy-Org/hunyuan_video_t2v_720p_bf16.safetensors",
            "type": "diffusion_model",
            "base": "Hunyuan Video",
            "save_path": "diffusion_models/hunyuan_video",
            "description": "Hunyuan Video diffusion model (repackaged version).",
            "reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
            "filename": "hunyuan_video_t2v_720p_bf16.safetensors",
            "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors",
            "size": "25.6GB"
        },
        {
            "name": "Comfy-Org/hunyuan_video_vae_bf16.safetensors",
            "type": "VAE",
            "base": "Hunyuan Video",
            "save_path": "VAE",
            "description": "Hunyuan Video VAE model (repackaged version).",
            "reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
            "filename": "hunyuan_video_vae_bf16.safetensors",
            "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors",
            "size": "493MB"
        },

        {
            "name": "LTX-Video 2B v0.9.1 Checkpoint",
            "type": "checkpoint",
            "base": "LTX-Video",
            "save_path": "checkpoints/LTXV",
            "description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
            "reference": "https://huggingface.co/Lightricks/LTX-Video",
            "filename": "ltx-video-2b-v0.9.1.safetensors",
            "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.1.safetensors",
            "size": "5.72GB"
        },

        {
            "name": "XLabs-AI/flux-canny-controlnet-v3.safetensors",
            "type": "controlnet",
            "base": "FLUX.1",
            "save_path": "xlabs/controlnets",
            "description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
            "reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
            "filename": "flux-canny-controlnet-v3.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-canny-controlnet-v3.safetensors",
            "size": "1.49GB"
        },
        {
            "name": "XLabs-AI/flux-depth-controlnet-v3.safetensors",
            "type": "controlnet",
            "base": "FLUX.1",
            "save_path": "xlabs/controlnets",
            "description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
            "reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
            "filename": "flux-depth-controlnet-v3.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-depth-controlnet-v3.safetensors",
            "size": "1.49GB"
        },
        {
            "name": "XLabs-AI/flux-hed-controlnet-v3.safetensors",
            "type": "controlnet",
            "base": "FLUX.1",
            "save_path": "xlabs/controlnets",
            "description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
            "reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
            "filename": "flux-hed-controlnet-v3.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
            "size": "1.49GB"
        },

        {
            "name": "XLabs-AI/realism_lora.safetensors",
            "type": "lora",
            "base": "FLUX.1",
            "save_path": "xlabs/loras",
            "description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
            "reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
            "filename": "realism_lora.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/realism_lora.safetensors",
            "size": "44.8MB"
        },
        {
            "name": "XLabs-AI/art_lora.safetensors",
            "type": "lora",
            "base": "FLUX.1",
            "save_path": "xlabs/loras",
            "description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
            "reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
            "filename": "art_lora.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/art_lora.safetensors",
            "size": "44.8MB"
        },
        {
            "name": "XLabs-AI/mjv6_lora.safetensors",
            "type": "lora",
            "base": "FLUX.1",
            "save_path": "xlabs/loras",
            "description": "A checkpoint with trained LoRAs for FLUX.1-dev model by Black Forest Labs",
            "reference": "https://huggingface.co/XLabs-AI/flux-lora-collection",
            "filename": "mjv6_lora.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/mjv6_lora.safetensors",
            "size": "44.8MB"
        },

        {
            "name": "XLabs-AI/flux-ip-adapter",
            "type": "lora",
            "base": "FLUX.1",
            "save_path": "xlabs/ipadapters",
            "description": "IP-Adapter checkpoint for the FLUX.1-dev model by Black Forest Labs",
            "reference": "https://huggingface.co/XLabs-AI/flux-ip-adapter",
            "filename": "ip_adapter.safetensors",
            "url": "https://huggingface.co/XLabs-AI/flux-ip-adapter/resolve/main/ip_adapter.safetensors",
            "size": "982MB"
        },

        {
            "name": "stabilityai/SD3.5-Large-Controlnet-Blur",
            "type": "controlnet",
            "base": "SD3.5",
            "save_path": "controlnet/SD3.5",
            "description": "Blur Controlnet model for SD3.5 Large",
            "reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
            "filename": "sd3.5_large_controlnet_blur.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_blur.safetensors",
            "size": "8.65GB"
        },
        {
            "name": "stabilityai/SD3.5-Large-Controlnet-Canny",
            "type": "controlnet",
            "base": "SD3.5",
            "save_path": "controlnet/SD3.5",
            "description": "Canny Controlnet model for SD3.5 Large",
            "reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
            "filename": "sd3.5_large_controlnet_canny.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors",
            "size": "8.65GB"
        },
        {
            "name": "stabilityai/SD3.5-Large-Controlnet-Depth",
            "type": "controlnet",
            "base": "SD3.5",
            "save_path": "controlnet/SD3.5",
            "description": "Depth Controlnet model for SD3.5 Large",
            "reference": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets",
            "filename": "sd3.5_large_controlnet_depth.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_depth.safetensors",
            "size": "8.65GB"
        },

        {
            "name": "LTX-Video 2B v0.9 Checkpoint",
            "type": "checkpoint",
            "base": "LTX-Video",
            "save_path": "checkpoints/LTXV",
            "description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
            "reference": "https://huggingface.co/Lightricks/LTX-Video",
            "filename": "ltx-video-2b-v0.9.safetensors",
            "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors",
            "size": "9.37GB"
        },
        {
            "name": "InstantX/FLUX.1-dev-IP-Adapter",
            "type": "IP-Adapter",
            "base": "FLUX.1",
            "save_path": "ipadapter-flux",
            "description": "FLUX.1-dev-IP-Adapter",
            "reference": "https://huggingface.co/InstantX/FLUX.1-dev-IP-Adapter",
            "filename": "ip-adapter.bin",
            "url": "https://huggingface.co/InstantX/FLUX.1-dev-IP-Adapter/resolve/main/ip-adapter.bin",
            "size": "5.29GB"
        },

        {
            "name": "Comfy-Org/sigclip_vision_384 (patch14_384)",
            "type": "clip_vision",
            "base": "sigclip",
            "save_path": "clip_vision",
            "description": "This clip vision model is required for FLUX.1 Redux.",
            "reference": "https://huggingface.co/Comfy-Org/sigclip_vision_384/tree/main",
            "filename": "sigclip_vision_patch14_384.safetensors",
            "url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors",
            "size": "857MB"
        }
    ]
}
@@ -10,6 +10,16 @@
        "install_type": "git-clone",
        "description": "A minimal template for creating React/TypeScript frontend extensions for ComfyUI, with complete boilerplate setup including internationalization and unit testing."
    },
    {
        "author": "comfyui-wiki",
        "title": "ComfyUI-i18n-demo",
        "reference": "https://github.com/comfyui-wiki/ComfyUI-i18n-demo",
        "files": [
            "https://github.com/comfyui-wiki/ComfyUI-i18n-demo"
        ],
        "install_type": "git-clone",
        "description": "A demo of i18n support for ComfyUI custom node development"
    },
    {
        "author": "Suzie1",
        "title": "Guide To Making Custom Nodes in ComfyUI",

@@ -331,6 +341,26 @@
        ],
        "description": "Dynamic Node examples for ComfyUI",
        "install_type": "git-clone"
    },
    {
        "author": "Jonathon-Doran",
        "title": "remote-combo-demo",
        "reference": "https://github.com/Jonathon-Doran/remote-combo-demo",
        "files": [
            "https://github.com/Jonathon-Doran/remote-combo-demo"
        ],
        "install_type": "git-clone",
        "description": "A minimal test suite demonstrating how remote COMBO inputs behave in ComfyUI, with and without force_input"
    },
    {
        "author": "J1mB091",
        "title": "ComfyUI-J1mB091 Custom Nodes",
        "reference": "https://github.com/J1mB091/ComfyUI-J1mB091",
        "files": [
            "https://github.com/J1mB091/ComfyUI-J1mB091"
        ],
        "install_type": "git-clone",
        "description": "Vibe Coded ComfyUI Custom Nodes"
    }
]
}
179 openapi.yaml
@@ -18,6 +18,14 @@ security: []

# Common API components
components:
  schemas:
    OperationType:
      type: string
      enum: [install, uninstall, update, update-comfyui, fix, disable, enable, install-model]
      description: Type of operation or task being performed
    OperationResult:
      type: string
      enum: [success, failed, skipped, error, skip]
      description: Result status of an operation (failed/error and skipped/skip are aliases)
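The alias note above is easy to miss, so here is a minimal Python sketch of how a client might normalize `OperationResult` values before comparing them. The mapping is an assumption drawn from the schema description, not from Manager code:

```python
# Hypothetical normalization of OperationResult aliases
# (failed/error and skipped/skip are documented as aliases).
_ALIASES = {"error": "failed", "skip": "skipped"}

def normalize_result(result: str) -> str:
    """Map alias values onto a single canonical form."""
    return _ALIASES.get(result, result)

assert normalize_result("error") == normalize_result("failed")
assert normalize_result("skip") == normalize_result("skipped")
assert normalize_result("success") == "success"
```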
    # Core Task Queue Models
    QueueTaskItem:
      type: object

@@ -29,9 +37,7 @@ components:
          type: string
          description: Client identifier that initiated the task
        kind:
          type: string
          description: Type of task being performed
          enum: [install, uninstall, update, update-all, update-comfyui, fix, disable, enable, install-model]
          $ref: '#/components/schemas/OperationType'
        params:
          oneOf:
            - $ref: '#/components/schemas/InstallPackParams'

@@ -65,14 +71,19 @@ components:
          description: Task result message or details
        status:
          $ref: '#/components/schemas/TaskExecutionStatus'
        batch_id:
          type: [string, 'null']
          description: ID of the batch this task belongs to
        end_time:
          type: [string, 'null']
          format: date-time
          description: ISO timestamp when task execution ended
      required: [ui_id, client_id, kind, timestamp, result]
    TaskExecutionStatus:
      type: object
      properties:
        status_str:
          type: string
          enum: [success, error, skip]
          description: Overall task execution status
          $ref: '#/components/schemas/OperationResult'
        completed:
          type: boolean
          description: Whether the task completed

@@ -223,6 +234,14 @@ components:
      type: string
      enum: [git-clone, copy, cnr]
      description: Type of installation used for the pack
    SecurityLevel:
      type: string
      enum: [strong, normal, normal-, weak]
      description: Security level configuration (from most to least restrictive)
    RiskLevel:
      type: string
      enum: [block, high+, high, middle+, middle]
      description: Risk classification for operations
    ManagerPack:
      allOf:
        - $ref: '#/components/schemas/ManagerPackInfo'

@@ -235,7 +254,7 @@ components:
          type: array
          items:
            type: string
          description: Files included in the pack
          description: Repository URLs for installation (typically contains one GitHub URL)
        reference:
          type: string
          description: The type of installation reference

@@ -366,6 +385,46 @@ components:
          type: string
          description: ComfyUI Node Registry ID of the package to enable
      required: [cnr_id]

    # Query Parameter Models
    UpdateAllQueryParams:
      type: object
      properties:
        client_id:
          type: string
          description: Client identifier that initiated the request
        ui_id:
          type: string
          description: Base UI identifier for task tracking
        mode:
          $ref: '#/components/schemas/ManagerDatabaseSource'
      required: [client_id, ui_id]
    UpdateComfyUIQueryParams:
      type: object
      properties:
        client_id:
          type: string
          description: Client identifier that initiated the request
        ui_id:
          type: string
          description: UI identifier for task tracking
        stable:
          type: boolean
          default: true
          description: Whether to update to stable version (true) or nightly (false)
      required: [client_id, ui_id]
    ComfyUISwitchVersionQueryParams:
      type: object
      properties:
        ver:
          type: string
          description: Version to switch to
        client_id:
          type: string
          description: Client identifier that initiated the request
        ui_id:
          type: string
          description: UI identifier for task tracking
      required: [ver, client_id, ui_id]

    # Queue Status Models
    QueueStatus:
      type: object

@@ -580,9 +639,7 @@ components:
          type: string
          description: Unique operation identifier
        operation_type:
          type: string
          description: Type of operation
          enum: [install, update, uninstall, fix, disable, enable, install-model]
          $ref: '#/components/schemas/OperationType'
        target:
          type: string
          description: Target of the operation (node name, model name, etc.)

@@ -590,9 +647,7 @@ components:
          type: [string, 'null']
          description: Target version for the operation
        result:
          type: string
          description: Operation result
          enum: [success, failed, skipped]
          $ref: '#/components/schemas/OperationResult'
        error_message:
          type: [string, 'null']
          description: Error message if operation failed

@@ -640,6 +695,45 @@ components:
          type: object
          additionalProperties: true
          description: ComfyUI Manager configuration settings
        comfyui_root_path:
          type: [string, 'null']
          description: ComfyUI root installation directory
        model_paths:
          type: object
          additionalProperties:
            type: array
            items:
              type: string
          description: Map of model types to their configured paths
        manager_version:
          type: [string, 'null']
          description: ComfyUI Manager version
        security_level:
          $ref: '#/components/schemas/SecurityLevel'
        network_mode:
          type: [string, 'null']
          description: Network mode (online, offline, private)
        cli_args:
          type: object
          additionalProperties: true
          description: Selected ComfyUI CLI arguments
        custom_nodes_count:
          type: [integer, 'null']
          description: Total number of custom node packages
          minimum: 0
        failed_imports:
          type: array
          items:
            type: string
          description: List of custom nodes that failed to import
        pip_packages:
          type: object
          additionalProperties:
            type: string
          description: Map of installed pip packages to their versions
        embedded_python:
          type: [boolean, 'null']
          description: Whether ComfyUI is running from an embedded Python distribution
      required: [snapshot_time, comfyui_version, python_version, platform_info]
    BatchExecutionRecord:
      type: object

@@ -688,6 +782,39 @@ components:
          minimum: 0
          default: 0
      required: [batch_id, start_time, state_before]

    ImportFailInfoBulkRequest:
      type: object
      properties:
        cnr_ids:
          type: array
          items:
            type: string
          description: A list of CNR IDs to check.
        urls:
          type: array
          items:
            type: string
          description: A list of repository URLs to check.

    ImportFailInfoBulkResponse:
      type: object
      additionalProperties:
        $ref: '#/components/schemas/ImportFailInfoItem'
      description: >-
        A dictionary where each key is a cnr_id or url from the request,
        and the value is the corresponding error info.

    ImportFailInfoItem:
      oneOf:
        - type: object
          properties:
            error:
              type: string
            traceback:
              type: string
        - type: "null"

  securitySchemes:
    securityLevel:
      type: apiKey

@@ -923,6 +1050,32 @@ paths:
          description: Processing started
        '201':
          description: Processing already in progress

  /v2/customnode/import_fail_info_bulk:
    post:
      summary: Get import failure info for multiple nodes
      description: Retrieves recorded import failure information for a list of custom nodes.
      tags:
        - customnode
      requestBody:
        description: A list of CNR IDs or repository URLs to check.
        required: true
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/ImportFailInfoBulkRequest'
      responses:
        '200':
          description: A dictionary containing the import failure information.
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/ImportFailInfoBulkResponse'
        '400':
          description: Bad Request. The request body is invalid.
        '500':
          description: Internal Server Error.

  /v2/manager/queue/reset:
    get:
      summary: Reset queue
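For reference, a minimal sketch of calling the new bulk endpoint from Python using only the standard library. The payload and response shapes follow ImportFailInfoBulkRequest/Response above; ComfyUI's default address 127.0.0.1:8188 and the example IDs are assumptions:

```python
import json
import urllib.request

# cnr_ids / urls are the two request keys defined by ImportFailInfoBulkRequest;
# the concrete values here are hypothetical.
payload = {
    "cnr_ids": ["comfyui-impact-pack"],
    "urls": ["https://github.com/example/some-node"],
}

req = urllib.request.Request(
    "http://127.0.0.1:8188/v2/customnode/import_fail_info_bulk",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(req) as resp:
    # Per ImportFailInfoBulkResponse, each key maps to
    # {"error": ..., "traceback": ...} or null.
    info = json.load(resp)

for key, item in info.items():
    if item is None:
        print(f"{key}: no recorded import failure")
    else:
        print(f"{key}: {item['error']}")
```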
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "comfyui-manager"
license = { text = "GPL-3.0-only" }
version = "4.0.0-beta.4"
version = "4.0.2"
requires-python = ">= 3.9"
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
readme = "README.md"

@@ -19,7 +19,7 @@ maintainers = [
]

classifiers = [
    "Development Status :: 4 - Beta",
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
]

@@ -27,7 +27,7 @@ classifiers = [
dependencies = [
    "GitPython",
    "PyGithub",
    "matrix-client==0.4.0",
    # "matrix-nio",
    "transformers",
    "huggingface-hub>0.20",
    "typer",
13 pytest.ini
@@ -1,13 +0,0 @@
[tool:pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
markers =
    slow: marks tests as slow (deselect with '-m "not slow"')
    integration: marks tests as integration tests
@@ -1,6 +1,6 @@
GitPython
PyGithub
matrix-client==0.4.0
# matrix-nio
transformers
huggingface-hub>0.20
typer

42 run_tests.py
@@ -1,42 +0,0 @@
#!/usr/bin/env python3
"""
Simple test runner for ComfyUI-Manager tests.

Usage:
    python run_tests.py                     # Run all tests
    python run_tests.py -k test_task_queue  # Run specific tests
    python run_tests.py --cov               # Run with coverage
"""

import sys
import subprocess
from pathlib import Path

def main():
    """Run pytest with appropriate arguments"""
    # Ensure we're in the project directory
    project_root = Path(__file__).parent

    # Base pytest command
    cmd = [sys.executable, "-m", "pytest"]

    # Add any command line arguments passed to this script
    cmd.extend(sys.argv[1:])

    # Add default arguments if none provided
    if len(sys.argv) == 1:
        cmd.extend([
            "tests/",
            "-v",
            "--tb=short"
        ])

    print(f"Running: {' '.join(cmd)}")
    print(f"Working directory: {project_root}")

    # Run pytest
    result = subprocess.run(cmd, cwd=project_root)
    sys.exit(result.returncode)

if __name__ == "__main__":
    main()
15 scanner.py
@@ -255,13 +255,13 @@ def clone_or_pull_git_repository(git_url):
            repo.git.submodule('update', '--init', '--recursive')
            print(f"Pulling {repo_name}...")
        except Exception as e:
            print(f"Pulling {repo_name} failed: {e}")
            print(f"Failed to pull '{repo_name}': {e}")
    else:
        try:
            Repo.clone_from(git_url, repo_dir, recursive=True)
            print(f"Cloning {repo_name}...")
        except Exception as e:
            print(f"Cloning {repo_name} failed: {e}")
            print(f"Failed to clone '{repo_name}': {e}")


def update_custom_nodes():

@@ -496,8 +496,15 @@ def gen_json(node_info):
            nodes_in_url, metadata_in_url = data[git_url]
            nodes = set(nodes_in_url)

            for x, desc in node_list_json.items():
                nodes.add(x.strip())
            try:
                for x, desc in node_list_json.items():
                    nodes.add(x.strip())
            except Exception as e:
                print(f"\nERROR: Invalid json format '{node_list_json_path}'")
                print("------------------------------------------------------")
                print(e)
                print("------------------------------------------------------")
                node_list_json = {}

            metadata_in_url['title_aux'] = title

34 tests/.gitignore vendored Normal file
@@ -0,0 +1,34 @@
# Test environment and artifacts

# Virtual environment
test_venv/
venv/
env/

# pytest cache
.pytest_cache/
__pycache__/
*.pyc
*.pyo

# Coverage reports (module-specific naming)
.coverage
.coverage.*
htmlcov*/
coverage*.xml
*.cover

# Test artifacts
.tox/
.hypothesis/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~

# OS
.DS_Store
Thumbs.db
214 tests/README.md
@@ -1,89 +1,181 @@
# ComfyUI-Manager Tests
# ComfyUI Manager Test Suite

This directory contains unit tests for ComfyUI-Manager components.
This directory contains all tests for the ComfyUI Manager project, organized by module structure.

## Running Tests
## Directory Structure

### Using the Virtual Environment

```bash
# From the project root
/path/to/comfyui/.venv/bin/python -m pytest tests/ -v
```
tests/
├── setup_test_env.sh      # Setup isolated test environment
├── requirements.txt       # Test dependencies
├── pytest.ini             # Global pytest configuration
├── .gitignore             # Ignore test artifacts
│
└── common/                # Tests for comfyui_manager/common/
    └── pip_util/          # Tests for pip_util.py
        ├── README.md      # pip_util test documentation
        ├── conftest.py    # pip_util test fixtures
        ├── pytest.ini     # pip_util-specific pytest config
        └── test_*.py      # Actual test files (to be created)
```

### Using the Test Runner
## Quick Start

### 1. Setup Test Environment (One Time)

```bash
# Run all tests
python run_tests.py

# Run specific tests
python run_tests.py -k test_task_queue

# Run with coverage
python run_tests.py --cov
cd tests
./setup_test_env.sh
```

## Test Structure
This creates an isolated virtual environment with all test dependencies.

### test_task_queue.py
### 2. Run Tests

Comprehensive tests for the TaskQueue functionality including:
```bash
# Activate test environment
source test_venv/bin/activate

- **Basic Operations**: Initialization, adding/removing tasks, state management
- **Batch Tracking**: Automatic batch creation, history saving, finalization
- **Thread Safety**: Concurrent access, worker lifecycle management
- **Integration Testing**: Full task processing workflow
- **Edge Cases**: Empty queues, invalid data, exception handling
# Run all tests from root
cd tests
pytest

**Key Features Tested:**
- ✅ Task queueing with Pydantic model validation
- ✅ Batch history tracking and persistence
- ✅ Thread-safe concurrent operations
- ✅ Worker thread lifecycle management
- ✅ WebSocket message tracking
- ✅ State snapshots and transitions
# Run specific module tests
cd tests/common/pip_util
pytest

### MockTaskQueue
# Deactivate when done
deactivate
```

The tests use a `MockTaskQueue` class that:
- Isolates testing from global state and external dependencies
- Provides dependency injection for mocking external services
- Maintains the same API as the real TaskQueue
- Supports both synchronous and asynchronous testing patterns
## Test Organization

Tests mirror the source code structure:

| Source Code | Test Location |
|-------------|---------------|
| `comfyui_manager/common/pip_util.py` | `tests/common/pip_util/test_*.py` |
| `comfyui_manager/common/other.py` | `tests/common/other/test_*.py` |
| `comfyui_manager/module/file.py` | `tests/module/file/test_*.py` |

## Writing Tests

1. Create test directory matching source structure
2. Add `conftest.py` for module-specific fixtures
3. Add `pytest.ini` for module-specific configuration (optional)
4. Create `test_*.py` files with actual tests
5. Document in module-specific README

## Test Categories

- **Unit Tests**: Individual method testing with mocked dependencies
- **Integration Tests**: Full workflow testing with real threading
- **Concurrency Tests**: Multi-threaded access verification
- **Edge Case Tests**: Error conditions and boundary cases
Use pytest markers to categorize tests:

## Dependencies
```python
@pytest.mark.unit
def test_simple_function():
    pass

Tests require:
- `pytest` - Test framework
- `pytest-asyncio` - Async test support
- `pydantic` - Data model validation
@pytest.mark.integration
def test_complex_workflow():
    pass

Install with: `pip install -e ".[dev]"`
@pytest.mark.e2e
def test_full_system():
    pass
```

## Design Notes
Run by category:
```bash
pytest -m unit         # Only unit tests
pytest -m integration  # Only integration tests
pytest -m e2e          # Only end-to-end tests
```

### Handling Singleton Pattern
## Coverage Reports

The real TaskQueue uses a singleton pattern which makes testing challenging. The MockTaskQueue avoids this by:
- Not setting global instance variables
- Creating fresh instances per test
- Providing controlled dependency injection
Coverage reports are generated per module:

### Thread Management
```bash
cd tests/common/pip_util
pytest  # Generates htmlcov_pip_util/ and coverage_pip_util.xml
```

Tests handle threading complexities by:
- Using controlled mock workers for predictable behavior
- Providing synchronization primitives for timing-sensitive tests
- Testing both successful workflows and exception scenarios
## Environment Isolation

### Heapq Compatibility
**Why use venv?**
- ✅ Prevents test dependencies from corrupting main environment
- ✅ Allows safe package installation/uninstallation during tests
- ✅ Consistent test results across machines
- ✅ Easy to recreate clean environment

The original TaskQueue uses `heapq` with Pydantic models, which don't support comparison by default. Tests solve this by wrapping items in comparable tuples with priority values, maintaining FIFO order while enabling heap operations.
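For illustration, a minimal sketch of the tuple-wrapping approach that paragraph describes. `TaskItem` and the helper names are illustrative stand-ins, not the real QueueTaskItem API:

```python
import heapq
import itertools
from pydantic import BaseModel

class TaskItem(BaseModel):  # hypothetical stand-in for the real task model
    ui_id: str
    kind: str

_counter = itertools.count()  # monotonic tie-breaker preserves FIFO order
heap: list[tuple[int, int, TaskItem]] = []

def push(item: TaskItem, priority: int = 0) -> None:
    # The counter guarantees the heap never has to compare two TaskItem
    # instances, which Pydantic models do not support by default.
    heapq.heappush(heap, (priority, next(_counter), item))

def pop() -> TaskItem:
    return heapq.heappop(heap)[2]

push(TaskItem(ui_id="a", kind="install"))
push(TaskItem(ui_id="b", kind="update"))
assert pop().ui_id == "a"  # FIFO among equal priorities
```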
## Available Test Modules

- **[common/pip_util](common/pip_util/)** - Policy-based pip package management system tests
  - Unit tests for policy loading, parsing, condition evaluation
  - Integration tests for policy application (60% of tests)
  - End-to-end workflow tests

## Adding New Test Modules

1. Create directory structure: `tests/module_path/component_name/`
2. Add `conftest.py` with fixtures
3. Add `pytest.ini` if needed (optional)
4. Add `README.md` documenting the tests
5. Create `test_*.py` files

Example:
```bash
mkdir -p tests/data_models/config
cd tests/data_models/config
touch conftest.py README.md test_config_loader.py
```

## CI/CD Integration

Tests are designed to run in CI/CD pipelines:

```yaml
# Example GitHub Actions
- name: Setup test environment
  run: |
    cd tests
    ./setup_test_env.sh

- name: Run tests
  run: |
    source tests/test_venv/bin/activate
    pytest tests/
```

## Troubleshooting

### Import errors
```bash
# Make sure venv is activated
source test_venv/bin/activate

# Verify Python path
python -c "import sys; print(sys.path)"
```

### Tests not discovered
```bash
# Check pytest configuration
pytest --collect-only

# Verify test file naming (must start with test_)
ls test_*.py
```

### Clean rebuild
```bash
# Remove and recreate test environment
rm -rf test_venv/
./setup_test_env.sh
```

## Resources

- **pytest Documentation**: https://docs.pytest.org/
- **Coverage.py**: https://coverage.readthedocs.io/
- **Module-specific READMEs**: Check each test module directory

@@ -1 +0,0 @@
"""Test suite for ComfyUI-Manager"""
423 tests/common/pip_util/CONTEXT_FILES_GUIDE.md Normal file
@@ -0,0 +1,423 @@
# Context Files Guide for pip_util Tests

Quick reference for all context files created for extending pip_util tests.

---

## 📋 File Overview

| File | Purpose | When to Use |
|------|---------|-------------|
| **DEPENDENCY_TREE_CONTEXT.md** | Complete dependency trees with version analysis | Adding new test packages or updating scenarios |
| **DEPENDENCY_ANALYSIS.md** | Analysis methodology and findings | Understanding why packages were chosen |
| **TEST_SCENARIOS.md** | Detailed test specifications | Writing new tests or understanding existing ones |
| **analyze_dependencies.py** | Interactive dependency analyzer | Exploring new packages before adding tests |
| **requirements-test-base.txt** | Base test environment packages | Setting up or modifying test environment |

---

## 🎯 Common Tasks

### Task 1: Adding a New Test Package

**Steps**:

1. **Analyze the package**:
   ```bash
   python analyze_dependencies.py NEW_PACKAGE
   ```

2. **Check size and dependencies**:
   ```bash
   ./test_venv/bin/pip download --no-deps NEW_PACKAGE
   ls -lh NEW_PACKAGE*.whl  # Check size
   ```

3. **Verify dependency tree**:
   - Open **DEPENDENCY_TREE_CONTEXT.md**
   - Follow "Adding New Test Scenarios" section
   - Document findings in the file

4. **Update requirements** (if pre-installation needed):
   - Add to `requirements-test-base.txt`
   - Run `./setup_test_env.sh` to recreate venv

5. **Write test**:
   - Follow patterns in `test_dependency_protection.py`
   - Use `reset_test_venv` fixture
   - Add scenario to **TEST_SCENARIOS.md**

6. **Verify**:
   ```bash
   pytest test_YOUR_NEW_TEST.py -v --override-ini="addopts="
   ```

---

### Task 2: Understanding Existing Tests

**Steps**:

1. **Read test scenario**:
   - Open **TEST_SCENARIOS.md**
   - Find your scenario (1-6)
   - Review initial state, action, expected result

2. **Check dependency details**:
   - Open **DEPENDENCY_TREE_CONTEXT.md**
   - Look up package in table of contents
   - Review dependency tree and version analysis

3. **Run analysis**:
   ```bash
   python analyze_dependencies.py PACKAGE
   ```

4. **Examine test code**:
   - Open relevant test file
   - Check policy fixture
   - Review assertions

---

### Task 3: Updating for New Package Versions

**When**: PyPI releases major version updates (e.g., urllib3 3.0)

**Steps**:

1. **Check current environment**:
   ```bash
   python analyze_dependencies.py --env
   ```

2. **Analyze new versions**:
   ```bash
   ./test_venv/bin/pip index versions PACKAGE | head -20
   python analyze_dependencies.py PACKAGE
   ```

3. **Update context files**:
   - Update version numbers in **DEPENDENCY_TREE_CONTEXT.md**
   - Update "Version Analysis" section
   - Document breaking changes

4. **Test with new versions**:
   - Update `requirements-test-base.txt` (if testing new base version)
   - OR update test to verify protection from new version
   - Run tests to verify behavior

5. **Update scenarios**:
   - Update **TEST_SCENARIOS.md** with new version numbers
   - Update expected results if behavior changed

---

### Task 4: Debugging Dependency Issues

**Problem**: Test fails with unexpected dependency versions

**Steps**:

1. **Check what's installed**:
   ```bash
   ./test_venv/bin/pip freeze | grep -E "(urllib3|certifi|six|requests)"
   ```

2. **Analyze what would install**:
   ```bash
   python analyze_dependencies.py PACKAGE
   ```

3. **Compare with expected**:
   - Open **DEPENDENCY_TREE_CONTEXT.md**
   - Check "Install Scenarios" for the package
   - Compare actual vs. expected

4. **Check for PyPI changes**:
   ```bash
   ./test_venv/bin/pip index versions PACKAGE
   ```

5. **Verify test environment**:
   ```bash
   rm -rf test_venv && ./setup_test_env.sh
   pytest test_FILE.py -v --override-ini="addopts="
   ```

---

## 📚 Context File Details

### DEPENDENCY_TREE_CONTEXT.md

**Contents**:
- Current test environment snapshot
- Complete dependency trees for all test packages
- Version analysis (current vs. latest)
- Upgrade scenarios matrix
- Guidelines for adding new scenarios
- Quick reference tables

**Use when**:
- Adding new test package
- Understanding why a package was chosen
- Checking version compatibility
- Updating for new PyPI releases

**Key sections**:
- Package Dependency Trees → See what each package depends on
- Version Analysis → Understand version gaps and breaking changes
- Adding New Test Scenarios → Step-by-step guide

---

### DEPENDENCY_ANALYSIS.md

**Contents**:
- Detailed analysis of each test scenario
- Real dependency verification using `pip --dry-run`
- Version difference analysis
- Rejected scenarios (and why)
- Package size verification
- Recommendations for implementation

**Use when**:
- Understanding test design decisions
- Evaluating new package candidates
- Reviewing why certain packages were rejected
- Learning the analysis methodology

**Key sections**:
- Test Scenarios with Real Dependencies → Detailed scenarios
- Rejected Scenarios → What NOT to use (e.g., click+colorama)
- Validation Commands → How to verify analysis

---

### TEST_SCENARIOS.md

**Contents**:
- Complete specifications for scenarios 1-6
- Exact package versions and states
- Policy configurations (JSON)
- Expected pip commands
- Expected final states
- Key points for each scenario

**Use when**:
- Writing new tests
- Understanding test expectations
- Debugging test failures
- Documenting new scenarios

**Key sections**:
- Each scenario section → Complete specification
- Summary tables → Quick reference
- Policy types summary → Available policy options

---

### analyze_dependencies.py

**Features**:
- Interactive package analysis
- Dry-run simulation
- Version comparison
- Pin impact analysis

**Use when**:
- Exploring new packages
- Verifying current environment
- Checking upgrade impacts
- Quick dependency checks

**Commands**:
```bash
# Analyze specific package
python analyze_dependencies.py requests

# Analyze all test packages
python analyze_dependencies.py --all

# Show current environment
python analyze_dependencies.py --env
```

---

### requirements-test-base.txt

**Contents**:
- Base packages for test environment
- Version specifications
- Comments explaining each package's purpose

**Use when**:
- Setting up test environment
- Adding pre-installed packages
- Modifying base versions
- Recreating clean environment

**Format**:
```txt
# Scenario X: Purpose
package==version  # Comment explaining role
```

---

## 🔄 Workflow Examples

### Example 1: Adding flask Test

```bash
# 1. Analyze flask
python analyze_dependencies.py flask

# Output shows:
# Would install: Flask, Jinja2, MarkupSafe, Werkzeug, blinker, click, itsdangerous

# 2. Check sizes
./test_venv/bin/pip download --no-deps flask jinja2 werkzeug
ls -lh *.whl

# 3. Document in DEPENDENCY_TREE_CONTEXT.md
# Add section:
### 3. flask → Dependencies
**Package**: `flask==3.1.2`
**Size**: ~100KB
...

# 4. Write test
# Create test_flask_dependencies.py

# 5. Test
pytest test_flask_dependencies.py -v --override-ini="addopts="
```

---

### Example 2: Investigating Test Failure

```bash
# Test failed: "urllib3 version mismatch"

# 1. Check installed
./test_venv/bin/pip freeze | grep urllib3
# Output: urllib3==2.5.0 (expected: 1.26.15)

# 2. Analyze what happened
python analyze_dependencies.py requests

# 3. Check context
# Open DEPENDENCY_TREE_CONTEXT.md
# Section: "urllib3: Major Version Jump"
# Confirms: 1.26.15 → 2.5.0 is expected without pin

# 4. Verify test has pin
# Check test_dependency_protection.py for pin_policy fixture

# 5. Reset environment
rm -rf test_venv && ./setup_test_env.sh

# 6. Re-run test
pytest test_dependency_protection.py -v --override-ini="addopts="
```

---

## 🎓 Best Practices

### When Adding New Tests

✅ **DO**:
- Use `analyze_dependencies.py` first
- Document in **DEPENDENCY_TREE_CONTEXT.md**
- Add scenario to **TEST_SCENARIOS.md**
- Verify with real pip operations
- Keep packages lightweight (<500KB total)

❌ **DON'T**:
- Add packages without verifying dependencies
- Use packages with optional dependencies only
- Add heavy packages (>1MB)
- Skip documentation
- Mock subprocess for integration tests

---

### When Updating Context

✅ **DO**:
- Re-run `analyze_dependencies.py --all`
- Update version numbers throughout
- Document breaking changes
- Test after updates
- Note update date

❌ **DON'T**:
- Update only one file
- Skip verification
- Forget to update TEST_SCENARIOS.md
- Leave outdated version numbers

---

## 🆘 Quick Troubleshooting

| Problem | Check | Solution |
|---------|-------|----------|
| Test fails with version mismatch | `pip freeze` | Recreate venv with `./setup_test_env.sh` |
| Package not found | `pip index versions PKG` | Check if package exists on PyPI |
| Unexpected dependencies | `analyze_dependencies.py PKG` | Review dependency tree in context file |
| Wrong test data | **TEST_SCENARIOS.md** | Verify against documented scenario |
| Unclear why package chosen | **DEPENDENCY_ANALYSIS.md** | Read "Rejected Scenarios" section |

---

## 📞 Need Help?

1. **Check context files first**: Most answers are documented
2. **Run analyze_dependencies.py**: Verify current state
3. **Review test scenarios**: Understand expected behavior
4. **Examine dependency trees**: Understand relationships
5. **Check DEPENDENCY_ANALYSIS.md**: Learn the "why" behind decisions

---

## 📝 Maintenance Checklist

**Every 6 months or when major versions release**:

- [ ] Run `python analyze_dependencies.py --all`
- [ ] Check for new major versions: `pip index versions urllib3 certifi six`
- [ ] Update **DEPENDENCY_TREE_CONTEXT.md** version numbers
- [ ] Update **TEST_SCENARIOS.md** expected versions
- [ ] Test all scenarios: `pytest -v --override-ini="addopts="`
- [ ] Document any breaking changes
- [ ] Update this guide if workflow changed

---

## 🔗 File Relationships

```
requirements-test-base.txt
    ↓ (defines)
Current Test Environment
    ↓ (analyzed by)
analyze_dependencies.py
    ↓ (documents)
DEPENDENCY_TREE_CONTEXT.md
    ↓ (informs)
TEST_SCENARIOS.md
    ↓ (implemented in)
test_*.py files
```

---

**Last Updated**: 2025-10-01
**Python Version**: 3.12.3
**pip Version**: 25.2
261 tests/common/pip_util/DEPENDENCY_ANALYSIS.md Normal file
@@ -0,0 +1,261 @@
# pip_util Test Package Dependency Analysis

Real dependency analysis using `pip install --dry-run` to verify meaningful test scenarios.

## Analysis Date

Generated: 2025-10-01
Tool: `pip install --dry-run --ignore-installed`

## Test Scenarios with Real Dependencies

### Scenario 1: Dependency Version Protection (requests + urllib3)

**Purpose**: Verify pin_dependencies prevents unwanted upgrades

**Initial Environment**:
```
urllib3==1.26.15
certifi==2023.7.22
charset-normalizer==3.2.0
```

**Without pin** (`pip install requests`):
```bash
Would install:
  certifi-2025.8.3          # UPGRADED from 2023.7.22 (+2 years)
  charset-normalizer-3.4.3  # UPGRADED from 3.2.0 (minor)
  idna-3.10                 # NEW dependency
  requests-2.32.5           # NEW package
  urllib3-2.5.0             # UPGRADED from 1.26.15 (MAJOR 1.x→2.x!)
```

**With pin** (`pip install requests urllib3==1.26.15 certifi==2023.7.22 charset-normalizer==3.2.0`):
```bash
Would install:
  idna-3.10        # NEW dependency (required by requests)
  requests-2.32.5  # NEW package

# Pinned packages stay at old versions:
urllib3==1.26.15           ✅ PROTECTED (prevented 1.x→2.x jump)
certifi==2023.7.22         ✅ PROTECTED
charset-normalizer==3.2.0  ✅ PROTECTED
```

**Key Finding**:
- `urllib3` 1.26.15 → 2.5.0 is a **MAJOR version jump** (breaking changes!)
- requests accepts both: `urllib3<3,>=1.21.1` (compatible with 1.x and 2.x)
- Pin successfully prevents unwanted major upgrade
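A minimal sketch of the pinning technique these commands verify: passing explicit `pkg==ver` specifiers alongside the target package in a single pip invocation so the resolver cannot upgrade them. The helper is illustrative; the real pip_util API may differ:

```python
import subprocess
import sys

def install_with_pins(target: str, pins: dict[str, str]) -> None:
    # One pip invocation with "pkg==ver" specifiers keeps the resolver
    # from upgrading the pinned dependencies while installing the target.
    cmd = [sys.executable, "-m", "pip", "install", target]
    cmd += [f"{name}=={version}" for name, version in pins.items()]
    subprocess.run(cmd, check=True)

# Scenario 1: install requests while protecting the old dependency set.
install_with_pins("requests", {
    "urllib3": "1.26.15",
    "certifi": "2023.7.22",
    "charset-normalizer": "3.2.0",
})
```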
---

### Scenario 2: Package with Dependency (python-dateutil + six)

**Purpose**: Verify pin_dependencies with dependency chain

**Analysis**:
```bash
$ pip install --dry-run python-dateutil

Would install:
  python-dateutil-2.9.0.post0
  six-1.17.0  # DEPENDENCY
```

**Initial Environment**:
```
six==1.16.0  # Older version
```

**Without pin** (`pip install python-dateutil`):
```bash
Would install:
  python-dateutil-2.9.0.post0
  six-1.17.0  # UPGRADED from 1.16.0
```

**With pin** (`pip install python-dateutil six==1.16.0`):
```bash
Would install:
  python-dateutil-2.9.0.post0

# Pinned package:
six==1.16.0  ✅ PROTECTED
```

---

### Scenario 3: Package Deletion and Restore (six)

**Purpose**: Verify restore policy reinstalls deleted packages

**Initial Environment**:
```
six==1.16.0
attrs==23.1.0
packaging==23.1
```

**Action Sequence**:
1. Delete six: `pip uninstall -y six`
2. Verify deletion: `pip freeze | grep six` (empty)
3. Restore: `batch.ensure_installed()` → `pip install six==1.16.0`

**Expected Result**:
```
six==1.16.0  # ✅ RESTORED
```

---

### Scenario 4: Version Change and Restore (urllib3)

**Purpose**: Verify restore policy reverts version changes

**Initial Environment**:
```
urllib3==1.26.15
```

**Action Sequence**:
1. Upgrade: `pip install urllib3==2.5.0`
2. Verify change: `pip freeze | grep urllib3` → `urllib3==2.5.0`
3. Restore: `batch.ensure_installed()` → `pip install urllib3==1.26.15`

**Expected Result**:
```
urllib3==1.26.15  # ✅ RESTORED (downgraded from 2.5.0)
```

**Key Finding**:
- Downgrade from 2.x to 1.x requires explicit version specification
- pip allows downgrades with `pip install urllib3==1.26.15`

---

## Rejected Scenarios

### click + colorama (NO REAL DEPENDENCY)

**Analysis**:
```bash
$ pip install --dry-run click
Would install: click-8.3.0

$ pip install --dry-run click colorama==0.4.6
Would install: click-8.3.0  # colorama not installed!
```

**Finding**: click has **NO direct dependency** on colorama
- colorama is **optional** and platform-specific (Windows only)
- Not a good test case for dependency protection

**Recommendation**: Use python-dateutil + six instead

---

## Package Size Verification

```bash
Package              Size     Version     Purpose
-------------------------------------------------------
urllib3              ~140KB   1.26.15     Protected dependency
certifi              ~158KB   2023.7.22   SSL certificates
charset-normalizer   ~46KB    3.2.0       Charset detection
idna                 ~69KB    3.10        NEW dep from requests
requests             ~100KB   2.32.5      Main package to install
six                  ~11KB    1.16.0      Restore test
python-dateutil      ~280KB   2.9.0       Depends on six
attrs                ~61KB    23.1.0      Bystander
packaging            ~48KB    23.1        Bystander
-------------------------------------------------------
Total                ~913KB (< 1MB)       ✅ All lightweight
```

---

## Dependency Graph

```
requests 2.32.5
├── charset_normalizer<4,>=2  (have: 3.2.0)
├── idna<4,>=2.5              (need: 3.10) ← NEW
├── urllib3<3,>=1.21.1        (have: 1.26.15, latest: 2.5.0)
└── certifi>=2017.4.17        (have: 2023.7.22, latest: 2025.8.3)

python-dateutil 2.9.0
└── six>=1.5                  (have: 1.16.0, latest: 1.17.0)
```

---

## Version Compatibility Matrix

| Package | Old Version | Latest | Spec | Compatible? |
|---------|------------|--------|------|-------------|
| urllib3 | 1.26.15 | 2.5.0 | <3,>=1.21.1 | ✅ Both work |
| certifi | 2023.7.22 | 2025.8.3 | >=2017.4.17 | ✅ Both work |
| charset-normalizer | 3.2.0 | 3.4.3 | <4,>=2 | ✅ Both work |
| six | 1.16.0 | 1.17.0 | >=1.5 | ✅ Both work |
| idna | (none) | 3.10 | <4,>=2.5 | ⚠️ Must install |

---

## Test Data Justification

### Why urllib3 1.26.15?
1. **Real world scenario**: Many projects pin urllib3<2 to avoid breaking changes
2. **Meaningful test**: 1.26.15 → 2.5.0 is a major version jump (API changes)
3. **Compatibility**: requests accepts both 1.x and 2.x (good for testing)

### Why certifi 2023.7.22?
1. **Real world scenario**: Older environment with outdated SSL certificates
2. **Meaningful test**: 2-year version gap (2023 → 2025)
3. **Safety**: Still compatible with requests

### Why six 1.16.0?
1. **Lightweight**: Only 11KB
2. **Real dependency**: python-dateutil actually depends on it
3. **Stable**: six is mature and rarely changes

---

## Recommendations for Test Implementation

### ✅ Keep These Scenarios:
1. **requests + urllib3 pin** - Real major version protection
2. **python-dateutil + six** - Real dependency chain
3. **six deletion/restore** - Real package management
4. **urllib3 version change** - Real downgrade scenario

### ❌ Remove These Scenarios:
1. **click + colorama** - No real dependency (colorama is optional/Windows-only)

### 📝 Update Required Files:
1. `requirements-test-base.txt` - Add idna (new dependency from requests)
2. `TEST_SCENARIOS.md` - Update with real dependency analysis
3. `test_dependency_protection.py` - Remove click-colorama test
4. `pip_util.design.en.md` - Update examples with verified dependencies

---

## Validation Commands

Run these to verify analysis:

```bash
# Check current environment
./test_venv/bin/pip freeze

# Simulate requests installation without pin
./test_venv/bin/pip install --dry-run requests

# Simulate requests installation with pin
./test_venv/bin/pip install --dry-run requests urllib3==1.26.15 certifi==2023.7.22 charset-normalizer==3.2.0

# Check python-dateutil dependencies
./test_venv/bin/pip install --dry-run python-dateutil

# Verify urllib3 version availability
./test_venv/bin/pip index versions urllib3 | head -20
```
413 tests/common/pip_util/DEPENDENCY_TREE_CONTEXT.md Normal file
@@ -0,0 +1,413 @@
# Dependency Tree Context for pip_util Tests

**Generated**: 2025-10-01
**Tool**: `pip install --dry-run --ignore-installed`
**Python**: 3.12.3
**pip**: 25.2

This document provides detailed dependency tree information for all test packages, verified against real PyPI data. Use this as a reference when extending tests.

---

## Table of Contents

1. [Current Test Environment](#current-test-environment)
2. [Package Dependency Trees](#package-dependency-trees)
3. [Version Analysis](#version-analysis)
4. [Upgrade Scenarios](#upgrade-scenarios)
5. [Adding New Test Scenarios](#adding-new-test-scenarios)

---

## Current Test Environment

**Base packages installed in test_venv** (from `requirements-test-base.txt`):

```
urllib3==1.26.15           # Protected from 2.x upgrade
certifi==2023.7.22         # Protected from 2025.x upgrade
charset-normalizer==3.2.0  # Protected from 3.4.x upgrade
six==1.16.0                # For deletion/restore tests
attrs==23.1.0              # Bystander package
packaging==23.1            # Bystander package
pytest==8.4.2              # Test framework
```

**Total environment size**: ~913KB (all packages < 1MB)

---

## Package Dependency Trees

### 1. requests → Dependencies

**Package**: `requests==2.32.5`
**Size**: ~100KB
**Purpose**: Main test package for dependency protection

#### Dependency Tree

```
requests==2.32.5
├── charset-normalizer<4,>=2
│   └── 3.2.0 (OLD) → 3.4.3 (LATEST)
├── idna<4,>=2.5
│   └── (NOT INSTALLED) → 3.10 (LATEST)
├── urllib3<3,>=1.21.1
│   └── 1.26.15 (OLD) → 2.5.0 (LATEST) ⚠️ MAJOR VERSION JUMP
└── certifi>=2017.4.17
    └── 2023.7.22 (OLD) → 2025.8.3 (LATEST)
```

#### Install Scenarios

**Scenario A: Without constraints (fresh install)**
```bash
$ pip install --dry-run --ignore-installed requests

Would install:
  certifi-2025.8.3          # Latest version
  charset-normalizer-3.4.3  # Latest version
  idna-3.10                 # New dependency
  requests-2.32.5           # Target package
  urllib3-2.5.0             # Latest version (2.x!)
```

**Scenario B: With pin constraints**
```bash
$ pip install --dry-run requests \
    urllib3==1.26.15 \
    certifi==2023.7.22 \
    charset-normalizer==3.2.0

Would install:
  certifi-2023.7.22         # Pinned to OLD version
  charset-normalizer-3.2.0  # Pinned to OLD version
  idna-3.10                 # New dependency (not pinned)
  requests-2.32.5           # Target package
  urllib3-1.26.15           # Pinned to OLD version
```

**Impact Analysis**:
- ✅ Pin successfully prevents urllib3 1.x → 2.x major upgrade
- ✅ Pin prevents certifi 2023 → 2025 upgrade (2 years)
- ✅ Pin prevents charset-normalizer minor upgrade
- ⚠️ idna is NEW and NOT pinned (acceptable - new dependency)
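A minimal sketch of asserting that the pins held after installation, e.g. from a test. The expected versions mirror Scenario B above; the check itself is an illustrative stdlib-only helper, not part of pip_util:

```python
from importlib.metadata import version

# Versions from Scenario B; adjust if the base environment changes.
EXPECTED = {
    "urllib3": "1.26.15",
    "certifi": "2023.7.22",
    "charset-normalizer": "3.2.0",
}

for name, expected in EXPECTED.items():
    actual = version(name)  # reads the installed distribution metadata
    assert actual == expected, f"{name}: expected {expected}, got {actual}"
```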
---
|
||||
|
||||
### 2. python-dateutil → Dependencies
|
||||
|
||||
**Package**: `python-dateutil==2.9.0.post0`
|
||||
**Size**: ~280KB
|
||||
**Purpose**: Real dependency chain test (depends on six)
|
||||
|
||||
#### Dependency Tree
|
||||
|
||||
```
|
||||
python-dateutil==2.9.0.post0
|
||||
└── six>=1.5
|
||||
└── 1.16.0 (OLD) → 1.17.0 (LATEST)
|
||||
```
|
||||
|
||||
#### Install Scenarios
|
||||
|
||||
**Scenario A: Without constraints**
|
||||
```bash
|
||||
$ pip install --dry-run --ignore-installed python-dateutil
|
||||
|
||||
Would install:
|
||||
python-dateutil-2.9.0.post0 # Target package
|
||||
six-1.17.0 # Latest version
|
||||
```
|
||||
|
||||
**Scenario B: With pin constraints**
|
||||
```bash
|
||||
$ pip install --dry-run python-dateutil six==1.16.0
|
||||
|
||||
Would install:
|
||||
python-dateutil-2.9.0.post0 # Target package
|
||||
six-1.16.0 # Pinned to OLD version
|
||||
```
|
||||
|
||||
**Impact Analysis**:
|
||||
- ✅ Pin successfully prevents six 1.16.0 → 1.17.0 upgrade
|
||||
- ✅ Real dependency relationship (verified via PyPI)
|
||||
|
||||
---
|
||||
|
||||
### 3. Other Test Packages (No Dependencies)
|
||||
|
||||
These packages have no dependencies or only have dependencies already in the test environment:
|
||||
|
||||
```
|
||||
attrs==23.1.0 # No dependencies
|
||||
packaging==23.1 # No dependencies (standalone)
|
||||
six==1.16.0 # No dependencies (pure Python)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Version Analysis
|
||||
|
||||
### urllib3: Major Version Jump (1.x → 2.x)
|
||||
|
||||
**Current**: 1.26.15 (2023)
|
||||
**Latest**: 2.5.0 (2025)
|
||||
**Breaking Changes**: YES - urllib3 2.0 removed deprecated APIs
|
||||
|
||||
**Available versions**:
|
||||
```
|
||||
2.x series: 2.5.0, 2.4.0, 2.3.0, 2.2.3, 2.2.2, 2.2.1, 2.2.0, 2.1.0, 2.0.7, ...
|
||||
1.26.x: 1.26.20, 1.26.19, 1.26.18, 1.26.17, 1.26.16, 1.26.15, ...
|
||||
1.25.x: 1.25.11, 1.25.10, 1.25.9, ...
|
||||
```
|
||||
|
||||
**Why test with 1.26.15?**
|
||||
- ✅ Real-world scenario: Many projects pin `urllib3<2` to avoid breaking changes
|
||||
- ✅ Meaningful test: 1.x → 2.x is a major API change
|
||||
- ✅ Compatibility: requests accepts both 1.x and 2.x (`urllib3<3,>=1.21.1`)
|
||||
|
||||
**Breaking changes in urllib3 2.0**:
|
||||
- Removed `urllib3.contrib.pyopenssl`
|
||||
- Removed `urllib3.contrib.securetransport`
|
||||
- Changed import paths for some modules
|
||||
- Updated connection pooling behavior
|
||||
|
||||
---
|
||||
|
||||
### certifi: Long-Term Version Gap (2023 → 2025)
|
||||
|
||||
**Current**: 2023.7.22 (July 2023)
|
||||
**Latest**: 2025.8.3 (August 2025)
|
||||
**Gap**: ~2 years of SSL certificate updates
|
||||
|
||||
**Available versions**:
|
||||
```
|
||||
2025: 2025.8.3, 2025.7.14, 2025.7.9, 2025.6.15, 2025.4.26, ...
|
||||
2024: 2024.12.25, 2024.11.28, 2024.10.29, 2024.9.19, ...
|
||||
2023: 2023.11.17, 2023.7.22, 2023.5.7, ...
|
||||
```
|
||||
|
||||
**Why test with 2023.7.22?**
|
||||
- ✅ Real-world scenario: Older environments with outdated SSL certificates
|
||||
- ✅ Meaningful test: 2-year gap shows protection of older versions
|
||||
- ✅ Safety: Still compatible with requests (`certifi>=2017.4.17`)
|
||||
|
||||
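To see which CA bundle an environment actually uses, certifi exposes the bundle path directly. A minimal sketch:

```python
# Minimal sketch: inspect the certifi version and CA bundle in use.
import certifi

print(certifi.__version__)  # e.g. "2023.07.22" on the pinned test venv
print(certifi.where())      # filesystem path to the bundled cacert.pem
```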
---
|
||||
|
||||
### charset-normalizer: Minor Version Updates
|
||||
|
||||
**Current**: 3.2.0 (2023)
|
||||
**Latest**: 3.4.3 (2025)
|
||||
**Breaking Changes**: NO - only minor/patch updates
|
||||
|
||||
**Available versions**:
|
||||
```
|
||||
3.4.x: 3.4.3, 3.4.2, 3.4.1, 3.4.0
|
||||
3.3.x: 3.3.2, 3.3.1, 3.3.0
|
||||
3.2.x: 3.2.0
|
||||
```
|
||||
|
||||
**Why test with 3.2.0?**
|
||||
- ✅ Demonstrates protection of minor version updates
|
||||
- ✅ Compatible with requests (`charset-normalizer<4,>=2`)
|
||||
|
||||
---
|
||||
|
||||
### six: Stable Version Update
|
||||
|
||||
**Current**: 1.16.0 (2021)
|
||||
**Latest**: 1.17.0 (2024)
|
||||
**Breaking Changes**: NO - six is very stable
|
||||
|
||||
**Available versions**:
|
||||
```
|
||||
1.17.0, 1.16.0, 1.15.0, 1.14.0, 1.13.0, 1.12.0, ...
|
||||
```
|
||||
|
||||
**Why test with 1.16.0?**
|
||||
- ✅ Real dependency of python-dateutil
|
||||
- ✅ Small size (11KB) - lightweight for tests
|
||||
- ✅ Demonstrates protection of stable packages
|
||||
|
||||
---
|
||||
|
||||
### idna: New Dependency
|
||||
|
||||
**Not pre-installed** - Added by requests
|
||||
|
||||
**Version**: 3.10
|
||||
**Size**: ~69KB
|
||||
**Dependency spec**: `idna<4,>=2.5` (from requests)
|
||||
|
||||
**Why NOT pre-installed?**
|
||||
- ✅ Tests that new dependencies are correctly added
|
||||
- ✅ Tests that pins only affect specified packages
|
||||
- ✅ Real-world scenario: new dependency introduced by package update
|
||||
|
||||
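As a quick sanity check, the spec can be evaluated with the `packaging` library (already in the base environment):

```python
# Minimal sketch: confirm requests' spec `idna<4,>=2.5` admits 3.10.
from packaging.specifiers import SpecifierSet

assert "3.10" in SpecifierSet("<4,>=2.5")
```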
---
|
||||
|
||||
## Upgrade Scenarios
|
||||
|
||||
### Scenario Matrix
|
||||
|
||||
| Package | Initial | Without Pin | With Pin | Change Type |
|
||||
|---------|---------|-------------|----------|-------------|
|
||||
| **urllib3** | 1.26.15 | 2.5.0 ❌ | 1.26.15 ✅ | Major (breaking) |
|
||||
| **certifi** | 2023.7.22 | 2025.8.3 ❌ | 2023.7.22 ✅ | 2-year gap |
|
||||
| **charset-normalizer** | 3.2.0 | 3.4.3 ❌ | 3.2.0 ✅ | Minor update |
|
||||
| **six** | 1.16.0 | 1.17.0 ❌ | 1.16.0 ✅ | Stable update |
|
||||
| **idna** | (none) | 3.10 ✅ | 3.10 ✅ | New dependency |
|
||||
| **requests** | (none) | 2.32.5 ✅ | 2.32.5 ✅ | Target package |
|
||||
| **python-dateutil** | (none) | 2.9.0 ✅ | 2.9.0 ✅ | Target package |
|
||||
|
||||
---
|
||||
|
||||
## Adding New Test Scenarios
|
||||
|
||||
### Step 1: Identify Candidate Package
|
||||
|
||||
Use `pip install --dry-run` to analyze dependencies:
|
||||
|
||||
```bash
|
||||
# Analyze package dependencies
|
||||
./test_venv/bin/pip install --dry-run --ignore-installed PACKAGE
|
||||
|
||||
# Check what changes with current environment
|
||||
./test_venv/bin/pip install --dry-run PACKAGE
|
||||
|
||||
# List available versions
|
||||
./test_venv/bin/pip index versions PACKAGE
|
||||
```
|
||||
|
||||
### Step 2: Verify Real Dependencies
|
||||
|
||||
**Good candidates**:
|
||||
- ✅ Has 2+ dependencies
|
||||
- ✅ Dependencies have version upgrades available
|
||||
- ✅ Total size < 500KB (all packages combined)
|
||||
- ✅ Real-world use case (popular package)
|
||||
|
||||
**Examples**:
|
||||
```bash
|
||||
# flask → click, werkzeug, jinja2 (good: multiple dependencies)
|
||||
$ pip install --dry-run --ignore-installed flask
|
||||
Would install: Flask-3.1.2 Jinja2-3.1.6 MarkupSafe-3.0.3 Werkzeug-3.1.3 blinker-1.9.0 click-8.3.0 itsdangerous-2.2.0
|
||||
|
||||
# pytest-cov → pytest, coverage (good: popular testing tool)
|
||||
$ pip install --dry-run --ignore-installed pytest-cov
|
||||
Would install: coverage-7.10.7 pytest-8.4.2 pytest-cov-7.0.0
|
||||
```
|
||||
|
||||
**Bad candidates**:
|
||||
- ❌ click → colorama (no real dependency - colorama is optional/Windows-only)
|
||||
- ❌ pandas → numpy (too large - numpy is 50MB+)
|
||||
- ❌ torch → ... (too large - 800MB+)
|
||||
|
||||
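Conditional dependencies like click → colorama can be spotted from package metadata, since requirement strings carry environment markers. A minimal sketch:

```python
# Minimal sketch: list click's requirement strings; optional deps carry markers.
from importlib.metadata import requires

for req in requires("click") or []:
    print(req)  # e.g. 'colorama; platform_system == "Windows"'
```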
### Step 3: Document Dependencies
|
||||
|
||||
Add to this file:
|
||||
|
||||
```markdown
|
||||
### Package: PACKAGE_NAME → Dependencies
|
||||
|
||||
**Package**: `PACKAGE==VERSION`
|
||||
**Size**: ~XXXKB
|
||||
**Purpose**: Brief description
|
||||
|
||||
#### Dependency Tree
|
||||
(Use tree format)
|
||||
|
||||
#### Install Scenarios
|
||||
(Show with/without pin)
|
||||
|
||||
#### Impact Analysis
|
||||
(What does pin protect?)
|
||||
```
|
||||
|
||||
### Step 4: Update Test Files
|
||||
|
||||
1. Add package to `requirements-test-base.txt` (if pre-installation needed)
|
||||
2. Create policy fixture in test file
|
||||
3. Write test function using `reset_test_venv` fixture
|
||||
4. Update `TEST_SCENARIOS.md` with detailed scenario
|
||||
|
||||
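A minimal sketch of what step 3 might look like, assuming the fixture and helper names documented in this suite's README (the package spec is a placeholder):

```python
# Hypothetical test skeleton using the real-venv fixtures; the helper
# signatures (install_packages / get_installed_packages) are assumptions.
import pytest

@pytest.mark.integration
def test_new_scenario(reset_test_venv, install_packages, get_installed_packages):
    install_packages("PACKAGE==VERSION")           # placeholder package spec
    installed = get_installed_packages()
    assert installed.get("PACKAGE") == "VERSION"   # pin held / restore worked
```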
---
|
||||
|
||||
## Maintenance Notes
|
||||
|
||||
### Updating This Document
|
||||
|
||||
Re-run analysis when:
|
||||
- ✅ PyPI releases major version updates (e.g., urllib3 3.0)
|
||||
- ✅ Adding new test packages
|
||||
- ✅ Test environment base packages change
|
||||
- ✅ Every 6 months (to catch version drift)
|
||||
|
||||
### Verification Commands
|
||||
|
||||
```bash
|
||||
# Regenerate dependency tree
|
||||
./test_venv/bin/pip install --dry-run --ignore-installed requests
|
||||
./test_venv/bin/pip install --dry-run --ignore-installed python-dateutil
|
||||
|
||||
# Check current environment
|
||||
./test_venv/bin/pip freeze
|
||||
|
||||
# Verify test packages still available on PyPI
|
||||
./test_venv/bin/pip index versions urllib3
|
||||
./test_venv/bin/pip index versions certifi
|
||||
./test_venv/bin/pip index versions six
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Reference: Package Specs
|
||||
|
||||
From actual package metadata:
|
||||
|
||||
```python
|
||||
# requests dependencies (from requests==2.32.5)
|
||||
install_requires = [
|
||||
"charset_normalizer<4,>=2",
|
||||
"idna<4,>=2.5",
|
||||
"urllib3<3,>=1.21.1",
|
||||
"certifi>=2017.4.17"
|
||||
]
|
||||
|
||||
# python-dateutil dependencies (from python-dateutil==2.9.0)
|
||||
install_requires = [
|
||||
"six>=1.5"
|
||||
]
|
||||
|
||||
# six dependencies
|
||||
install_requires = [] # No dependencies
|
||||
|
||||
# attrs dependencies
|
||||
install_requires = [] # No dependencies
|
||||
|
||||
# packaging dependencies
|
||||
install_requires = [] # No dependencies
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Version Compatibility Table
|
||||
|
||||
| Package | Minimum | Maximum | Current Test | Latest | Notes |
|
||||
|---------|---------|---------|--------------|--------|-------|
|
||||
| urllib3 | 1.21.1 | <3.0 | 1.26.15 | 2.5.0 | Major version jump possible |
|
||||
| certifi | 2017.4.17 | (none) | 2023.7.22 | 2025.8.3 | Always backward compatible |
|
||||
| charset-normalizer | 2.0 | <4.0 | 3.2.0 | 3.4.3 | Within major version |
|
||||
| six | 1.5 | (none) | 1.16.0 | 1.17.0 | Very stable |
|
||||
| idna | 2.5 | <4.0 | (new) | 3.10 | Added by requests |
|
||||
|
||||
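The table above can be validated mechanically with the `packaging` library, which is already part of the base test environment:

```python
# Minimal sketch: check the test versions against the specs in the table.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

assert Version("1.26.15") in SpecifierSet(">=1.21.1,<3")    # urllib3
assert Version("2023.7.22") in SpecifierSet(">=2017.4.17")  # certifi
assert Version("3.2.0") in SpecifierSet(">=2,<4")           # charset-normalizer
```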
---
|
||||
|
||||
## See Also
|
||||
|
||||
- **DEPENDENCY_ANALYSIS.md** - Detailed analysis methodology
|
||||
- **TEST_SCENARIOS.md** - Complete test scenario specifications
|
||||
- **requirements-test-base.txt** - Base environment packages
|
||||
- **README.md** - Test suite overview and usage
|
||||
305  tests/common/pip_util/README.md  Normal file
@@ -0,0 +1,305 @@
|
||||
# pip_util Integration Tests
|
||||
|
||||
Real integration tests for `pip_util.py` using actual PyPI packages and pip operations.
|
||||
|
||||
## Overview
|
||||
|
||||
These tests use a **real isolated venv** to verify pip_util behavior with actual package installations, deletions, and version changes. No mocks - real pip operations only.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Setup Test Environment
|
||||
|
||||
```bash
|
||||
cd tests/common/pip_util
|
||||
./setup_test_env.sh
|
||||
```
|
||||
|
||||
This creates `test_venv/` with base packages:
|
||||
- urllib3==1.26.15
|
||||
- certifi==2023.7.22
|
||||
- charset-normalizer==3.2.0
|
||||
- colorama==0.4.6
|
||||
- six==1.16.0
|
||||
- attrs==23.1.0
|
||||
- packaging==23.1
|
||||
- pytest (latest)
|
||||
|
||||
### 2. Run Tests
|
||||
|
||||
```bash
|
||||
# Run all integration tests
|
||||
pytest -v --override-ini="addopts="
|
||||
|
||||
# Run specific test
|
||||
pytest test_dependency_protection.py -v --override-ini="addopts="
|
||||
|
||||
# Run with markers
|
||||
pytest -m integration -v --override-ini="addopts="
|
||||
```
|
||||
|
||||
## Test Architecture
|
||||
|
||||
### Real venv Integration
|
||||
|
||||
- **No subprocess mocking** - uses real pip install/uninstall
|
||||
- **Isolated test venv** - prevents system contamination
|
||||
- **Automatic cleanup** - `reset_test_venv` fixture restores state after each test
|
||||
|
||||
### Test Fixtures
|
||||
|
||||
**venv Management**:
|
||||
- `test_venv_path` - Path to test venv (session scope)
|
||||
- `test_pip_cmd` - pip command for test venv
|
||||
- `reset_test_venv` - Restore venv to initial state after each test
|
||||
|
||||
**Helpers**:
|
||||
- `get_installed_packages()` - Get current venv packages
|
||||
- `install_packages(*packages)` - Install packages in test venv
|
||||
- `uninstall_packages(*packages)` - Uninstall packages in test venv
|
||||
|
||||
**Policy Configuration**:
|
||||
- `temp_policy_dir` - Temporary directory for base policies
|
||||
- `temp_user_policy_dir` - Temporary directory for user policies
|
||||
- `mock_manager_util` - Mock manager_util paths to use temp dirs
|
||||
- `mock_context` - Mock context paths to use temp dirs
|
||||
|
||||
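A hypothetical sketch of a policy fixture built on the directories above; the policy file name and loading mechanism are assumptions, while the JSON shape follows TEST_SCENARIOS.md:

```python
# Hypothetical sketch: write a pin_dependencies policy into temp_policy_dir.
import json
import pytest

@pytest.fixture
def pin_requests_policy(temp_policy_dir):
    policy = {
        "requests": {
            "apply_all_matches": [{
                "type": "pin_dependencies",
                "pinned_packages": ["urllib3", "certifi", "charset-normalizer"],
                "on_failure": "retry_without_pin",
            }]
        }
    }
    path = temp_policy_dir / "policy.json"  # file name is an assumption
    path.write_text(json.dumps(policy))
    return path
```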
## Test Scenarios
|
||||
|
||||
### Scenario 1: Dependency Version Protection
|
||||
**File**: `test_dependency_protection.py::test_dependency_version_protection_with_pin`
|
||||
|
||||
**Initial State**:
|
||||
```python
|
||||
urllib3==1.26.15
|
||||
certifi==2023.7.22
|
||||
charset-normalizer==3.2.0
|
||||
```
|
||||
|
||||
**Action**: Install `requests` with pin_dependencies policy
|
||||
|
||||
**Expected Result**:
|
||||
```python
|
||||
# Dependencies stay at old versions (protected by pin)
|
||||
urllib3==1.26.15 # NOT upgraded to 2.x
|
||||
certifi==2023.7.22 # NOT upgraded
|
||||
charset-normalizer==3.2.0 # NOT upgraded
|
||||
requests==2.32.5               # newly installed (latest at analysis time)
|
||||
```
|
||||
|
||||
### Scenario 2: Click-Colorama Dependency Chain
|
||||
**File**: `test_dependency_protection.py::test_dependency_chain_with_click_colorama`
|
||||
|
||||
**Initial State**:
|
||||
```python
|
||||
colorama==0.4.6
|
||||
```
|
||||
|
||||
**Action**: Install `click` with force_version + pin_dependencies
|
||||
|
||||
**Expected Result**:
|
||||
```python
|
||||
colorama==0.4.6 # PINNED
|
||||
click==8.1.3 # FORCED to specific version
|
||||
```
|
||||
|
||||
### Scenario 3: Package Deletion and Restore
|
||||
**File**: `test_environment_recovery.py::test_package_deletion_and_restore`
|
||||
|
||||
**Initial State**:
|
||||
```python
|
||||
six==1.16.0
|
||||
attrs==23.1.0
|
||||
packaging==23.1
|
||||
```
|
||||
|
||||
**Action**: Delete `six` → call `batch.ensure_installed()`
|
||||
|
||||
**Expected Result**:
|
||||
```python
|
||||
six==1.16.0 # RESTORED to required version
|
||||
```
|
||||
|
||||
### Scenario 4: Version Change and Restore
|
||||
**File**: `test_environment_recovery.py::test_version_change_and_restore`
|
||||
|
||||
**Initial State**:
|
||||
```python
|
||||
urllib3==1.26.15
|
||||
```
|
||||
|
||||
**Action**: Upgrade `urllib3` to 2.1.0 → call `batch.ensure_installed()`
|
||||
|
||||
**Expected Result**:
|
||||
```python
|
||||
urllib3==1.26.15 # RESTORED to required version (downgraded)
|
||||
```
|
||||
|
||||
## Test Categories
|
||||
|
||||
### Priority 1 (Essential) ✅ ALL PASSING
|
||||
- ✅ Dependency version protection (enhanced with exact versions)
|
||||
- ✅ Package deletion and restore (enhanced with exact versions)
|
||||
- ✅ Version change and restore (enhanced with downgrade verification)
|
||||
- ✅ Pin only affects specified packages ✨ NEW
|
||||
- ✅ Major version jump prevention ✨ NEW
|
||||
|
||||
### Priority 2 (Important)
|
||||
- ✅ Complex dependency chains (python-dateutil + six)
|
||||
- ⏳ Full workflow integration (TODO: update to real venv)
|
||||
- ⏳ Pin failure retry (TODO: update to real venv)
|
||||
|
||||
### Priority 3 (Edge Cases)
|
||||
- ⏳ Platform conditions (TODO: update to real venv)
|
||||
- ⏳ Policy priority (TODO: update to real venv)
|
||||
- ⏳ Unit tests (no venv needed)
|
||||
- ⏳ Edge cases (no venv needed)
|
||||
|
||||
## Package Selection
|
||||
|
||||
All test packages are **real PyPI packages < 200KB**:
|
||||
|
||||
| Package | Size | Version | Purpose |
|
||||
|---------|------|---------|---------|
|
||||
| **urllib3** | ~100KB | 1.26.15 | Protected dependency (prevent 2.x upgrade) |
|
||||
| **certifi** | ~10KB | 2023.7.22 | SSL certificates (pinned) |
|
||||
| **charset-normalizer** | ~46KB | 3.2.0 | Charset detection (pinned) |
|
||||
| **requests** | ~100KB | 2.32.5 | Main package to install |
|
||||
| **colorama** | ~25KB | 0.4.6 | Terminal colors (pinned) |
|
||||
| **click** | ~90KB | 8.1.3 | CLI framework (forced version) |
|
||||
| **six** | ~11KB | 1.16.0 | Python 2/3 compatibility (restore) |
|
||||
| **attrs** | ~61KB | 23.1.0 | Bystander package |
|
||||
| **packaging** | ~48KB | 23.1 | Bystander package |
|
||||
|
||||
## Cleanup
|
||||
|
||||
### Manual Cleanup
|
||||
```bash
|
||||
# Remove test venv
|
||||
rm -rf test_venv/
|
||||
|
||||
# Recreate fresh venv
|
||||
./setup_test_env.sh
|
||||
```
|
||||
|
||||
### Automatic Cleanup
|
||||
The `reset_test_venv` fixture automatically:
|
||||
1. Records initial package state
|
||||
2. Runs test
|
||||
3. Removes all packages (except pip/setuptools/wheel)
|
||||
4. Reinstalls initial packages
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Error: "Test venv not found"
|
||||
**Solution**: Run `./setup_test_env.sh`
|
||||
|
||||
### Error: "Package not installed in initial state"
|
||||
**Solution**: Check `requirements-test-base.txt` and recreate venv
|
||||
|
||||
### Tests are slow
|
||||
**Reason**: Real pip operations take 2-3 seconds per test
|
||||
**This is expected** - we're doing actual pip install/uninstall
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### How reset_test_venv Works
|
||||
|
||||
```python
|
||||
@pytest.fixture
|
||||
def reset_test_venv(test_pip_cmd):
|
||||
# 1. Record initial state
|
||||
initial = subprocess.run(test_pip_cmd + ["freeze"], ...)
|
||||
|
||||
yield # Run test here
|
||||
|
||||
# 2. Remove all packages
|
||||
current = subprocess.run(test_pip_cmd + ["freeze"], ...)
|
||||
subprocess.run(test_pip_cmd + ["uninstall", "-y", ...], ...)
|
||||
|
||||
# 3. Restore initial state
|
||||
subprocess.run(test_pip_cmd + ["install", "-r", initial], ...)
|
||||
```
|
||||
|
||||
### How make_pip_cmd is Patched
|
||||
|
||||
```python
|
||||
@pytest.fixture(autouse=True)
|
||||
def setup_pip_util(monkeypatch, test_pip_cmd):
|
||||
from comfyui_manager.common import pip_util
|
||||
|
||||
def make_test_pip_cmd(args: List[str]) -> List[str]:
|
||||
return test_pip_cmd + args # Use test venv pip
|
||||
|
||||
monkeypatch.setattr(
|
||||
pip_util.manager_util,
|
||||
"make_pip_cmd",
|
||||
make_test_pip_cmd
|
||||
)
|
||||
```
|
||||
|
||||
## Dependency Analysis Tool
|
||||
|
||||
Use `analyze_dependencies.py` to examine package dependencies before adding new tests:
|
||||
|
||||
```bash
|
||||
# Analyze specific package
|
||||
python analyze_dependencies.py requests
|
||||
|
||||
# Analyze all test packages
|
||||
python analyze_dependencies.py --all
|
||||
|
||||
# Show current environment
|
||||
python analyze_dependencies.py --env
|
||||
```
|
||||
|
||||
**Output includes**:
|
||||
- Latest available versions
|
||||
- Dependencies that would be installed
|
||||
- Version upgrades that would occur
|
||||
- Impact of pin constraints
|
||||
|
||||
**Example output**:
|
||||
```
|
||||
📦 Latest version: 2.32.5
|
||||
🔍 Scenario A: Install without constraints
|
||||
Would install 5 packages:
|
||||
• urllib3 1.26.15 → 2.5.0 ⚠️ UPGRADE
|
||||
|
||||
🔍 Scenario B: Install with pin constraints
|
||||
Would install 5 packages:
|
||||
• urllib3 1.26.15 (no change) 📌 PINNED
|
||||
|
||||
✅ Pin prevented 2 upgrade(s)
|
||||
```
|
||||
|
||||
## Test Statistics
|
||||
|
||||
**Current Status**: 6 tests, 100% passing
|
||||
|
||||
```
|
||||
test_dependency_version_protection_with_pin PASSED (2.28s)
|
||||
test_dependency_chain_with_six_pin PASSED (2.00s)
|
||||
test_pin_only_affects_specified_packages PASSED (2.25s) ✨ NEW
|
||||
test_major_version_jump_prevention PASSED (3.53s) ✨ NEW
|
||||
test_package_deletion_and_restore PASSED (2.25s)
|
||||
test_version_change_and_restore PASSED (2.24s)
|
||||
|
||||
Total: 14.10s
|
||||
```
|
||||
|
||||
**Test Improvements**:
|
||||
- ✅ All tests verify exact version numbers
|
||||
- ✅ All tests reference DEPENDENCY_TREE_CONTEXT.md
|
||||
- ✅ Added 2 new critical tests (pin selectivity, major version prevention)
|
||||
- ✅ Enhanced error messages with expected vs actual values
|
||||
|
||||
## Design Documents
|
||||
|
||||
- **TEST_IMPROVEMENTS.md** - Summary of test enhancements based on dependency context
|
||||
- **DEPENDENCY_TREE_CONTEXT.md** - Verified dependency trees for all test packages
|
||||
- **DEPENDENCY_ANALYSIS.md** - Dependency analysis methodology
|
||||
- **CONTEXT_FILES_GUIDE.md** - Guide for using context files
|
||||
- **TEST_SCENARIOS.md** - Detailed test scenario specifications
|
||||
- **pip_util.test-design.md** - Test design and architecture
|
||||
- **pip_util.design.en.md** - pip_util design documentation
|
||||
433  tests/common/pip_util/TEST_IMPROVEMENTS.md  Normal file
@@ -0,0 +1,433 @@
|
||||
# Test Code Improvements Based on Dependency Context
|
||||
|
||||
**Date**: 2025-10-01
|
||||
**Basis**: DEPENDENCY_TREE_CONTEXT.md analysis
|
||||
|
||||
This document summarizes all test improvements made using verified dependency tree information.
|
||||
|
||||
---
|
||||
|
||||
## Summary of Changes
|
||||
|
||||
### Tests Enhanced
|
||||
|
||||
| Test File | Tests Modified | Tests Added | Total Tests |
|
||||
|-----------|----------------|-------------|-------------|
|
||||
| `test_dependency_protection.py` | 2 | 2 | 4 |
|
||||
| `test_environment_recovery.py` | 2 | 0 | 2 |
|
||||
| **Total** | **4** | **2** | **6** |
|
||||
|
||||
### Test Results
|
||||
|
||||
```bash
|
||||
$ pytest test_dependency_protection.py test_environment_recovery.py -v
|
||||
|
||||
test_dependency_protection.py::test_dependency_version_protection_with_pin PASSED
|
||||
test_dependency_protection.py::test_dependency_chain_with_six_pin PASSED
|
||||
test_dependency_protection.py::test_pin_only_affects_specified_packages PASSED ✨ NEW
|
||||
test_dependency_protection.py::test_major_version_jump_prevention PASSED ✨ NEW
|
||||
test_environment_recovery.py::test_package_deletion_and_restore PASSED
|
||||
test_environment_recovery.py::test_version_change_and_restore PASSED
|
||||
|
||||
6 passed in 14.10s
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Detailed Improvements
|
||||
|
||||
### 1. test_dependency_version_protection_with_pin
|
||||
|
||||
**File**: `test_dependency_protection.py:34-94`
|
||||
|
||||
**Enhancements**:
|
||||
- ✅ Added exact version assertions based on DEPENDENCY_TREE_CONTEXT.md
|
||||
- ✅ Verified initial versions: urllib3==1.26.15, certifi==2023.7.22, charset-normalizer==3.2.0
|
||||
- ✅ Added verification that idna is NOT pre-installed
|
||||
- ✅ Added assertion that idna==3.10 is installed as NEW dependency
|
||||
- ✅ Verified requests==2.32.5 is installed
|
||||
- ✅ Added detailed error messages explaining what versions are expected and why
|
||||
|
||||
**Key Assertions Added**:
|
||||
```python
|
||||
# Verify expected OLD versions
|
||||
assert initial_urllib3 == "1.26.15", f"Expected urllib3==1.26.15, got {initial_urllib3}"
|
||||
assert initial_certifi == "2023.7.22", f"Expected certifi==2023.7.22, got {initial_certifi}"
|
||||
assert initial_charset == "3.2.0", f"Expected charset-normalizer==3.2.0, got {initial_charset}"
|
||||
|
||||
# Verify idna is NOT installed initially
|
||||
assert "idna" not in initial, "idna should not be pre-installed"
|
||||
|
||||
# Verify new dependency was added (idna is NOT pinned, so it gets installed)
|
||||
assert "idna" in final_packages, "idna should be installed as new dependency"
|
||||
assert final_packages["idna"] == "3.10", f"Expected idna==3.10, got {final_packages['idna']}"
|
||||
```
|
||||
|
||||
**Based on Context**:
|
||||
- DEPENDENCY_TREE_CONTEXT.md Section 1: requests → Dependencies
|
||||
- Verified: Without pin, urllib3 would upgrade to 2.5.0 (MAJOR version jump)
|
||||
- Verified: idna is NEW dependency (not in requirements-test-base.txt)
|
||||
|
||||
---
|
||||
|
||||
### 2. test_dependency_chain_with_six_pin
|
||||
|
||||
**File**: `test_dependency_protection.py:117-162`
|
||||
|
||||
**Enhancements**:
|
||||
- ✅ Added exact version assertion for six==1.16.0
|
||||
- ✅ Added exact version assertion for python-dateutil==2.9.0.post0
|
||||
- ✅ Added detailed error messages
|
||||
- ✅ Added docstring reference to DEPENDENCY_TREE_CONTEXT.md
|
||||
|
||||
**Key Assertions Added**:
|
||||
```python
|
||||
# Verify expected OLD version
|
||||
assert initial_six == "1.16.0", f"Expected six==1.16.0, got {initial_six}"
|
||||
|
||||
# Verify final versions
|
||||
assert final_packages["python-dateutil"] == "2.9.0.post0", f"Expected python-dateutil==2.9.0.post0"
|
||||
assert final_packages["six"] == "1.16.0", "six should remain at 1.16.0 (prevented 1.17.0 upgrade)"
|
||||
```
|
||||
|
||||
**Based on Context**:
|
||||
- DEPENDENCY_TREE_CONTEXT.md Section 2: python-dateutil → Dependencies
|
||||
- Verified: six is a REAL dependency (not optional like colorama)
|
||||
- Verified: Without pin, six would upgrade from 1.16.0 to 1.17.0
|
||||
|
||||
---
|
||||
|
||||
### 3. test_pin_only_affects_specified_packages ✨ NEW
|
||||
|
||||
**File**: `test_dependency_protection.py:165-208`
|
||||
|
||||
**Purpose**: Verify that pin is selective, not global
|
||||
|
||||
**Test Logic**:
|
||||
1. Verify idna is NOT pre-installed
|
||||
2. Verify requests is NOT pre-installed
|
||||
3. Install requests with pin policy (only pins urllib3, certifi, charset-normalizer)
|
||||
4. Verify idna was installed at latest version (3.10) - NOT pinned
|
||||
5. Verify requests was installed at expected version (2.32.5)
|
||||
|
||||
**Key Assertions**:
|
||||
```python
|
||||
# Verify idna was installed (NOT pinned, so gets latest)
|
||||
assert "idna" in final_packages, "idna should be installed as new dependency"
|
||||
assert final_packages["idna"] == "3.10", "idna should be at latest version 3.10 (not pinned)"
|
||||
```
|
||||
|
||||
**Based on Context**:
|
||||
- DEPENDENCY_TREE_CONTEXT.md: "⚠️ idna is NEW and NOT pinned (acceptable - new dependency)"
|
||||
- Verified: Pin only affects specified packages in pinned_packages list
|
||||
|
||||
---
|
||||
|
||||
### 4. test_major_version_jump_prevention ✨ NEW
|
||||
|
||||
**File**: `test_dependency_protection.py:211-271`
|
||||
|
||||
**Purpose**: Verify that pin prevents MAJOR version jumps with breaking changes
|
||||
|
||||
**Test Logic**:
|
||||
1. Verify initial urllib3==1.26.15
|
||||
2. **Test WITHOUT pin**: Uninstall deps, install requests → urllib3 upgrades to 2.x
|
||||
3. Verify urllib3 was upgraded to 2.x (starts with "2.")
|
||||
4. Reset environment
|
||||
5. **Test WITH pin**: Install requests with pin → urllib3 stays at 1.x
|
||||
6. Verify urllib3 stayed at 1.26.15 (starts with "1.")
|
||||
|
||||
**Key Assertions**:
|
||||
```python
|
||||
# Without pin - verify urllib3 upgrades to 2.x
|
||||
assert without_pin["urllib3"].startswith("2."), \
|
||||
f"Without pin, urllib3 should upgrade to 2.x, got {without_pin['urllib3']}"
|
||||
|
||||
# With pin - verify urllib3 stays at 1.x
|
||||
assert final_packages["urllib3"] == "1.26.15", \
|
||||
"Pin should prevent urllib3 from upgrading to 2.x (breaking changes)"
|
||||
assert final_packages["urllib3"].startswith("1."), \
|
||||
f"urllib3 should remain at 1.x series, got {final_packages['urllib3']}"
|
||||
```
|
||||
|
||||
**Based on Context**:
|
||||
- DEPENDENCY_TREE_CONTEXT.md: "urllib3 1.26.15 → 2.5.0 is a MAJOR version jump"
|
||||
- DEPENDENCY_TREE_CONTEXT.md: "urllib3 2.0 removed deprecated APIs"
|
||||
- This is the MOST IMPORTANT test - prevents breaking changes
|
||||
|
||||
---
|
||||
|
||||
### 5. test_package_deletion_and_restore
|
||||
|
||||
**File**: `test_environment_recovery.py:33-78`
|
||||
|
||||
**Enhancements**:
|
||||
- ✅ Added exact version assertion for six==1.16.0
|
||||
- ✅ Added verification that six is restored to EXACT version (not latest)
|
||||
- ✅ Added detailed error messages
|
||||
- ✅ Added docstring reference to DEPENDENCY_TREE_CONTEXT.md
|
||||
|
||||
**Key Assertions Added**:
|
||||
```python
|
||||
# Verify six is initially installed at expected version
|
||||
assert initial["six"] == "1.16.0", f"Expected six==1.16.0, got {initial['six']}"
|
||||
|
||||
# Verify six was restored to EXACT required version (not latest)
|
||||
assert final_packages["six"] == "1.16.0", \
|
||||
"six should be restored to exact version 1.16.0 (not 1.17.0 latest)"
|
||||
```
|
||||
|
||||
**Based on Context**:
|
||||
- DEPENDENCY_TREE_CONTEXT.md: "six: 1.16.0 (OLD) → 1.17.0 (LATEST)"
|
||||
- Verified: Restore policy restores to EXACT version, not latest
|
||||
|
||||
---
|
||||
|
||||
### 6. test_version_change_and_restore
|
||||
|
||||
**File**: `test_environment_recovery.py:105-158`
|
||||
|
||||
**Enhancements**:
|
||||
- ✅ Added exact version assertions (1.26.15 initially, 2.1.0 after upgrade)
|
||||
- ✅ Added verification of major version change (1.x → 2.x)
|
||||
- ✅ Added verification of major version downgrade (2.x → 1.x)
|
||||
- ✅ Added detailed error messages explaining downgrade capability
|
||||
- ✅ Added docstring reference to DEPENDENCY_TREE_CONTEXT.md
|
||||
|
||||
**Key Assertions Added**:
|
||||
```python
|
||||
# Verify version was changed to 2.x
|
||||
assert installed_after["urllib3"] == "2.1.0", \
|
||||
f"urllib3 should be upgraded to 2.1.0, got {installed_after['urllib3']}"
|
||||
assert installed_after["urllib3"].startswith("2."), \
|
||||
"urllib3 should be at 2.x series"
|
||||
|
||||
# Verify version was DOWNGRADED from 2.x back to 1.x
|
||||
assert final["urllib3"] == "1.26.15", \
|
||||
"urllib3 should be downgraded to 1.26.15 (from 2.1.0)"
|
||||
assert final["urllib3"].startswith("1."), \
|
||||
f"urllib3 should be back at 1.x series, got {final['urllib3']}"
|
||||
```
|
||||
|
||||
**Based on Context**:
|
||||
- DEPENDENCY_TREE_CONTEXT.md: "urllib3 can upgrade from 1.26.15 (1.x) to 2.5.0 (2.x)"
|
||||
- Verified: Restore policy can DOWNGRADE (not just prevent upgrades)
|
||||
- Tests actual version downgrade capability (2.x → 1.x)
|
||||
|
||||
---
|
||||
|
||||
## Test Coverage Analysis
|
||||
|
||||
### Before Improvements
|
||||
|
||||
| Scenario | Coverage |
|
||||
|----------|----------|
|
||||
| Pin prevents upgrades | ✅ Basic |
|
||||
| New dependencies installed | ❌ Not tested |
|
||||
| Pin is selective | ❌ Not tested |
|
||||
| Major version jump prevention | ❌ Not tested |
|
||||
| Exact version restoration | ❌ Not tested |
|
||||
| Version downgrade capability | ❌ Not tested |
|
||||
|
||||
### After Improvements
|
||||
|
||||
| Scenario | Coverage | Test |
|
||||
|----------|----------|------|
|
||||
| Pin prevents upgrades | ✅ Enhanced | test_dependency_version_protection_with_pin |
|
||||
| New dependencies installed | ✅ Added | test_dependency_version_protection_with_pin |
|
||||
| Pin is selective | ✅ Added | test_pin_only_affects_specified_packages |
|
||||
| Major version jump prevention | ✅ Added | test_major_version_jump_prevention |
|
||||
| Exact version restoration | ✅ Enhanced | test_package_deletion_and_restore |
|
||||
| Version downgrade capability | ✅ Enhanced | test_version_change_and_restore |
|
||||
|
||||
---
|
||||
|
||||
## Key Testing Principles Applied
|
||||
|
||||
### 1. Exact Version Verification
|
||||
|
||||
**Before**:
|
||||
```python
|
||||
assert final_packages["urllib3"] == initial_urllib3 # Generic
|
||||
```
|
||||
|
||||
**After**:
|
||||
```python
|
||||
assert initial_urllib3 == "1.26.15", f"Expected urllib3==1.26.15, got {initial_urllib3}"
|
||||
assert final_packages["urllib3"] == "1.26.15", "urllib3 should remain at 1.26.15 (prevented 2.x upgrade)"
|
||||
```
|
||||
|
||||
**Benefit**: Fails with clear message if environment setup is wrong
|
||||
|
||||
---
|
||||
|
||||
### 2. Version Series Verification
|
||||
|
||||
**Added**:
|
||||
```python
|
||||
assert final_packages["urllib3"].startswith("1."), \
|
||||
f"urllib3 should remain at 1.x series, got {final_packages['urllib3']}"
|
||||
```
|
||||
|
||||
**Benefit**: Catches major version jumps even if exact version changes
|
||||
|
||||
---
|
||||
|
||||
### 3. Negative Testing (Verify NOT Installed)
|
||||
|
||||
**Added**:
|
||||
```python
|
||||
assert "idna" not in initial, "idna should not be pre-installed"
|
||||
```
|
||||
|
||||
**Benefit**: Ensures test environment is in expected state
|
||||
|
||||
---
|
||||
|
||||
### 4. Context-Based Documentation
|
||||
|
||||
**Every test now includes**:
|
||||
```python
|
||||
"""
|
||||
Based on DEPENDENCY_TREE_CONTEXT.md:
|
||||
<specific section reference>
|
||||
<expected behavior from context>
|
||||
"""
|
||||
```
|
||||
|
||||
**Benefit**: Links test expectations to verified dependency data
|
||||
|
||||
---
|
||||
|
||||
## Real-World Scenarios Tested
|
||||
|
||||
### Scenario 1: Preventing Breaking Changes
|
||||
|
||||
**Test**: `test_major_version_jump_prevention`
|
||||
|
||||
**Real-World Impact**:
|
||||
- urllib3 2.0 removed deprecated APIs
|
||||
- Many applications break when upgrading from 1.x to 2.x
|
||||
- Pin prevents this automatic breaking change
|
||||
|
||||
**Verified**: ✅ Pin successfully prevents 1.x → 2.x upgrade
|
||||
|
||||
---
|
||||
|
||||
### Scenario 2: Allowing New Dependencies
|
||||
|
||||
**Test**: `test_pin_only_affects_specified_packages`
|
||||
|
||||
**Real-World Impact**:
|
||||
- New dependencies are safe to add (idna)
|
||||
- Pin should not block ALL changes
|
||||
- Only specified packages are protected
|
||||
|
||||
**Verified**: ✅ idna installs at 3.10 even with pin policy active
|
||||
|
||||
---
|
||||
|
||||
### Scenario 3: Version Downgrade Recovery
|
||||
|
||||
**Test**: `test_version_change_and_restore`
|
||||
|
||||
**Real-World Impact**:
|
||||
- Sometimes packages get upgraded accidentally
|
||||
- Need to downgrade to known-good version
|
||||
- Downgrade is harder than upgrade prevention
|
||||
|
||||
**Verified**: ✅ Can downgrade urllib3 from 2.x to 1.x
|
||||
|
||||
---
|
||||
|
||||
## Test Execution Performance
|
||||
|
||||
```
|
||||
Test Performance Summary:
|
||||
|
||||
test_dependency_version_protection_with_pin 2.28s (enhanced)
|
||||
test_dependency_chain_with_six_pin 2.00s (enhanced)
|
||||
test_pin_only_affects_specified_packages 2.25s (NEW)
|
||||
test_major_version_jump_prevention 3.53s (NEW - does 2 install cycles)
|
||||
test_package_deletion_and_restore 2.25s (enhanced)
|
||||
test_version_change_and_restore 2.24s (enhanced)
|
||||
|
||||
Total: 14.10s for 6 tests
|
||||
Average: 2.35s per test
|
||||
```
|
||||
|
||||
**Note**: `test_major_version_jump_prevention` is slower because it tests both WITH and WITHOUT pin (2 install cycles).
|
||||
|
||||
---
|
||||
|
||||
## Files Modified
|
||||
|
||||
1. **test_dependency_protection.py**: +140 lines
|
||||
- Enhanced 2 existing tests
|
||||
- Added 2 new tests
|
||||
- Total: 272 lines (was 132 lines)
|
||||
|
||||
2. **test_environment_recovery.py**: +35 lines
|
||||
- Enhanced 2 existing tests
|
||||
- Total: 159 lines (was 141 lines)
|
||||
|
||||
---
|
||||
|
||||
## Verification Against Context
|
||||
|
||||
All test improvements verified against:
|
||||
|
||||
| Context Source | Usage |
|
||||
|----------------|-------|
|
||||
| **DEPENDENCY_TREE_CONTEXT.md** | All version numbers, dependency trees |
|
||||
| **DEPENDENCY_ANALYSIS.md** | Package selection rationale, rejected scenarios |
|
||||
| **TEST_SCENARIOS.md** | Scenario specifications, expected outcomes |
|
||||
| **requirements-test-base.txt** | Initial environment state |
|
||||
| **analyze_dependencies.py** | Real-time verification of expectations |
|
||||
|
||||
---
|
||||
|
||||
## Future Maintenance
|
||||
|
||||
### When to Update Tests
|
||||
|
||||
Update tests when:
|
||||
- ✅ PyPI releases new major versions (e.g., urllib3 3.0)
|
||||
- ✅ Base package versions change in requirements-test-base.txt
|
||||
- ✅ New test scenarios added to DEPENDENCY_TREE_CONTEXT.md
|
||||
- ✅ Policy behavior changes in pip_util.py
|
||||
|
||||
### How to Update Tests
|
||||
|
||||
1. Run `python analyze_dependencies.py --all`
|
||||
2. Update expected version numbers in tests
|
||||
3. Update DEPENDENCY_TREE_CONTEXT.md
|
||||
4. Update TEST_SCENARIOS.md
|
||||
5. Run tests to verify
|
||||
|
||||
### Verification Commands
|
||||
|
||||
```bash
|
||||
# Verify environment
|
||||
python analyze_dependencies.py --env
|
||||
|
||||
# Verify package dependencies
|
||||
python analyze_dependencies.py requests
|
||||
python analyze_dependencies.py python-dateutil
|
||||
|
||||
# Run all tests
|
||||
pytest test_dependency_protection.py test_environment_recovery.py -v --override-ini="addopts="
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
✅ **6 tests** now verify real PyPI package dependencies
|
||||
✅ **100% pass rate** with real pip operations
|
||||
✅ **All version numbers** verified against DEPENDENCY_TREE_CONTEXT.md
|
||||
✅ **Major version jump prevention** explicitly tested
|
||||
✅ **Selective pinning** verified (only specified packages)
|
||||
✅ **Version downgrade** capability tested
|
||||
|
||||
**Key Achievement**: Tests now verify actual PyPI behavior, not mocked expectations.
|
||||
573  tests/common/pip_util/TEST_SCENARIOS.md  Normal file
@@ -0,0 +1,573 @@
|
||||
# pip_util Test Scenarios - Test Data Specification
|
||||
|
||||
This document precisely defines all test scenarios, packages, versions, and expected behaviors used in the pip_util test suite.
|
||||
|
||||
## Table of Contents
|
||||
1. [Test Scenario 1: Dependency Version Protection](#scenario-1-dependency-version-protection)
|
||||
2. [Test Scenario 2: Complex Dependency Chain](#scenario-2-complex-dependency-chain)
|
||||
3. [Test Scenario 3: Package Deletion and Restore](#scenario-3-package-deletion-and-restore)
|
||||
4. [Test Scenario 4: Version Change and Restore](#scenario-4-version-change-and-restore)
|
||||
5. [Test Scenario 5: Full Workflow Integration](#scenario-5-full-workflow-integration)
|
||||
6. [Test Scenario 6: Pin Failure Retry](#scenario-6-pin-failure-retry)
|
||||
|
||||
---
|
||||
|
||||
## Scenario 1: Dependency Version Protection
|
||||
|
||||
**File**: `test_dependency_protection.py::test_dependency_version_protection_with_pin`
|
||||
|
||||
**Purpose**: Verify that `pin_dependencies` policy prevents dependency upgrades during package installation.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "1.26.15", # OLD stable version
|
||||
"certifi": "2023.7.22", # OLD version
|
||||
"charset-normalizer": "3.2.0" # OLD version
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"requests": {
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["urllib3", "certifi", "charset-normalizer"],
|
||||
"on_failure": "retry_without_pin"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Action
|
||||
```python
|
||||
batch.install("requests")
|
||||
```
|
||||
|
||||
### Expected pip Command
|
||||
```bash
|
||||
pip install requests urllib3==1.26.15 certifi==2023.7.22 charset-normalizer==3.2.0
|
||||
```
|
||||
|
||||
### Expected Final State
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "1.26.15", # PROTECTED - stayed at old version
|
||||
"certifi": "2023.7.22", # PROTECTED - stayed at old version
|
||||
"charset-normalizer": "3.2.0", # PROTECTED - stayed at old version
|
||||
"requests": "2.31.0" # NEWLY installed
|
||||
}
|
||||
```
|
||||
|
||||
### Without Pin (What Would Happen)
|
||||
```python
|
||||
# If pin_dependencies was NOT used:
|
||||
installed_packages = {
|
||||
"urllib3": "2.1.0", # UPGRADED to 2.x (breaking change)
|
||||
"certifi": "2024.2.2", # UPGRADED to latest
|
||||
"charset-normalizer": "3.3.2", # UPGRADED to latest
|
||||
"requests": "2.31.0"
|
||||
}
|
||||
```
|
||||
|
||||
**Key Point**: Pin prevents `urllib3` from upgrading to 2.x, which has breaking API changes.
|
||||
|
||||
---
|
||||
|
||||
## Scenario 2: Complex Dependency Chain
|
||||
|
||||
**File**: `test_dependency_protection.py::test_dependency_chain_with_click_colorama`
|
||||
|
||||
**Purpose**: Verify that `force_version` + `pin_dependencies` work together correctly.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"colorama": "0.4.6" # Existing dependency
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"click": {
|
||||
"apply_first_match": [
|
||||
{
|
||||
"condition": {
|
||||
"type": "installed",
|
||||
"package": "colorama",
|
||||
"spec": "<0.5.0"
|
||||
},
|
||||
"type": "force_version",
|
||||
"version": "8.1.3",
|
||||
"reason": "click 8.1.3 compatible with colorama <0.5"
|
||||
}
|
||||
],
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["colorama"]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Condition Evaluation
|
||||
```python
|
||||
# Check: colorama installed AND version < 0.5.0?
|
||||
colorama_installed = True
|
||||
colorama_version = "0.4.6" # 0.4.6 < 0.5.0 → True
|
||||
# Result: Condition satisfied → apply force_version
|
||||
```
|
||||
|
||||
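A minimal sketch of how such an `installed` condition can be evaluated with the `packaging` library (an assumption: pip_util applies equivalent logic internally):

```python
# Minimal sketch: evaluate `installed` conditions like colorama "<0.5.0".
from importlib.metadata import PackageNotFoundError, version
from packaging.specifiers import SpecifierSet

def condition_installed(package: str, spec: str) -> bool:
    try:
        return version(package) in SpecifierSet(spec)
    except PackageNotFoundError:
        return False  # not installed → condition not met

# colorama==0.4.6 satisfies "<0.5.0", so force_version applies
```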
### Action
|
||||
```python
|
||||
batch.install("click")
|
||||
```
|
||||
|
||||
### Expected pip Command
|
||||
```bash
|
||||
pip install click==8.1.3 colorama==0.4.6
|
||||
```
|
||||
|
||||
### Expected Final State
|
||||
```python
|
||||
installed_packages = {
|
||||
"colorama": "0.4.6", # PINNED - version protected
|
||||
"click": "8.1.3" # FORCED to specific version
|
||||
}
|
||||
```
|
||||
|
||||
**Key Point**:
|
||||
- `force_version` forces click to install version 8.1.3
|
||||
- `pin_dependencies` ensures colorama stays at 0.4.6
|
||||
|
||||
---
|
||||
|
||||
## Scenario 3: Package Deletion and Restore
|
||||
|
||||
**File**: `test_environment_recovery.py::test_package_deletion_and_restore`
|
||||
|
||||
**Purpose**: Verify that deleted packages can be restored to required versions.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"six": "1.16.0", # Critical package
|
||||
"attrs": "23.1.0",
|
||||
"packaging": "23.1"
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"six": {
|
||||
"restore": [
|
||||
{
|
||||
"target": "six",
|
||||
"version": "1.16.0",
|
||||
"reason": "six must be maintained at 1.16.0 for compatibility"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Action Sequence
|
||||
|
||||
**Step 1**: Install python-dateutil and simulate deletion of six
|
||||
```python
|
||||
batch.install("python-dateutil")
|
||||
```
|
||||
|
||||
**Step 1 Result**: six is DELETED
|
||||
```python
|
||||
installed_packages = {
|
||||
# "six": "1.16.0", # ❌ DELETED by python-dateutil
|
||||
"attrs": "23.1.0",
|
||||
"packaging": "23.1",
|
||||
"python-dateutil": "2.8.2" # ✅ NEW
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2**: Restore deleted packages
|
||||
```python
|
||||
batch.ensure_installed()
|
||||
```
|
||||
|
||||
**Step 2 Result**: six is RESTORED
|
||||
```python
|
||||
installed_packages = {
|
||||
"six": "1.16.0", # ✅ RESTORED to required version
|
||||
"attrs": "23.1.0",
|
||||
"packaging": "23.1",
|
||||
"python-dateutil": "2.8.2"
|
||||
}
|
||||
```
|
||||
|
||||
### Expected pip Commands
|
||||
```bash
|
||||
# Step 1: Install
|
||||
pip install python-dateutil
|
||||
|
||||
# Step 2: Restore
|
||||
pip install six==1.16.0
|
||||
```
|
||||
|
||||
**Key Point**: `restore` policy automatically reinstalls deleted packages.
|
||||
|
||||
---
|
||||
|
||||
## Scenario 4: Version Change and Restore
|
||||
|
||||
**File**: `test_environment_recovery.py::test_version_change_and_restore`
|
||||
|
||||
**Purpose**: Verify that packages with changed versions can be restored to required versions.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "1.26.15", # OLD version (required)
|
||||
"certifi": "2023.7.22"
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"urllib3": {
|
||||
"restore": [
|
||||
{
|
||||
"condition": {
|
||||
"type": "installed",
|
||||
"spec": "!=1.26.15"
|
||||
},
|
||||
"target": "urllib3",
|
||||
"version": "1.26.15",
|
||||
"reason": "urllib3 must be 1.26.15 for compatibility"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Action Sequence
|
||||
|
||||
**Step 1**: Install package that upgrades urllib3
|
||||
```python
|
||||
batch.install("requests")
|
||||
```
|
||||
|
||||
**Step 1 Result**: urllib3 is UPGRADED
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "2.1.0", # ❌ UPGRADED from 1.26.15 to 2.1.0
|
||||
"certifi": "2023.7.22",
|
||||
"requests": "2.31.0" # ✅ NEW
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2**: Check restore condition
|
||||
```python
|
||||
# Condition: urllib3 installed AND version != 1.26.15?
|
||||
urllib3_version = "2.1.0"
|
||||
condition_met = (urllib3_version != "1.26.15") # True
|
||||
# Result: Restore urllib3 to 1.26.15
|
||||
```
|
||||
|
||||
**Step 2**: Restore to required version
|
||||
```python
|
||||
batch.ensure_installed()
|
||||
```
|
||||
|
||||
**Step 2 Result**: urllib3 is DOWNGRADED
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "1.26.15", # ✅ RESTORED to required version
|
||||
"certifi": "2023.7.22",
|
||||
"requests": "2.31.0"
|
||||
}
|
||||
```
|
||||
|
||||
### Expected pip Commands
|
||||
```bash
|
||||
# Step 1: Install (causes upgrade)
|
||||
pip install requests
|
||||
|
||||
# Step 2: Restore (downgrade)
|
||||
pip install urllib3==1.26.15
|
||||
```
|
||||
|
||||
**Key Point**: `restore` with condition can revert unwanted version changes.
|
||||
|
||||
---
|
||||
|
||||
## Scenario 5: Full Workflow Integration
|
||||
|
||||
**File**: `test_full_workflow_integration.py::test_uninstall_install_restore_workflow`
|
||||
|
||||
**Purpose**: Verify complete workflow: uninstall → install → restore.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"old-package": "1.0.0", # To be removed
|
||||
"critical-package": "1.2.3", # To be restored
|
||||
"urllib3": "1.26.15",
|
||||
"certifi": "2023.7.22"
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"old-package": {
|
||||
"uninstall": [
|
||||
{
|
||||
"target": "old-package"
|
||||
}
|
||||
]
|
||||
},
|
||||
"requests": {
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["urllib3", "certifi"]
|
||||
}
|
||||
]
|
||||
},
|
||||
"critical-package": {
|
||||
"restore": [
|
||||
{
|
||||
"target": "critical-package",
|
||||
"version": "1.2.3"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Action Sequence
|
||||
|
||||
**Step 1**: Remove old packages
|
||||
```python
|
||||
removed = batch.ensure_not_installed()
|
||||
```
|
||||
|
||||
**Step 1 Result**:
|
||||
```python
|
||||
installed_packages = {
|
||||
# "old-package": "1.0.0", # ❌ REMOVED
|
||||
"critical-package": "1.2.3",
|
||||
"urllib3": "1.26.15",
|
||||
"certifi": "2023.7.22"
|
||||
}
|
||||
removed = ["old-package"]
|
||||
```
|
||||
|
||||
**Step 2**: Install new package with pins
|
||||
```python
|
||||
batch.install("requests")
|
||||
```
|
||||
|
||||
**Step 2 Result**:
|
||||
```python
|
||||
installed_packages = {
|
||||
"critical-package": "1.2.3",
|
||||
"urllib3": "1.26.15", # PINNED - no upgrade
|
||||
"certifi": "2023.7.22", # PINNED - no upgrade
|
||||
"requests": "2.31.0" # NEW
|
||||
}
|
||||
```
|
||||
|
||||
**Step 3**: Restore required packages
|
||||
```python
|
||||
restored = batch.ensure_installed()
|
||||
```
|
||||
|
||||
**Step 3 Result**:
|
||||
```python
|
||||
installed_packages = {
|
||||
"critical-package": "1.2.3", # Still present
|
||||
"urllib3": "1.26.15",
|
||||
"certifi": "2023.7.22",
|
||||
"requests": "2.31.0"
|
||||
}
|
||||
restored = [] # Nothing to restore (all present)
|
||||
```
|
||||
|
||||
### Expected pip Commands
|
||||
```bash
|
||||
# Step 1: Uninstall
|
||||
pip uninstall -y old-package
|
||||
|
||||
# Step 2: Install with pins
|
||||
pip install requests urllib3==1.26.15 certifi==2023.7.22
|
||||
|
||||
# Step 3: (No command - all packages present)
|
||||
```
|
||||
|
||||
**Key Point**: Complete workflow demonstrates policy coordination.
|
||||
|
||||
---
|
||||
|
||||
## Scenario 6: Pin Failure Retry
|
||||
|
||||
**File**: `test_pin_failure_retry.py::test_pin_failure_retry_without_pin_succeeds`
|
||||
|
||||
**Purpose**: Verify automatic retry without pins when installation with pins fails.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "1.26.15",
|
||||
"certifi": "2023.7.22"
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"requests": {
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["urllib3", "certifi"],
|
||||
"on_failure": "retry_without_pin"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Action
|
||||
```python
|
||||
batch.install("requests")
|
||||
```
|
||||
|
||||
### Attempt 1: Install WITH pins (FAILS)
|
||||
```bash
|
||||
# Command:
|
||||
pip install requests urllib3==1.26.15 certifi==2023.7.22
|
||||
|
||||
# Result: FAILURE (dependency conflict)
|
||||
# Error: "Package conflict: requests requires urllib3>=2.0"
|
||||
```
|
||||
|
||||
### Attempt 2: Retry WITHOUT pins (SUCCEEDS)
|
||||
```bash
|
||||
# Command:
|
||||
pip install requests
|
||||
|
||||
# Result: SUCCESS
|
||||
```
|
||||
|
||||
**Final State**:
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "2.1.0", # UPGRADED (pins removed)
|
||||
"certifi": "2024.2.2", # UPGRADED (pins removed)
|
||||
"requests": "2.31.0" # INSTALLED
|
||||
}
|
||||
```
|
||||
|
||||
### Expected Behavior
|
||||
1. **First attempt**: Install with pinned versions
|
||||
2. **On failure**: Log warning about conflict
|
||||
3. **Retry**: Install without pins
|
||||
4. **Success**: Package installed, dependencies upgraded
|
||||
|
||||
**Key Point**: `retry_without_pin` provides automatic fallback for compatibility issues.
|
||||
|
||||
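A minimal sketch of this fallback logic, under the assumption that installs are plain pip subprocess calls (the `run_pip` helper and pip path are illustrative):

```python
# Minimal sketch of the retry_without_pin fallback.
import subprocess
from typing import List

PIP = "./test_venv/bin/pip"  # illustrative pip path

def run_pip(args: List[str]) -> None:
    subprocess.run([PIP, *args], check=True)  # raises CalledProcessError on failure

def install_with_pins(package: str, pins: List[str], on_failure: str) -> None:
    try:
        run_pip(["install", package, *pins])      # attempt 1: with pins
    except subprocess.CalledProcessError:
        if on_failure == "retry_without_pin":
            run_pip(["install", package])         # attempt 2: pins dropped
        else:
            raise                                 # on_failure == "fail": propagate
```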
---
|
||||
|
||||
## Scenario 6b: Pin Failure with Hard Fail
|
||||
|
||||
**File**: `test_pin_failure_retry.py::test_pin_failure_with_fail_raises_exception`
|
||||
|
||||
**Purpose**: Verify that `on_failure: fail` raises exception instead of retrying.
|
||||
|
||||
### Initial Environment State
|
||||
```python
|
||||
installed_packages = {
|
||||
"urllib3": "1.26.15",
|
||||
"certifi": "2023.7.22"
|
||||
}
|
||||
```
|
||||
|
||||
### Policy Configuration
|
||||
```json
|
||||
{
|
||||
"requests": {
|
||||
"apply_all_matches": [
|
||||
{
|
||||
"type": "pin_dependencies",
|
||||
"pinned_packages": ["urllib3", "certifi"],
|
||||
"on_failure": "fail"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Action
|
||||
```python
|
||||
batch.install("requests")
|
||||
```
|
||||
|
||||
### Attempt 1: Install WITH pins (FAILS)
|
||||
```bash
|
||||
# Command:
|
||||
pip install requests urllib3==1.26.15 certifi==2023.7.22
|
||||
|
||||
# Result: FAILURE (dependency conflict)
|
||||
# Error: "Package conflict: requests requires urllib3>=2.0"
|
||||
```
|
||||
|
||||
### Expected Behavior
|
||||
1. **First attempt**: Install with pinned versions
|
||||
2. **On failure**: Raise `subprocess.CalledProcessError`
|
||||
3. **No retry**: Exception propagates to caller
|
||||
4. **No changes**: Environment unchanged
|
||||
|
||||
**Key Point**: `on_failure: fail` ensures strict version requirements.
|
||||
|
||||
---
|
||||
|
||||
## Summary Table: All Test Packages
|
||||
|
||||
| Package | Initial Version | Action | Final Version | Role |
|
||||
|---------|----------------|--------|---------------|------|
|
||||
| **urllib3** | 1.26.15 | Pin | 1.26.15 | Protected dependency |
|
||||
| **certifi** | 2023.7.22 | Pin | 2023.7.22 | Protected dependency |
|
||||
| **charset-normalizer** | 3.2.0 | Pin | 3.2.0 | Protected dependency |
|
||||
| **requests** | (not installed) | Install | 2.31.0 | New package |
|
||||
| **colorama** | 0.4.6 | Pin | 0.4.6 | Protected dependency |
|
||||
| **click** | (not installed) | Force version | 8.1.3 | New package with forced version |
|
||||
| **six** | 1.16.0 | Delete→Restore | 1.16.0 | Deleted then restored |
|
||||
| **python-dateutil** | (not installed) | Install | 2.8.2 | Package that deletes six |
|
||||
| **attrs** | 23.1.0 | No change | 23.1.0 | Bystander package |
|
||||
| **packaging** | 23.1 | No change | 23.1 | Bystander package |
|
||||
|
||||
## Policy Types Summary
|
||||
|
||||
| Policy Type | Purpose | Example |
|
||||
|-------------|---------|---------|
|
||||
| **pin_dependencies** | Prevent dependency upgrades | Keep urllib3 at 1.26.15 |
|
||||
| **force_version** | Force specific package version | Install click==8.1.3 |
|
||||
| **restore** | Reinstall deleted/changed packages | Restore six to 1.16.0 |
|
||||
| **uninstall** | Remove obsolete packages | Remove old-package |
|
||||
| **on_failure** | Handle installation failures | retry_without_pin or fail |
|
||||
|
||||
## Test Data Design Principles
|
||||
|
||||
1. **Lightweight Packages**: All packages are <200KB for fast testing
|
||||
2. **Real Dependencies**: Use actual PyPI package relationships
|
||||
3. **Version Realism**: Use real version numbers from PyPI
|
||||
4. **Clear Scenarios**: Each test demonstrates one clear behavior
|
||||
5. **Reproducible**: Pinned base versions ensure consistent behavior across environments
|
||||
261  tests/common/pip_util/analyze_dependencies.py  Executable file
@@ -0,0 +1,261 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Dependency Tree Analyzer for pip_util Tests
|
||||
|
||||
Usage:
|
||||
python analyze_dependencies.py [package]
|
||||
python analyze_dependencies.py --all
|
||||
    python analyze_dependencies.py --env
|
||||
|
||||
Examples:
|
||||
python analyze_dependencies.py requests
|
||||
python analyze_dependencies.py python-dateutil
|
||||
python analyze_dependencies.py --all
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Dict, List, Tuple, Optional
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
PIP = "./test_venv/bin/pip"
|
||||
|
||||
|
||||
def check_venv():
|
||||
"""Check if test venv exists"""
|
||||
if not Path(PIP).exists():
|
||||
print("❌ Test venv not found!")
|
||||
print(" Run: ./setup_test_env.sh")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_installed_packages() -> Dict[str, str]:
|
||||
"""Get currently installed packages"""
|
||||
result = subprocess.run(
|
||||
[PIP, "freeze"],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True
|
||||
)
|
||||
|
||||
packages = {}
|
||||
for line in result.stdout.strip().split('\n'):
|
||||
if '==' in line:
|
||||
pkg, ver = line.split('==', 1)
|
||||
packages[pkg] = ver
|
||||
|
||||
return packages
|
||||
|
||||
|
||||
def analyze_package_dry_run(
|
||||
package: str,
|
||||
constraints: Optional[List[str]] = None
|
||||
) -> Tuple[List[Tuple[str, str]], Dict[str, str]]:
|
||||
"""
|
||||
Analyze what would be installed with --dry-run
|
||||
|
||||
Returns:
|
||||
- List of (package_name, version) tuples in install order
|
||||
    - Dict mapping package name → (old_version, new_version) for upgrades
|
||||
"""
|
||||
cmd = [PIP, "install", "--dry-run", "--ignore-installed", package]
|
||||
if constraints:
|
||||
cmd.extend(constraints)
|
||||
|
||||
result = subprocess.run(cmd, capture_output=True, text=True)
|
||||
|
||||
# Parse "Would install" line
|
||||
would_install = []
|
||||
for line in result.stdout.split('\n'):
|
||||
if 'Would install' in line:
|
||||
packages_str = line.split('Would install')[1].strip()
|
||||
for pkg_str in packages_str.split():
|
||||
                # rsplit so hyphenated names ("charset-normalizer-3.4.3") parse correctly
                parts = pkg_str.rsplit('-', 1)
|
||||
if len(parts) == 2:
|
||||
would_install.append((parts[0], parts[1]))
|
||||
|
||||
# Check against current installed
|
||||
installed = get_installed_packages()
|
||||
changes = {}
|
||||
for pkg, new_ver in would_install:
|
||||
if pkg in installed:
|
||||
old_ver = installed[pkg]
|
||||
if old_ver != new_ver:
|
||||
changes[pkg] = (old_ver, new_ver)
|
||||
|
||||
return would_install, changes
|
||||
|
||||
|
||||
def get_available_versions(package: str, limit: int = 10) -> Tuple[Optional[str], List[str]]:
|
||||
"""
|
||||
Get available versions from PyPI
|
||||
|
||||
Returns:
|
||||
- Latest version
|
||||
- List of available versions (limited)
|
||||
"""
|
||||
result = subprocess.run(
|
||||
[PIP, "index", "versions", package],
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
latest = None
|
||||
versions = []
|
||||
|
||||
for line in result.stdout.split('\n'):
|
||||
if 'LATEST:' in line:
|
||||
latest = line.split('LATEST:')[1].strip()
|
||||
elif 'Available versions:' in line:
|
||||
versions_str = line.split('Available versions:')[1].strip()
|
||||
versions = [v.strip() for v in versions_str.split(',')[:limit]]
|
||||
|
||||
return latest, versions
|
||||
|
||||
|
||||
def print_package_analysis(package: str, with_pin: bool = False):
|
||||
"""Print detailed analysis for a package"""
|
||||
print(f"\n{'='*80}")
|
||||
print(f"Package: {package}")
|
||||
print(f"{'='*80}")
|
||||
|
||||
installed = get_installed_packages()
|
||||
|
||||
# Get latest version
|
||||
latest, available = get_available_versions(package)
|
||||
if latest:
|
||||
print(f"\n📦 Latest version: {latest}")
|
||||
print(f"📋 Available versions: {', '.join(available[:5])}")
|
||||
|
||||
# Scenario 1: Without constraints
|
||||
print(f"\n🔍 Scenario A: Install without constraints")
|
||||
print(f" Command: pip install {package}")
|
||||
|
||||
would_install, changes = analyze_package_dry_run(package)
|
||||
|
||||
if would_install:
|
||||
print(f"\n Would install {len(would_install)} packages:")
|
||||
for pkg, ver in would_install:
|
||||
if pkg in changes:
|
||||
old_ver, new_ver = changes[pkg]
|
||||
print(f" • {pkg:25} {old_ver:15} → {new_ver:15} ⚠️ UPGRADE")
|
||||
elif pkg in installed:
|
||||
print(f" • {pkg:25} {ver:15} (already installed)")
|
||||
else:
|
||||
print(f" • {pkg:25} {ver:15} ✨ NEW")
|
||||
|
||||
# Scenario 2: With pin constraints (if dependencies exist)
|
||||
dependencies = [pkg for pkg, _ in would_install if pkg != package]
|
||||
if dependencies and with_pin:
|
||||
print(f"\n🔍 Scenario B: Install with pin constraints")
|
||||
|
||||
# Create pin constraints for all current dependencies
|
||||
constraints = []
|
||||
for dep in dependencies:
|
||||
if dep in installed:
|
||||
constraints.append(f"{dep}=={installed[dep]}")
|
||||
|
||||
if constraints:
|
||||
print(f" Command: pip install {package} {' '.join(constraints)}")
|
||||
|
||||
would_install_pinned, changes_pinned = analyze_package_dry_run(
|
||||
package, constraints
|
||||
)
|
||||
|
||||
print(f"\n Would install {len(would_install_pinned)} packages:")
|
||||
            # `constraints` holds "pkg==ver" strings; compare against bare names
            pinned_names = {c.split("==", 1)[0] for c in constraints}
            for pkg, ver in would_install_pinned:
                if pkg in pinned_names:
|
||||
print(f" • {pkg:25} {ver:15} 📌 PINNED")
|
||||
elif pkg in installed:
|
||||
print(f" • {pkg:25} {ver:15} (no change)")
|
||||
else:
|
||||
print(f" • {pkg:25} {ver:15} ✨ NEW")
|
||||
|
||||
# Show what was prevented
|
||||
prevented = set(changes.keys()) - set(changes_pinned.keys())
|
||||
if prevented:
|
||||
print(f"\n ✅ Pin prevented {len(prevented)} upgrade(s):")
|
||||
for pkg in prevented:
|
||||
old_ver, new_ver = changes[pkg]
|
||||
print(f" • {pkg:25} {old_ver:15} ❌→ {new_ver}")
|
||||
|
||||
|
||||
def analyze_all_test_packages():
|
||||
"""Analyze all packages used in tests"""
|
||||
print("="*80)
|
||||
print("ANALYZING ALL TEST PACKAGES")
|
||||
print("="*80)
|
||||
|
||||
test_packages = [
|
||||
("requests", True),
|
||||
("python-dateutil", True),
|
||||
]
|
||||
|
||||
for package, with_pin in test_packages:
|
||||
print_package_analysis(package, with_pin)
|
||||
|
||||
print(f"\n{'='*80}")
|
||||
print("ANALYSIS COMPLETE")
|
||||
print(f"{'='*80}")
|
||||
|
||||
|
||||
def print_current_environment():
|
||||
"""Print current test environment"""
|
||||
print("="*80)
|
||||
print("CURRENT TEST ENVIRONMENT")
|
||||
print("="*80)
|
||||
|
||||
installed = get_installed_packages()
|
||||
|
||||
print(f"\nTotal packages: {len(installed)}\n")
|
||||
|
||||
# Group by category
|
||||
test_packages = ["urllib3", "certifi", "charset-normalizer", "six", "attrs", "packaging"]
|
||||
framework = ["pytest", "iniconfig", "pluggy", "Pygments"]
|
||||
|
||||
print("Test packages:")
|
||||
for pkg in test_packages:
|
||||
if pkg in installed:
|
||||
print(f" {pkg:25} {installed[pkg]}")
|
||||
|
||||
print("\nTest framework:")
|
||||
for pkg in framework:
|
||||
if pkg in installed:
|
||||
print(f" {pkg:25} {installed[pkg]}")
|
||||
|
||||
other = set(installed.keys()) - set(test_packages) - set(framework)
|
||||
if other:
|
||||
print("\nOther packages:")
|
||||
for pkg in sorted(other):
|
||||
print(f" {pkg:25} {installed[pkg]}")
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point"""
|
||||
check_venv()
|
||||
|
||||
if len(sys.argv) == 1:
|
||||
print("Usage: python analyze_dependencies.py [package|--all|--env]")
|
||||
print("\nExamples:")
|
||||
print(" python analyze_dependencies.py requests")
|
||||
print(" python analyze_dependencies.py --all")
|
||||
print(" python analyze_dependencies.py --env")
|
||||
sys.exit(0)
|
||||
|
||||
command = sys.argv[1]
|
||||
|
||||
if command == "--all":
|
||||
analyze_all_test_packages()
|
||||
elif command == "--env":
|
||||
print_current_environment()
|
||||
elif command.startswith("--"):
|
||||
print(f"Unknown option: {command}")
|
||||
sys.exit(1)
|
||||
else:
|
||||
# Analyze specific package
|
||||
print_package_analysis(command, with_pin=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
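A note on the parser above: `pip index versions` is still marked experimental by pip, so its output format is not guaranteed to stay stable. A minimal sketch of the line format the `LATEST:` / `Available versions:` parsing assumes (sample values are illustrative, not from a real run):

# Hedged sample of `pip index versions requests` output; real output may vary.
sample = (
    "requests (2.32.5)\n"
    "Available versions: 2.32.5, 2.32.4, 2.32.3\n"
    "  INSTALLED: 2.31.0\n"
    "  LATEST:    2.32.5"
)
for line in sample.split('\n'):
    if 'LATEST:' in line:
        print(line.split('LATEST:')[1].strip())  # -> 2.32.5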
387 tests/common/pip_util/conftest.py Normal file
@@ -0,0 +1,387 @@
"""
pytest configuration and shared fixtures for pip_util.py tests

This file provides common fixtures and configuration for all tests.
Uses a real isolated venv for actual pip operations.
"""

import subprocess
import sys
from pathlib import Path
from typing import Dict, List
from unittest.mock import MagicMock

import pytest


# =============================================================================
# Test venv Management
# =============================================================================

@pytest.fixture(scope="session")
def test_venv_path():
    """
    Get path to test venv (must be created by setup_test_env.sh)

    Returns:
        Path: Path to test venv directory
    """
    venv_path = Path(__file__).parent / "test_venv"
    if not venv_path.exists():
        pytest.fail(
            f"Test venv not found at {venv_path}.\n"
            "Please run: ./setup_test_env.sh"
        )
    return venv_path


@pytest.fixture(scope="session")
def test_pip_cmd(test_venv_path):
    """
    Get pip command for test venv

    Returns:
        List[str]: pip command prefix for subprocess
    """
    pip_path = test_venv_path / "bin" / "pip"
    if not pip_path.exists():
        pytest.fail(f"pip not found at {pip_path}")
    return [str(pip_path)]


@pytest.fixture
def reset_test_venv(test_pip_cmd):
    """
    Reset test venv to initial state around each test

    This fixture:
    1. Records currently installed packages
    2. Yields control to the test
    3. Restores the original packages after the test
    """
    # Get initial state
    result = subprocess.run(
        test_pip_cmd + ["freeze"],
        capture_output=True,
        text=True,
        check=True
    )
    initial_packages = result.stdout.strip()

    yield

    # Restore initial state:
    # uninstall everything except pip, setuptools, wheel
    result = subprocess.run(
        test_pip_cmd + ["freeze"],
        capture_output=True,
        text=True,
        check=True
    )
    current_packages = result.stdout.strip()

    if current_packages:
        packages_to_remove = []
        for line in current_packages.split('\n'):
            if line and '==' in line:
                pkg = line.split('==')[0].lower()
                if pkg not in ['pip', 'setuptools', 'wheel']:
                    packages_to_remove.append(pkg)

        if packages_to_remove:
            subprocess.run(
                test_pip_cmd + ["uninstall", "-y"] + packages_to_remove,
                capture_output=True,
                check=False  # Don't fail if package doesn't exist
            )

    # Reinstall initial packages
    if initial_packages:
        # Create temporary requirements file
        import tempfile
        with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
            f.write(initial_packages)
            temp_req = f.name

        try:
            subprocess.run(
                test_pip_cmd + ["install", "-r", temp_req],
                capture_output=True,
                check=True
            )
        finally:
            Path(temp_req).unlink()


# =============================================================================
# Directory and Path Fixtures
# =============================================================================

@pytest.fixture
def temp_policy_dir(tmp_path):
    """
    Create temporary directory for policy files

    Returns:
        Path: Temporary directory for storing test policy files
    """
    policy_dir = tmp_path / "policies"
    policy_dir.mkdir()
    return policy_dir


@pytest.fixture
def temp_user_policy_dir(tmp_path):
    """
    Create temporary directory for user policy files

    Returns:
        Path: Temporary directory for storing user policy files
    """
    user_dir = tmp_path / "user_policies"
    user_dir.mkdir()
    return user_dir


# =============================================================================
# Module Setup and Mocking
# =============================================================================

@pytest.fixture(autouse=True)
def setup_pip_util(monkeypatch, test_pip_cmd):
    """
    Setup pip_util module for testing with real venv

    This fixture:
    1. Mocks the comfy module (not needed for tests)
    2. Adds comfyui_manager to the path
    3. Patches make_pip_cmd to use the test venv
    4. Resets the policy cache
    """
    # Mock comfy module before importing anything
    comfy_mock = MagicMock()
    cli_args_mock = MagicMock()
    cli_args_mock.args = MagicMock()
    comfy_mock.cli_args = cli_args_mock
    sys.modules['comfy'] = comfy_mock
    sys.modules['comfy.cli_args'] = cli_args_mock

    # Add comfyui_manager parent to path so relative imports work
    comfyui_manager_path = str(Path(__file__).parent.parent.parent.parent)
    if comfyui_manager_path not in sys.path:
        sys.path.insert(0, comfyui_manager_path)

    # Import pip_util
    from comfyui_manager.common import pip_util

    # Patch make_pip_cmd to use test venv pip
    def make_test_pip_cmd(args: List[str]) -> List[str]:
        return test_pip_cmd + args

    monkeypatch.setattr(
        pip_util.manager_util,
        "make_pip_cmd",
        make_test_pip_cmd
    )

    # Reset policy cache
    pip_util._pip_policy_cache = None

    yield

    # Cleanup
    pip_util._pip_policy_cache = None


@pytest.fixture
def mock_manager_util(monkeypatch, temp_policy_dir):
    """
    Mock manager_util module paths

    Args:
        monkeypatch: pytest monkeypatch fixture
        temp_policy_dir: Temporary policy directory
    """
    from comfyui_manager.common import pip_util

    monkeypatch.setattr(
        pip_util.manager_util,
        "comfyui_manager_path",
        str(temp_policy_dir)
    )


@pytest.fixture
def mock_context(monkeypatch, temp_user_policy_dir):
    """
    Mock context module paths

    Args:
        monkeypatch: pytest monkeypatch fixture
        temp_user_policy_dir: Temporary user policy directory
    """
    from comfyui_manager.common import pip_util

    monkeypatch.setattr(
        pip_util.context,
        "manager_files_path",
        str(temp_user_policy_dir)
    )


# =============================================================================
# Platform Mocking Fixtures
# =============================================================================

@pytest.fixture
def mock_platform_linux(monkeypatch):
    """Mock platform.system() to return 'Linux'"""
    monkeypatch.setattr("platform.system", lambda: "Linux")


@pytest.fixture
def mock_platform_windows(monkeypatch):
    """Mock platform.system() to return 'Windows'"""
    monkeypatch.setattr("platform.system", lambda: "Windows")


@pytest.fixture
def mock_platform_darwin(monkeypatch):
    """Mock platform.system() to return 'Darwin' (macOS)"""
    monkeypatch.setattr("platform.system", lambda: "Darwin")


@pytest.fixture
def mock_torch_cuda_available(monkeypatch):
    """Mock torch.cuda.is_available() to return True"""
    class MockCuda:
        @staticmethod
        def is_available():
            return True

    class MockTorch:
        cuda = MockCuda()

    monkeypatch.setitem(sys.modules, "torch", MockTorch())


@pytest.fixture
def mock_torch_cuda_unavailable(monkeypatch):
    """Mock torch.cuda.is_available() to return False"""
    class MockCuda:
        @staticmethod
        def is_available():
            return False

    class MockTorch:
        cuda = MockCuda()

    monkeypatch.setitem(sys.modules, "torch", MockTorch())


@pytest.fixture
def mock_torch_not_installed(monkeypatch):
    """Mock torch as not installed (ImportError)"""
    if "torch" in sys.modules:
        monkeypatch.delitem(sys.modules, "torch")


# =============================================================================
# Helper Functions
# =============================================================================

@pytest.fixture
def get_installed_packages(test_pip_cmd):
    """
    Helper to get currently installed packages in test venv

    Returns:
        Callable that returns Dict[str, str] of installed packages
    """
    def _get_installed() -> Dict[str, str]:
        result = subprocess.run(
            test_pip_cmd + ["freeze"],
            capture_output=True,
            text=True,
            check=True
        )

        packages = {}
        for line in result.stdout.strip().split('\n'):
            if line and '==' in line:
                pkg, ver = line.split('==', 1)
                packages[pkg] = ver

        return packages

    return _get_installed


@pytest.fixture
def install_packages(test_pip_cmd):
    """
    Helper to install packages in test venv

    Returns:
        Callable that installs packages
    """
    def _install(*packages):
        subprocess.run(
            test_pip_cmd + ["install"] + list(packages),
            capture_output=True,
            check=True
        )

    return _install


@pytest.fixture
def uninstall_packages(test_pip_cmd):
    """
    Helper to uninstall packages in test venv

    Returns:
        Callable that uninstalls packages
    """
    def _uninstall(*packages):
        subprocess.run(
            test_pip_cmd + ["uninstall", "-y"] + list(packages),
            capture_output=True,
            check=False  # Don't fail if package doesn't exist
        )

    return _uninstall


# =============================================================================
# Test Data Factories
# =============================================================================

@pytest.fixture
def make_policy():
    """
    Factory fixture for creating policy dictionaries

    Returns:
        Callable that creates policy dict from parameters
    """
    def _make_policy(
        package_name: str,
        policy_type: str,
        section: str = "apply_first_match",
        **kwargs
    ) -> Dict:
        policy_item = {"type": policy_type}
        policy_item.update(kwargs)

        return {
            package_name: {
                section: [policy_item]
            }
        }

    return _make_policy
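For illustration, the make_policy factory above produces nested dicts of the same shape the JSON policy fixtures in the test modules use; a quick sketch of its output:

policy = _make_policy("onnxruntime", "replace", replacement="onnxruntime-gpu")
# policy == {
#     "onnxruntime": {
#         "apply_first_match": [
#             {"type": "replace", "replacement": "onnxruntime-gpu"}
#         ]
#     }
# }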
52 tests/common/pip_util/pytest.ini Normal file
@@ -0,0 +1,52 @@
[pytest]
# pytest configuration for pip_util.py tests

# Test discovery
testpaths = .

# Markers
markers =
    unit: Unit tests for individual functions
    integration: Integration tests for workflows
    e2e: End-to-end tests for complete scenarios

# Output options - extend global config
addopts =
    # Coverage options for pip_util
    --cov=../../../comfyui_manager/common/pip_util
    --cov-report=html:htmlcov_pip_util
    --cov-report=term-missing
    --cov-report=xml:coverage_pip_util.xml
    # Coverage fail threshold
    --cov-fail-under=80

# Coverage configuration
[coverage:run]
source = ../../../comfyui_manager/common
omit =
    */tests/*
    */test_*.py
    */__pycache__/*
    */test_venv/*

[coverage:report]
precision = 2
show_missing = True
skip_covered = False

exclude_lines =
    # Standard pragma
    pragma: no cover
    # Don't complain about missing debug code
    def __repr__
    # Don't complain if tests don't hit defensive assertion code
    raise AssertionError
    raise NotImplementedError
    # Don't complain if non-runnable code isn't run
    if __name__ == .__main__.:
    # Don't complain about abstract methods
    @abstractmethod

[coverage:html]
directory = htmlcov
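The markers defined above can be selected programmatically as well as from the command line; a minimal sketch using pytest's public entry point:

import pytest

# Run only the integration-marked tests of this suite, verbosely.
exit_code = pytest.main(["-m", "integration", "-v"])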
20 tests/common/pip_util/requirements-test-base.txt Normal file
@@ -0,0 +1,20 @@
# Base packages for pip_util integration tests
# These packages are installed initially to test various scenarios
# All versions verified using: pip install --dry-run --ignore-installed

# Scenario 1: Dependency Version Protection (requests + urllib3)
# Purpose: Pin prevents urllib3 1.26.15 → 2.5.0 major upgrade
urllib3==1.26.15            # OLD stable version (prevent 2.x upgrade)
certifi==2023.7.22          # OLD version (prevent 2025.x upgrade)
charset-normalizer==3.2.0   # OLD version (prevent 3.4.x upgrade)
# Note: idna is NOT pre-installed (will be added by requests)

# Scenario 2: Package Deletion and Restore (six)
# Purpose: Restore policy reinstalls deleted packages
six==1.16.0                 # Will be deleted and restored to 1.16.0
attrs==23.1.0               # Bystander package
packaging==23.1             # Bystander package (exact pin is 23.1, not 23.1.0 or 25.0)

# Scenario 3: Version Change and Restore (urllib3)
# Purpose: Restore policy reverts version changes
# urllib3==1.26.15 (same as Scenario 1, will be upgraded to 2.5.0 then restored)
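The versions above are described as verified with pip's dry-run resolver; a hedged sketch of that check (requires pip >= 22.2 for --dry-run; the report format may vary between pip versions):

import subprocess
import sys

# Resolve what pip *would* install for requests, ignoring the current env.
subprocess.run(
    [sys.executable, "-m", "pip", "install",
     "--dry-run", "--ignore-installed", "requests"],
    check=True,
)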
47 tests/common/pip_util/setup_test_env.sh Executable file
@@ -0,0 +1,47 @@
#!/bin/bash
# Setup script for pip_util integration tests
# Creates a test venv and installs base packages

set -e  # Exit on error

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
VENV_DIR="$SCRIPT_DIR/test_venv"

echo "Setting up test environment for pip_util integration tests..."

# Remove existing venv if present
if [ -d "$VENV_DIR" ]; then
    echo "Removing existing test venv..."
    rm -rf "$VENV_DIR"
fi

# Create new venv
echo "Creating test venv at $VENV_DIR..."
python3 -m venv "$VENV_DIR"

# Activate venv
source "$VENV_DIR/bin/activate"

# Upgrade pip
echo "Upgrading pip..."
pip install --upgrade pip

# Install pytest
echo "Installing pytest..."
pip install pytest

# Install base test packages
echo "Installing base test packages..."
pip install -r "$SCRIPT_DIR/requirements-test-base.txt"

echo ""
echo "Test environment setup complete!"
echo "Installed packages:"
pip freeze

echo ""
echo "To activate the test venv, run:"
echo "  source $VENV_DIR/bin/activate"
echo ""
echo "To run tests:"
echo "  pytest -v"
271 tests/common/pip_util/test_dependency_protection.py Normal file
@@ -0,0 +1,271 @@
"""
Test dependency version protection with pin (Priority 1)

Tests that existing dependency versions are protected by the pin_dependencies policy
"""

import json

import pytest


@pytest.fixture
def pin_policy(temp_policy_dir):
    """Create policy with pin_dependencies for lightweight real packages"""
    policy_content = {
        "requests": {
            "apply_all_matches": [
                {
                    "type": "pin_dependencies",
                    "pinned_packages": ["urllib3", "certifi", "charset-normalizer"],
                    "on_failure": "retry_without_pin"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.mark.integration
def test_dependency_version_protection_with_pin(
    pin_policy,
    mock_manager_util,
    mock_context,
    reset_test_venv,
    get_installed_packages
):
    """
    Test existing dependency versions are protected by pin

    Priority: 1 (Essential)

    Purpose:
        Verify that when installing a package that would normally upgrade
        dependencies, the pin_dependencies policy protects existing versions.

    Based on DEPENDENCY_TREE_CONTEXT.md:
        Without pin: urllib3 1.26.15 → 2.5.0 (MAJOR upgrade)
        With pin: urllib3 stays at 1.26.15 (protected)
    """
    from comfyui_manager.common.pip_util import PipBatch

    # Verify initial packages are installed (from requirements-test-base.txt)
    initial = get_installed_packages()
    assert "urllib3" in initial
    assert "certifi" in initial
    assert "charset-normalizer" in initial

    # Record initial versions (from DEPENDENCY_TREE_CONTEXT.md)
    initial_urllib3 = initial["urllib3"]
    initial_certifi = initial["certifi"]
    initial_charset = initial["charset-normalizer"]

    # Verify expected OLD versions
    assert initial_urllib3 == "1.26.15", f"Expected urllib3==1.26.15, got {initial_urllib3}"
    assert initial_certifi == "2023.7.22", f"Expected certifi==2023.7.22, got {initial_certifi}"
    assert initial_charset == "3.2.0", f"Expected charset-normalizer==3.2.0, got {initial_charset}"

    # Verify idna is NOT installed initially
    assert "idna" not in initial, "idna should not be pre-installed"

    with PipBatch() as batch:
        result = batch.install("requests")
        final_packages = batch._get_installed_packages()

    # Verify installation succeeded
    assert result is True
    assert "requests" in final_packages

    # Verify versions were maintained (not upgraded to latest)
    # Without pin, these would upgrade to: urllib3==2.5.0, certifi==2025.8.3, charset-normalizer==3.4.3
    assert final_packages["urllib3"] == "1.26.15", "urllib3 should remain at 1.26.15 (prevented 2.x upgrade)"
    assert final_packages["certifi"] == "2023.7.22", "certifi should remain at 2023.7.22 (prevented 2025.x upgrade)"
    assert final_packages["charset-normalizer"] == "3.2.0", "charset-normalizer should remain at 3.2.0"

    # Verify new dependency was added (idna is NOT pinned, so it gets installed)
    assert "idna" in final_packages, "idna should be installed as new dependency"
    assert final_packages["idna"] == "3.10", f"Expected idna==3.10, got {final_packages['idna']}"

    # Verify requests was installed at expected version
    assert final_packages["requests"] == "2.32.5", f"Expected requests==2.32.5, got {final_packages['requests']}"


@pytest.fixture
def python_dateutil_policy(temp_policy_dir):
    """Create policy for python-dateutil with six pinning"""
    policy_content = {
        "python-dateutil": {
            "apply_all_matches": [
                {
                    "type": "pin_dependencies",
                    "pinned_packages": ["six"],
                    "reason": "Protect six from upgrading"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.mark.integration
def test_dependency_chain_with_six_pin(
    python_dateutil_policy,
    mock_manager_util,
    mock_context,
    reset_test_venv,
    get_installed_packages
):
    """
    Test python-dateutil + six dependency chain with pin

    Priority: 2 (Important)

    Purpose:
        Verify that pin_dependencies protects actual dependencies
        (six is a real dependency of python-dateutil).

    Based on DEPENDENCY_TREE_CONTEXT.md:
        python-dateutil depends on six>=1.5
        Without pin: six 1.16.0 → 1.17.0
        With pin: six stays at 1.16.0 (protected)
    """
    from comfyui_manager.common.pip_util import PipBatch

    # Verify six is installed
    initial = get_installed_packages()
    assert "six" in initial
    initial_six = initial["six"]

    # Verify expected OLD version
    assert initial_six == "1.16.0", f"Expected six==1.16.0, got {initial_six}"

    with PipBatch() as batch:
        result = batch.install("python-dateutil")
        final_packages = batch._get_installed_packages()

    # Verify installation succeeded
    assert result is True

    # Verify final versions
    assert "python-dateutil" in final_packages
    assert final_packages["python-dateutil"] == "2.9.0.post0", \
        f"Expected python-dateutil==2.9.0.post0, got {final_packages['python-dateutil']}"

    # Verify six was NOT upgraded (without pin, would upgrade to 1.17.0)
    assert "six" in final_packages
    assert final_packages["six"] == "1.16.0", "six should remain at 1.16.0 (prevented 1.17.0 upgrade)"


@pytest.mark.integration
def test_pin_only_affects_specified_packages(
    pin_policy,
    mock_manager_util,
    mock_context,
    reset_test_venv,
    get_installed_packages
):
    """
    Test that pin only affects specified packages, not all dependencies

    Priority: 1 (Essential)

    Purpose:
        Verify that idna (new dependency) is installed even though
        other dependencies are pinned. This tests that pin is selective,
        not global.

    Based on DEPENDENCY_TREE_CONTEXT.md:
        idna is a NEW dependency (not in initial environment)
        Pin only affects: urllib3, certifi, charset-normalizer
        idna should be installed at latest version (3.10)
    """
    from comfyui_manager.common.pip_util import PipBatch

    # Verify initial state
    initial = get_installed_packages()
    assert "idna" not in initial, "idna should not be pre-installed"
    assert "requests" not in initial, "requests should not be pre-installed"

    with PipBatch() as batch:
        result = batch.install("requests")
        final_packages = batch._get_installed_packages()

    # Verify installation succeeded
    assert result is True

    # Verify idna was installed (NOT pinned, so gets latest)
    assert "idna" in final_packages, "idna should be installed as new dependency"
    assert final_packages["idna"] == "3.10", "idna should be at latest version 3.10 (not pinned)"

    # Verify requests was installed
    assert "requests" in final_packages
    assert final_packages["requests"] == "2.32.5"


@pytest.mark.integration
def test_major_version_jump_prevention(
    pin_policy,
    mock_manager_util,
    mock_context,
    reset_test_venv,
    get_installed_packages,
    install_packages,
    uninstall_packages
):
    """
    Test that pin prevents MAJOR version jumps (breaking changes)

    Priority: 1 (Essential)

    Purpose:
        Verify that pin prevents the urllib3 1.x → 2.x major upgrade.
        This is the most important test because urllib3 2.0 has
        breaking API changes.

    Based on DEPENDENCY_TREE_CONTEXT.md:
        urllib3 1.26.15 → 2.5.0 is a MAJOR version jump
        urllib3 2.0 removed deprecated APIs
        requests accepts both: urllib3<3,>=1.21.1
    """
    from comfyui_manager.common.pip_util import PipBatch

    # Verify initial urllib3 version
    initial = get_installed_packages()
    assert initial["urllib3"] == "1.26.15", "Expected urllib3==1.26.15"

    # First, test WITHOUT pin to verify urllib3 would upgrade to 2.x
    # (This simulates what would happen without our protection)
    uninstall_packages("urllib3", "certifi", "charset-normalizer")
    install_packages("requests")

    without_pin = get_installed_packages()

    # Verify urllib3 was upgraded to 2.x without pin
    assert "urllib3" in without_pin
    assert without_pin["urllib3"].startswith("2."), \
        f"Without pin, urllib3 should upgrade to 2.x, got {without_pin['urllib3']}"

    # Now reset and test WITH pin
    uninstall_packages("requests", "urllib3", "certifi", "charset-normalizer", "idna")
    install_packages("urllib3==1.26.15", "certifi==2023.7.22", "charset-normalizer==3.2.0")

    with PipBatch() as batch:
        result = batch.install("requests")
        final_packages = batch._get_installed_packages()

    # Verify installation succeeded
    assert result is True

    # Verify urllib3 stayed at 1.x (prevented major version jump)
    assert final_packages["urllib3"] == "1.26.15", \
        "Pin should prevent urllib3 from upgrading to 2.x (breaking changes)"

    # Verify it's specifically 1.x, not 2.x
    assert final_packages["urllib3"].startswith("1."), \
        f"urllib3 should remain at 1.x series, got {final_packages['urllib3']}"
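Taken together, these tests imply the pin policy appends `==` pins of the currently installed versions to the install command; a sketch of the resulting shape (inferred from the command assertions in the workflow tests below, not from pip_util internals):

pins = {"urllib3": "1.26.15", "certifi": "2023.7.22", "charset-normalizer": "3.2.0"}
cmd = ["pip", "install", "requests"] + [f"{pkg}=={ver}" for pkg, ver in pins.items()]
# -> pip install requests urllib3==1.26.15 certifi==2023.7.22 charset-normalizer==3.2.0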
279 tests/common/pip_util/test_edge_cases.py Normal file
@@ -0,0 +1,279 @@
"""
Edge cases and boundary conditions (Priority 3)

Tests empty policies, malformed JSON, and other edge cases
"""

import json
import subprocess

import pytest


@pytest.mark.unit
def test_empty_base_policy_uses_default_installation(
    empty_policy_file,
    mock_manager_util,
    mock_context
):
    """
    Test default installation with empty policy

    Priority: 3 (Recommended)

    Purpose:
        Verify that when the policy is empty, the system falls back
        to default installation behavior.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import get_pip_policy

    policy = get_pip_policy()

    assert policy == {}


@pytest.fixture
def malformed_policy_file(temp_policy_dir):
    """Create malformed JSON policy file"""
    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text("{invalid json content")
    return policy_file


@pytest.mark.unit
def test_json_parse_error_fallback_to_empty(
    malformed_policy_file,
    mock_manager_util,
    mock_context,
    capture_logs
):
    """
    Test empty dict on JSON parse error

    Priority: 3 (Recommended)

    Purpose:
        Verify that malformed JSON results in an empty policy
        with appropriate error logging.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import get_pip_policy

    policy = get_pip_policy()

    assert policy == {}
    # Should have error log about parsing failure
    assert any("parse" in record.message.lower() for record in capture_logs.records)


@pytest.mark.unit
def test_unknown_condition_type_returns_false(
    mock_manager_util,
    mock_context,
    capture_logs
):
    """
    Test unknown condition type returns False

    Priority: 3 (Recommended)

    Purpose:
        Verify that unknown condition types are handled gracefully
        by returning False with a warning.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    condition = {"type": "unknown_type", "some_field": "value"}

    result = batch._evaluate_condition(condition, "pkg", {})

    assert result is False
    # Should have warning about unknown type
    assert any("unknown" in record.message.lower() for record in capture_logs.records)


@pytest.fixture
def self_reference_policy(temp_policy_dir):
    """Create policy with self-reference"""
    policy_content = {
        "critical-package": {
            "restore": [
                {
                    "condition": {
                        "type": "installed",
                        "spec": "!=1.2.3"
                    },
                    "target": "critical-package",
                    "version": "1.2.3"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_self_reference_subprocess(monkeypatch):
    """Mock subprocess for self-reference test"""
    call_sequence = []

    installed_packages = {
        "critical-package": "1.2.2"
    }

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip install
        if "install" in cmd and "critical-package==1.2.3" in cmd:
            installed_packages["critical-package"] = "1.2.3"
            return subprocess.CompletedProcess(cmd, 0, "", "")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages


@pytest.mark.integration
def test_restore_self_version_check(
    self_reference_policy,
    mock_manager_util,
    mock_context,
    mock_self_reference_subprocess
):
    """
    Test restore policy checking its own version

    Priority: 3 (Recommended)

    Purpose:
        Verify that when a condition omits the package field,
        it correctly defaults to checking the package itself.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_self_reference_subprocess

    with PipBatch() as batch:
        restored = batch.ensure_installed()
        final = batch._get_installed_packages()

    # Condition should evaluate with self-reference:
    # "1.2.2" != "1.2.3" → True
    assert "critical-package" in restored
    assert final["critical-package"] == "1.2.3"


@pytest.fixture
def partial_failure_policy(temp_policy_dir):
    """Create policy for multiple uninstalls"""
    policy_content = {
        "pkg-a": {
            "uninstall": [{"target": "old-pkg-1"}]
        },
        "pkg-b": {
            "uninstall": [{"target": "old-pkg-2"}]
        },
        "pkg-c": {
            "uninstall": [{"target": "old-pkg-3"}]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_partial_failure_subprocess(monkeypatch):
    """Mock subprocess with one failure"""
    call_sequence = []

    installed_packages = {
        "old-pkg-1": "1.0",
        "old-pkg-2": "1.0",
        "old-pkg-3": "1.0"
    }

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip uninstall
        if "uninstall" in cmd:
            if "old-pkg-2" in cmd:
                # Fail on pkg-2
                raise subprocess.CalledProcessError(1, cmd, "", "Uninstall failed")
            else:
                # Success on others
                for pkg in ["old-pkg-1", "old-pkg-3"]:
                    if pkg in cmd:
                        installed_packages.pop(pkg, None)
                return subprocess.CompletedProcess(cmd, 0, "", "")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages


@pytest.mark.integration
def test_ensure_not_installed_continues_on_individual_failure(
    partial_failure_policy,
    mock_manager_util,
    mock_context,
    mock_partial_failure_subprocess,
    capture_logs
):
    """
    Test partial failure handling

    Priority: 2 (Important)

    Purpose:
        Verify that when one package removal fails, the system
        continues processing the other packages.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_partial_failure_subprocess

    with PipBatch() as batch:
        removed = batch.ensure_not_installed()

    # Verify partial success
    assert "old-pkg-1" in removed
    assert "old-pkg-3" in removed
    assert "old-pkg-2" not in removed  # Failed

    # Verify warning logged for failure
    assert any("warning" in record.levelname.lower() for record in capture_logs.records)
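For reference, the condition shapes exercised across this suite, with fields inferred from the fixtures; as the self-reference test above shows, an omitted "package" field defaults to the policy's own package:

condition_examples = [
    {"type": "installed", "package": "torch", "spec": ">=2.0.0"},  # explicit target
    {"type": "installed", "spec": "!=1.2.3"},                      # self-reference default
    {"type": "platform", "os": "linux", "has_gpu": True},          # OS + GPU gate
]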
158 tests/common/pip_util/test_environment_recovery.py Normal file
@@ -0,0 +1,158 @@
"""
Test environment corruption and recovery (Priority 1)

Tests that packages deleted or modified during installation are restored
"""

import json

import pytest


@pytest.fixture
def restore_policy(temp_policy_dir):
    """Create policy with restore section for lightweight packages"""
    policy_content = {
        "six": {
            "restore": [
                {
                    "target": "six",
                    "version": "1.16.0",
                    "reason": "six must be maintained at 1.16.0 for compatibility"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.mark.integration
def test_package_deletion_and_restore(
    restore_policy,
    mock_manager_util,
    mock_context,
    reset_test_venv,
    get_installed_packages,
    install_packages,
    uninstall_packages
):
    """
    Test package deleted by installation is restored

    Priority: 1 (Essential)

    Purpose:
        Verify that when a package installation deletes another package,
        the restore policy can bring it back with the correct version.

    Based on DEPENDENCY_TREE_CONTEXT.md:
        six==1.16.0 must be maintained for compatibility
        After deletion, should restore to exactly 1.16.0
    """
    from comfyui_manager.common.pip_util import PipBatch

    # Verify six is initially installed at expected version
    initial = get_installed_packages()
    assert "six" in initial
    assert initial["six"] == "1.16.0", f"Expected six==1.16.0, got {initial['six']}"

    with PipBatch() as batch:
        # Manually remove six to simulate deletion by another package
        uninstall_packages("six")

        # Check six was deleted
        installed_after_delete = batch._get_installed_packages()
        assert "six" not in installed_after_delete, "six should be deleted"

        # Restore six
        restored = batch.ensure_installed()
        final_packages = batch._get_installed_packages()

    # Verify six was restored to the EXACT required version (not latest)
    assert "six" in restored, "six should be in restored list"
    assert final_packages["six"] == "1.16.0", \
        "six should be restored to exact version 1.16.0 (not 1.17.0 latest)"


@pytest.fixture
def version_change_policy(temp_policy_dir):
    """Create policy for version change test with real packages"""
    policy_content = {
        "urllib3": {
            "restore": [
                {
                    "condition": {
                        "type": "installed",
                        "spec": "!=1.26.15"
                    },
                    "target": "urllib3",
                    "version": "1.26.15",
                    "reason": "urllib3 must be 1.26.15 for compatibility"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.mark.integration
def test_version_change_and_restore(
    version_change_policy,
    mock_manager_util,
    mock_context,
    reset_test_venv,
    get_installed_packages,
    install_packages
):
    """
    Test package version changed by installation is restored

    Priority: 1 (Essential)

    Purpose:
        Verify that when a package installation changes another package's
        version, the restore policy can revert it to the required version.

    Based on DEPENDENCY_TREE_CONTEXT.md:
        urllib3 can upgrade from 1.26.15 (1.x) to 2.5.0 (2.x)
        Restore policy with condition "!=1.26.15" should downgrade back
        This tests downgrade capability (not just upgrade prevention)
    """
    from comfyui_manager.common.pip_util import PipBatch

    # Verify urllib3 1.26.15 is installed
    initial = get_installed_packages()
    assert "urllib3" in initial
    assert initial["urllib3"] == "1.26.15", f"Expected urllib3==1.26.15, got {initial['urllib3']}"

    with PipBatch() as batch:
        # Manually upgrade urllib3 to 2.x to simulate version change.
        # This is a MAJOR version upgrade (1.x → 2.x).
        install_packages("urllib3==2.1.0")

        installed_after = batch._get_installed_packages()
        # Verify version was changed to 2.x
        assert installed_after["urllib3"] == "2.1.0", \
            f"urllib3 should be upgraded to 2.1.0, got {installed_after['urllib3']}"
        assert installed_after["urllib3"].startswith("2."), \
            "urllib3 should be at 2.x series"

        # Restore urllib3 to 1.26.15 (this is a DOWNGRADE from 2.x to 1.x)
        restored = batch.ensure_installed()
        final = batch._get_installed_packages()

    # Verify condition was satisfied (2.1.0 != 1.26.15) and restore was triggered
    assert "urllib3" in restored, "urllib3 should be in restored list"

    # Verify version was DOWNGRADED from 2.x back to 1.x
    assert final["urllib3"] == "1.26.15", \
        "urllib3 should be downgraded to 1.26.15 (from 2.1.0)"
    assert final["urllib3"].startswith("1."), \
        f"urllib3 should be back at 1.x series, got {final['urllib3']}"
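The "spec" strings used by the restore conditions read like PEP 440 version specifiers; assuming pip_util evaluates them the way the packaging library does (an assumption, since the internals are not shown here), a quick sketch of the check:

from packaging.specifiers import SpecifierSet

assert "2.1.0" in SpecifierSet("!=1.26.15")        # mismatch -> restore triggers
assert "1.26.15" not in SpecifierSet("!=1.26.15")  # already correct -> no-op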
204 tests/common/pip_util/test_full_workflow_integration.py Normal file
@@ -0,0 +1,204 @@
"""
Test full workflow integration (Priority 1)

Tests the complete uninstall → install → restore workflow
"""

import json
import subprocess

import pytest


@pytest.fixture
def workflow_policy(temp_policy_dir):
    """Create policy for full workflow test"""
    policy_content = {
        "target-package": {
            "uninstall": [
                {
                    "condition": {
                        "type": "installed",
                        "package": "conflicting-pkg"
                    },
                    "target": "conflicting-pkg",
                    "reason": "Conflicts with target-package"
                }
            ],
            "apply_all_matches": [
                {
                    "type": "pin_dependencies",
                    "pinned_packages": ["numpy", "pandas"]
                }
            ]
        },
        "critical-package": {
            "restore": [
                {
                    "target": "critical-package",
                    "version": "1.2.3",
                    "reason": "Critical package must be 1.2.3"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_workflow_subprocess(monkeypatch):
    """Mock subprocess for workflow test"""
    call_sequence = []

    # Initial environment: conflicting-pkg, numpy, pandas, critical-package
    installed_packages = {
        "conflicting-pkg": "1.0.0",
        "numpy": "1.26.0",
        "pandas": "2.0.0",
        "critical-package": "1.2.3"
    }

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip uninstall
        if "uninstall" in cmd:
            # Remove conflicting-pkg
            if "conflicting-pkg" in cmd:
                installed_packages.pop("conflicting-pkg", None)
            return subprocess.CompletedProcess(cmd, 0, "", "")

        # pip install target-package (deletes critical-package)
        if "install" in cmd and "target-package" in cmd:
            # Simulate target-package installation deleting critical-package
            installed_packages.pop("critical-package", None)
            installed_packages["target-package"] = "1.0.0"
            return subprocess.CompletedProcess(cmd, 0, "", "")

        # pip install critical-package (restore)
        if "install" in cmd and "critical-package==1.2.3" in cmd:
            installed_packages["critical-package"] = "1.2.3"
            return subprocess.CompletedProcess(cmd, 0, "", "")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages


@pytest.mark.integration
def test_uninstall_install_restore_workflow(
    workflow_policy,
    mock_manager_util,
    mock_context,
    mock_workflow_subprocess
):
    """
    Test complete uninstall → install → restore workflow

    Priority: 1 (Essential)

    Purpose:
        Verify the complete workflow executes in correct order:
        1. ensure_not_installed() removes conflicting packages
        2. install() applies policies (pin_dependencies)
        3. ensure_installed() restores deleted packages
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_workflow_subprocess

    with PipBatch() as batch:
        # Step 1: uninstall - remove conflicting packages
        removed = batch.ensure_not_installed()

        # Step 2: install target-package with pinned dependencies
        result = batch.install("target-package")

        # Step 3: restore critical-package that was deleted
        restored = batch.ensure_installed()

    # Verify Step 1: conflicting-pkg was removed
    assert "conflicting-pkg" in removed

    # Verify Step 2: target-package was installed with pinned dependencies
    assert result is True
    # Check that pip install was called with pinned packages
    install_calls = [cmd for cmd in call_sequence if "install" in cmd and "target-package" in cmd]
    assert len(install_calls) > 0
    install_cmd = install_calls[0]
    assert "target-package" in install_cmd
    assert "numpy==1.26.0" in install_cmd
    assert "pandas==2.0.0" in install_cmd

    # Verify Step 3: critical-package was restored
    assert "critical-package" in restored

    # Verify final state
    assert "conflicting-pkg" not in installed_packages
    assert "critical-package" in installed_packages
    assert installed_packages["critical-package"] == "1.2.3"
    assert "target-package" in installed_packages


@pytest.mark.integration
def test_cache_invalidation_across_workflow(
    workflow_policy,
    mock_manager_util,
    mock_context,
    mock_workflow_subprocess
):
    """
    Test cache is correctly refreshed at each workflow step

    Priority: 1 (Essential)

    Purpose:
        Verify that the cache is invalidated and refreshed after each
        operation (uninstall, install, restore) to reflect current state.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_workflow_subprocess

    with PipBatch() as batch:
        # Initial cache state
        cache1 = batch._get_installed_packages()
        assert "conflicting-pkg" in cache1
        assert "critical-package" in cache1

        # After uninstall
        removed = batch.ensure_not_installed()
        cache2 = batch._get_installed_packages()
        assert "conflicting-pkg" not in cache2  # Removed

        # After install (critical-package gets deleted by target-package)
        batch.install("target-package")
        cache3 = batch._get_installed_packages()
        assert "target-package" in cache3  # Added
        assert "critical-package" not in cache3  # Deleted by target-package

        # After restore
        restored = batch.ensure_installed()
        cache4 = batch._get_installed_packages()
        assert "critical-package" in cache4  # Restored

    # Verify cache was refreshed at each step
    assert cache1 != cache2  # Changed after uninstall
    assert cache2 != cache3  # Changed after install
    assert cache3 != cache4  # Changed after restore
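Condensed from the workflow test above, the intended PipBatch call order is ("some-package" is a placeholder name):

from comfyui_manager.common.pip_util import PipBatch

with PipBatch() as batch:
    batch.ensure_not_installed()   # 1. remove packages the policies forbid
    batch.install("some-package")  # 2. install with pin/replace policies applied
    batch.ensure_installed()       # 3. restore packages the policies require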
216 tests/common/pip_util/test_pin_failure_retry.py Normal file
@@ -0,0 +1,216 @@
"""
Test pin failure and retry logic (Priority 1)

Tests that installation with pinned dependencies can retry without pins on failure
"""

import json
import subprocess

import pytest


@pytest.fixture
def retry_policy(temp_policy_dir):
    """Create policy with retry_without_pin"""
    policy_content = {
        "new-pkg": {
            "apply_all_matches": [
                {
                    "type": "pin_dependencies",
                    "pinned_packages": ["numpy", "pandas"],
                    "on_failure": "retry_without_pin"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_retry_subprocess(monkeypatch):
    """Mock subprocess that fails with pins, succeeds without"""
    call_sequence = []
    attempt_count = [0]

    installed_packages = {
        "numpy": "1.26.0",
        "pandas": "2.0.0"
    }

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip install
        if "install" in cmd and "new-pkg" in cmd:
            attempt_count[0] += 1

            # First attempt with pins - FAIL
            if attempt_count[0] == 1 and "numpy==1.26.0" in cmd and "pandas==2.0.0" in cmd:
                raise subprocess.CalledProcessError(1, cmd, "", "Dependency conflict")

            # Second attempt without pins - SUCCESS
            if attempt_count[0] == 2:
                installed_packages["new-pkg"] = "1.0.0"
                # Without pins, versions might change
            return subprocess.CompletedProcess(cmd, 0, "", "")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages, attempt_count


@pytest.mark.integration
def test_pin_failure_retry_without_pin_succeeds(
    retry_policy,
    mock_manager_util,
    mock_context,
    mock_retry_subprocess,
    capture_logs
):
    """
    Test retry without pin succeeds after pin failure

    Priority: 1 (Essential)

    Purpose:
        Verify that when installation with pinned dependencies fails,
        the system automatically retries without pins and succeeds.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages, attempt_count = mock_retry_subprocess

    with PipBatch() as batch:
        result = batch.install("new-pkg")

    # Verify installation succeeded on retry
    assert result is True

    # Verify two installation attempts were made
    install_calls = [cmd for cmd in call_sequence if "install" in cmd and "new-pkg" in cmd]
    assert len(install_calls) == 2

    # First attempt had pins
    first_call = install_calls[0]
    assert "new-pkg" in first_call
    assert "numpy==1.26.0" in first_call
    assert "pandas==2.0.0" in first_call

    # Second attempt had no pins (just new-pkg)
    second_call = install_calls[1]
    assert "new-pkg" in second_call
    assert "numpy==1.26.0" not in second_call
    assert "pandas==2.0.0" not in second_call

    # Verify warning log
    assert any("retrying without pins" in record.message.lower() for record in capture_logs.records)


@pytest.fixture
def fail_policy(temp_policy_dir):
    """Create policy with on_failure: fail"""
    policy_content = {
        "pytorch-addon": {
            "apply_all_matches": [
                {
                    "condition": {
                        "type": "installed",
                        "package": "torch",
                        "spec": ">=2.0.0"
                    },
                    "type": "pin_dependencies",
                    "pinned_packages": ["torch", "torchvision", "torchaudio"],
                    "on_failure": "fail"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_fail_subprocess(monkeypatch):
    """Mock subprocess that always fails"""
    call_sequence = []

    installed_packages = {
        "torch": "2.1.0",
        "torchvision": "0.16.0",
        "torchaudio": "2.1.0"
    }

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip install - ALWAYS FAIL
        if "install" in cmd and "pytorch-addon" in cmd:
            raise subprocess.CalledProcessError(1, cmd, "", "Installation failed")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages


@pytest.mark.integration
def test_pin_failure_with_fail_raises_exception(
    fail_policy,
    mock_manager_util,
    mock_context,
    mock_fail_subprocess,
    capture_logs
):
    """
    Test exception is raised when on_failure is "fail"

    Priority: 1 (Essential)

    Purpose:
        Verify that when on_failure is set to "fail", installation
        failure with pinned dependencies raises an exception and
        does not retry.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_fail_subprocess

    with PipBatch() as batch:
        # Should raise exception
        with pytest.raises(subprocess.CalledProcessError):
            batch.install("pytorch-addon")

    # Verify only one installation attempt was made (no retry)
    install_calls = [cmd for cmd in call_sequence if "install" in cmd and "pytorch-addon" in cmd]
    assert len(install_calls) == 1

    # Verify it had pins
    install_cmd = install_calls[0]
    assert "pytorch-addon" in install_cmd
    assert "torch==2.1.0" in install_cmd
    assert "torchvision==0.16.0" in install_cmd
    assert "torchaudio==2.1.0" in install_cmd
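The two on_failure modes covered here, summarized (assumed to be the supported set; pip_util may define more):

ON_FAILURE_BEHAVIOR = {
    "retry_without_pin": "rerun pip install without the ==pins, then report success/failure normally",
    "fail": "propagate subprocess.CalledProcessError immediately, with no retry",
}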
139 tests/common/pip_util/test_platform_conditions.py Normal file
@@ -0,0 +1,139 @@
"""
Test platform-specific conditions (Priority 2)

Tests OS and GPU detection for conditional policies
"""

import json
import subprocess
from pathlib import Path

import pytest


@pytest.fixture
def platform_policy(temp_policy_dir):
    """Create policy with platform conditions"""
    policy_content = {
        "onnxruntime": {
            "apply_first_match": [
                {
                    "condition": {
                        "type": "platform",
                        "os": "linux",
                        "has_gpu": True
                    },
                    "type": "replace",
                    "replacement": "onnxruntime-gpu"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_platform_subprocess(monkeypatch):
    """Mock subprocess for platform test"""
    call_sequence = []
    installed_packages = {}

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip install
        if "install" in cmd:
            if "onnxruntime-gpu" in cmd:
                installed_packages["onnxruntime-gpu"] = "1.0.0"
            elif "onnxruntime" in cmd:
                installed_packages["onnxruntime"] = "1.0.0"
            return subprocess.CompletedProcess(cmd, 0, "", "")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages


@pytest.mark.integration
def test_linux_gpu_uses_gpu_package(
    platform_policy,
    mock_manager_util,
    mock_context,
    mock_platform_subprocess,
    mock_platform_linux,
    mock_torch_cuda_available
):
    """
    Test GPU-specific package on Linux + GPU

    Priority: 2 (Important)

    Purpose:
        Verify that platform-conditional policies correctly detect
        Linux + GPU and install the appropriate package variant.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_platform_subprocess

    with PipBatch() as batch:
        result = batch.install("onnxruntime")

    # Verify installation succeeded
    assert result is True

    # Verify GPU version was installed
    install_calls = [cmd for cmd in call_sequence if "install" in cmd]
    assert any("onnxruntime-gpu" in str(cmd) for cmd in install_calls)
    assert "onnxruntime-gpu" in installed_packages


@pytest.mark.integration
def test_windows_no_gpu_uses_cpu_package(
    platform_policy,
    mock_manager_util,
    mock_context,
    mock_platform_subprocess,
    mock_platform_windows,
    mock_torch_cuda_unavailable
):
    """
    Test CPU package on Windows + no GPU

    Priority: 2 (Important)

    Purpose:
        Verify that when platform conditions are not met,
        the original package is installed without replacement.
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_platform_subprocess

    with PipBatch() as batch:
        result = batch.install("onnxruntime")

    # Verify installation succeeded
    assert result is True

    # Verify CPU version was installed (no GPU replacement)
    install_calls = [cmd for cmd in call_sequence if "install" in cmd]
    assert any("onnxruntime" in str(cmd) for cmd in install_calls)
    assert "onnxruntime-gpu" not in str(call_sequence)
    assert "onnxruntime" in installed_packages
    assert "onnxruntime-gpu" not in installed_packages
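Both tests hinge on how a platform condition is matched against the current OS and GPU state. A minimal sketch of that check, assuming a `has_gpu` flag computed elsewhere (the tests derive it from mocked `torch.cuda`); the authoritative logic is `PipBatch._evaluate_condition`:

import platform

def platform_condition_matches(condition, has_gpu):
    # Each key present in the condition must match; absent keys are
    # unconstrained. OS names are compared case-insensitively.
    if "os" in condition and platform.system().lower() != condition["os"]:
        return False
    if "has_gpu" in condition and condition["has_gpu"] != has_gpu:
        return False
    return True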
180 tests/common/pip_util/test_policy_priority.py Normal file
@@ -0,0 +1,180 @@
"""
Test policy priority and conflicts (Priority 2)

Tests that user policies override base policies correctly
"""

import json
import subprocess
from pathlib import Path

import pytest


@pytest.fixture
def conflicting_policies(temp_policy_dir, temp_user_policy_dir):
    """Create conflicting base and user policies"""
    # Base policy
    base_content = {
        "numpy": {
            "apply_first_match": [
                {
                    "type": "skip",
                    "reason": "Base policy skip"
                }
            ]
        }
    }
    base_file = temp_policy_dir / "pip-policy.json"
    base_file.write_text(json.dumps(base_content, indent=2))

    # User policy (should override)
    user_content = {
        "numpy": {
            "apply_first_match": [
                {
                    "type": "force_version",
                    "version": "1.26.0",
                    "reason": "User override"
                }
            ]
        }
    }
    user_file = temp_user_policy_dir / "pip-policy.user.json"
    user_file.write_text(json.dumps(user_content, indent=2))

    return base_file, user_file


@pytest.mark.unit
def test_user_policy_overrides_base_policy(
    conflicting_policies,
    mock_manager_util,
    mock_context,
    mock_subprocess_success
):
    """
    Test user policy completely replaces base policy

    Priority: 2 (Important)

    Purpose:
        Verify that user policy completely overrides base policy
        at the package level (not a section-level merge).
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import get_pip_policy

    policy = get_pip_policy()

    # Verify user policy replaced base policy
    assert "numpy" in policy
    assert "apply_first_match" in policy["numpy"]
    assert len(policy["numpy"]["apply_first_match"]) == 1

    # Should be force_version (user), not skip (base)
    assert policy["numpy"]["apply_first_match"][0]["type"] == "force_version"
    assert policy["numpy"]["apply_first_match"][0]["version"] == "1.26.0"

    # Base policy skip should be completely gone
    assert not any(
        item["type"] == "skip"
        for item in policy["numpy"]["apply_first_match"]
    )
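The assertions above pin down the merge semantics: a package entry in the user policy replaces the base entry wholesale. A sketch of such a package-level merge, with a hypothetical helper name (the real merging happens inside `get_pip_policy`):

def merge_policies(base_policy, user_policy):
    # Package-level override: any package defined in the user policy
    # completely replaces the base entry; sections are not merged.
    merged = dict(base_policy)
    merged.update(user_policy)
    return merged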
@pytest.fixture
def first_match_policy(temp_policy_dir):
    """Create policy with multiple apply_first_match entries"""
    policy_content = {
        "pkg": {
            "apply_first_match": [
                {
                    "condition": {
                        "type": "installed",
                        "package": "numpy"
                    },
                    "type": "force_version",
                    "version": "1.0"
                },
                {
                    "type": "force_version",
                    "version": "2.0"
                },
                {
                    "type": "skip"
                }
            ]
        }
    }

    policy_file = temp_policy_dir / "pip-policy.json"
    policy_file.write_text(json.dumps(policy_content, indent=2))
    return policy_file


@pytest.fixture
def mock_first_match_subprocess(monkeypatch):
    """Mock subprocess for first match test"""
    call_sequence = []

    installed_packages = {
        "numpy": "1.26.0"
    }

    def mock_run(cmd, **kwargs):
        call_sequence.append(cmd)

        # pip freeze
        if "freeze" in cmd:
            output = "\n".join([f"{pkg}=={ver}" for pkg, ver in installed_packages.items()])
            return subprocess.CompletedProcess(cmd, 0, output, "")

        # pip install
        if "install" in cmd and "pkg" in cmd:
            if "pkg==1.0" in cmd:
                installed_packages["pkg"] = "1.0"
            return subprocess.CompletedProcess(cmd, 0, "", "")

        return subprocess.CompletedProcess(cmd, 0, "", "")

    monkeypatch.setattr("subprocess.run", mock_run)
    return call_sequence, installed_packages


@pytest.mark.integration
def test_first_match_stops_at_first_satisfied(
    first_match_policy,
    mock_manager_util,
    mock_context,
    mock_first_match_subprocess
):
    """
    Test apply_first_match stops at first satisfied condition

    Priority: 2 (Important)

    Purpose:
        Verify that in apply_first_match, only the first policy
        with a satisfied condition is executed (exclusive execution).
    """
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    call_sequence, installed_packages = mock_first_match_subprocess

    with PipBatch() as batch:
        result = batch.install("pkg")

    # Verify installation succeeded
    assert result is True

    # First condition satisfied (numpy installed), so version 1.0 applied
    install_calls = [cmd for cmd in call_sequence if "install" in cmd and "pkg" in cmd]
    assert len(install_calls) > 0
    assert "pkg==1.0" in install_calls[0]
    assert "pkg==2.0" not in str(call_sequence)  # Second policy not applied
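The exclusive first-match semantics verified here reduce to a short loop. A hedged sketch, assuming `evaluate` behaves like `PipBatch._evaluate_condition`:

def select_first_match(rules, evaluate):
    # Return the first rule whose condition is absent or satisfied;
    # once a rule matches, later rules are never consulted.
    for rule in rules:
        condition = rule.get("condition")
        if condition is None or evaluate(condition):
            return rule
    return None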
178 tests/common/pip_util/test_unit_parsing.py Normal file
@@ -0,0 +1,178 @@
"""
Unit tests for package spec parsing and condition evaluation

Tests core utility functions
"""

import subprocess
from pathlib import Path

import pytest


@pytest.mark.unit
def test_parse_package_spec_name_only(mock_manager_util, mock_context):
    """Test parsing package name without version"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    name, spec = batch._parse_package_spec("numpy")

    assert name == "numpy"
    assert spec is None


@pytest.mark.unit
def test_parse_package_spec_exact_version(mock_manager_util, mock_context):
    """Test parsing package with exact version"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    name, spec = batch._parse_package_spec("numpy==1.26.0")

    assert name == "numpy"
    assert spec == "==1.26.0"


@pytest.mark.unit
def test_parse_package_spec_min_version(mock_manager_util, mock_context):
    """Test parsing package with minimum version"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    name, spec = batch._parse_package_spec("pandas>=2.0.0")

    assert name == "pandas"
    assert spec == ">=2.0.0"


@pytest.mark.unit
def test_parse_package_spec_hyphenated_name(mock_manager_util, mock_context):
    """Test parsing package name with hyphens"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    name, spec = batch._parse_package_spec("scikit-learn>=1.0")

    assert name == "scikit-learn"
    assert spec == ">=1.0"


@pytest.mark.unit
def test_evaluate_condition_none(mock_manager_util, mock_context):
    """Test None condition always returns True"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    result = batch._evaluate_condition(None, "numpy", {})

    assert result is True


@pytest.mark.unit
def test_evaluate_condition_installed_package_exists(mock_manager_util, mock_context):
    """Test installed condition when package exists"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    condition = {"type": "installed", "package": "numpy"}
    installed = {"numpy": "1.26.0"}

    result = batch._evaluate_condition(condition, "numba", installed)

    assert result is True


@pytest.mark.unit
def test_evaluate_condition_installed_package_not_exists(mock_manager_util, mock_context):
    """Test installed condition when package doesn't exist"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    condition = {"type": "installed", "package": "numpy"}
    installed = {}

    result = batch._evaluate_condition(condition, "numba", installed)

    assert result is False


@pytest.mark.unit
def test_evaluate_condition_platform_os_match(
    mock_manager_util,
    mock_context,
    mock_platform_linux
):
    """Test platform OS condition matching"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    condition = {"type": "platform", "os": "linux"}

    result = batch._evaluate_condition(condition, "package", {})

    assert result is True


@pytest.mark.unit
def test_evaluate_condition_platform_gpu_available(
    mock_manager_util,
    mock_context,
    mock_torch_cuda_available
):
    """Test GPU detection when available"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    condition = {"type": "platform", "has_gpu": True}

    result = batch._evaluate_condition(condition, "package", {})

    assert result is True


@pytest.mark.unit
def test_evaluate_condition_platform_gpu_not_available(
    mock_manager_util,
    mock_context,
    mock_torch_cuda_unavailable
):
    """Test GPU detection when not available"""
    # Path setup handled by conftest.py
    from comfyui_manager.common.pip_util import PipBatch

    batch = PipBatch()
    condition = {"type": "platform", "has_gpu": True}

    result = batch._evaluate_condition(condition, "package", {})

    assert result is False
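The splitting behavior exercised above (a package name with an optional PEP 440 specifier) is also what the `packaging` library provides, and `packaging` is already listed in tests/requirements.txt. An equivalent parser as a sketch, not the actual `_parse_package_spec` implementation:

from packaging.requirements import Requirement

def parse_package_spec(spec):
    # "scikit-learn>=1.0" -> ("scikit-learn", ">=1.0"); a bare name
    # yields an empty specifier, normalized here to None.
    req = Requirement(spec)
    specifier = str(req.specifier) or None
    return req.name, specifier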
41 tests/pytest.ini Normal file
@@ -0,0 +1,41 @@
[pytest]
# Global pytest configuration for comfyui-manager tests

# Test discovery
python_files = test_*.py
python_classes = Test*
python_functions = test_*

# Add comfyui_manager to Python path
pythonpath = ../comfyui_manager

# Output options
addopts =
    # Verbose output
    -v
    # Show extra test summary info
    -ra
    # Show local variables in tracebacks
    --showlocals
    # Strict markers (fail on unknown markers)
    --strict-markers

# Markers for test categorization
markers =
    unit: Unit tests for individual functions
    integration: Integration tests for policy application
    e2e: End-to-end workflow tests
    slow: Tests that take significant time
    requires_network: Tests that require network access

# Logging
log_cli = false
log_cli_level = INFO
log_cli_format = %(asctime)s [%(levelname)8s] %(message)s
log_cli_date_format = %Y-%m-%d %H:%M:%S

# Warnings
filterwarnings =
    error
    ignore::DeprecationWarning
    ignore::PendingDeprecationWarning
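With these markers registered, subsets of the suite can be selected from the command line, e.g. `pytest -m unit` for the fast unit tests or `pytest -m "not slow and not requires_network"` for an offline run; `--strict-markers` makes a typo in a marker name fail loudly instead of silently selecting nothing.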
19 tests/requirements.txt Normal file
@@ -0,0 +1,19 @@
# Test Dependencies for pip_util.py
# Install in isolated venv to prevent environment corruption

# Testing Framework
pytest>=7.4.0
pytest-cov>=4.1.0
pytest-mock>=3.11.0

# Code Quality
flake8>=6.0.0
black>=23.0.0
mypy>=1.5.0

# Dependencies from main project
packaging>=23.0

# Mock and testing utilities
responses>=0.23.0
freezegun>=1.2.0
75 tests/setup_test_env.sh Executable file
@@ -0,0 +1,75 @@
#!/bin/bash

# Test Environment Setup Script for pip_util.py
# Creates isolated venv to prevent environment corruption

set -e

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
VENV_DIR="${SCRIPT_DIR}/test_venv"

echo "=================================================="
echo "pip_util.py Test Environment Setup"
echo "=================================================="
echo ""

# Check Python version
PYTHON_CMD=""
if command -v python3 &> /dev/null; then
    PYTHON_CMD="python3"
elif command -v python &> /dev/null; then
    PYTHON_CMD="python"
else
    echo "❌ Error: Python not found. Please install Python 3.8 or higher."
    exit 1
fi

PYTHON_VERSION=$($PYTHON_CMD --version 2>&1 | awk '{print $2}')
echo "✓ Found Python: $PYTHON_VERSION"

# Remove existing venv if present
if [ -d "$VENV_DIR" ]; then
    echo ""
    read -p "⚠️  Existing test venv found. Remove and recreate? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        echo "🗑️  Removing existing venv..."
        rm -rf "$VENV_DIR"
    else
        echo "Keeping existing venv. Skipping creation."
        exit 0
    fi
fi

# Create venv
echo ""
echo "📦 Creating virtual environment..."
$PYTHON_CMD -m venv "$VENV_DIR"

# Activate venv
echo "🔌 Activating virtual environment..."
source "${VENV_DIR}/bin/activate"

# Upgrade pip
echo "⬆️  Upgrading pip..."
pip install --upgrade pip

# Install test dependencies
echo ""
echo "📚 Installing test dependencies..."
pip install -r "${SCRIPT_DIR}/requirements.txt"

echo ""
echo "=================================================="
echo "✅ Test environment setup complete!"
echo "=================================================="
echo ""
echo "To activate the test environment:"
echo "  source ${VENV_DIR}/bin/activate"
echo ""
echo "To run tests:"
echo "  pytest"
echo ""
echo "To deactivate:"
echo "  deactivate"
echo ""
@@ -1,510 +0,0 @@
"""
Tests for TaskQueue functionality.

This module tests the core TaskQueue operations including:
- Task queueing and processing
- Batch tracking
- Thread lifecycle management
- State management
- WebSocket message delivery
"""

import asyncio
import json
import threading
import time
import uuid
from datetime import datetime
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, Mock, patch
from typing import Any, Dict, Optional

import pytest

from comfyui_manager.data_models import (
    QueueTaskItem,
    TaskExecutionStatus,
    TaskStateMessage,
    InstallPackParams,
    ManagerDatabaseSource,
    ManagerChannel,
)


class MockTaskQueue:
    """
    A testable version of TaskQueue that allows for dependency injection
    and isolated testing without global state.
    """

    def __init__(self, history_dir: Optional[Path] = None):
        # Don't set the global instance for testing
        self.mutex = threading.RLock()
        self.not_empty = threading.Condition(self.mutex)
        self.current_index = 0
        self.pending_tasks = []
        self.running_tasks = {}
        self.history_tasks = {}
        self.task_counter = 0
        self.batch_id = None
        self.batch_start_time = None
        self.batch_state_before = None
        self._worker_task = None
        self._history_dir = history_dir

        # Mock external dependencies
        self.mock_core = MagicMock()
        self.mock_prompt_server = MagicMock()

    def is_processing(self) -> bool:
        """Check if the queue is currently processing tasks"""
        return (
            self._worker_task is not None
            and self._worker_task.is_alive()
        )

    def start_worker(self, mock_task_worker=None) -> bool:
        """Start the task worker. Can inject a mock worker for testing."""
        if self._worker_task is not None and self._worker_task.is_alive():
            return False  # Already running

        if mock_task_worker:
            self._worker_task = threading.Thread(target=mock_task_worker)
        else:
            # Use a simple test worker that processes one task then stops
            self._worker_task = threading.Thread(target=self._test_worker)
        self._worker_task.start()
        return True

    def _test_worker(self):
        """Simple test worker that processes tasks without external dependencies"""
        while True:
            task = self.get(timeout=1.0)  # Short timeout for tests
            if task is None:
                if self.total_count() == 0:
                    break
                continue

            item, task_index = task

            # Simulate task processing
            self.running_tasks[task_index] = item

            # Simulate work
            time.sleep(0.1)

            # Mark as completed
            status = TaskExecutionStatus(
                status_str="success",
                completed=True,
                messages=["Test task completed"]
            )

            self.mark_done(task_index, item, status, "Test result")

            # Clean up
            if task_index in self.running_tasks:
                del self.running_tasks[task_index]

    def get_current_state(self) -> TaskStateMessage:
        """Get current queue state with mocked dependencies"""
        return TaskStateMessage(
            history=self.get_history(),
            running_queue=self.get_current_queue()[0],
            pending_queue=self.get_current_queue()[1],
            installed_packs={}  # Mocked empty
        )

    def send_queue_state_update(self, msg: str, update, client_id: Optional[str] = None):
        """Mock implementation that tracks calls instead of sending WebSocket messages"""
        if not hasattr(self, '_sent_updates'):
            self._sent_updates = []
        self._sent_updates.append({
            'msg': msg,
            'update': update,
            'client_id': client_id
        })

    # Copy the essential methods from the real TaskQueue
    def put(self, item) -> None:
        """Add a task to the queue. Item can be a dict or QueueTaskItem model."""
        with self.mutex:
            # Start a new batch if this is the first task after queue was empty
            if (
                self.batch_id is None
                and len(self.pending_tasks) == 0
                and len(self.running_tasks) == 0
            ):
                self._start_new_batch()

            # Convert to Pydantic model if it's a dict
            if isinstance(item, dict):
                item = QueueTaskItem(**item)

            import heapq
            # Wrap in tuple with priority to make it comparable
            # Use task_counter as priority to maintain FIFO order
            priority_item = (self.task_counter, item)
            heapq.heappush(self.pending_tasks, priority_item)
            self.task_counter += 1
            self.not_empty.notify()

    def _start_new_batch(self) -> None:
        """Start a new batch session for tracking operations."""
        self.batch_id = (
            f"test_batch_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"
        )
        self.batch_start_time = datetime.now().isoformat()
        self.batch_state_before = {"test": "state"}  # Simplified for testing

    def get(self, timeout: Optional[float] = None):
        """Get next task from queue"""
        with self.not_empty:
            while len(self.pending_tasks) == 0:
                self.not_empty.wait(timeout=timeout)
                if timeout is not None and len(self.pending_tasks) == 0:
                    return None
            import heapq
            priority_item = heapq.heappop(self.pending_tasks)
            task_index, item = priority_item  # Unwrap the tuple
            return item, task_index

    def total_count(self) -> int:
        """Get total number of tasks (pending + running)"""
        return len(self.pending_tasks) + len(self.running_tasks)

    def done_count(self) -> int:
        """Get number of completed tasks"""
        return len(self.history_tasks)

    def get_current_queue(self):
        """Get current running and pending queues"""
        running = list(self.running_tasks.values())
        # Extract items from the priority tuples
        pending = [item for priority, item in self.pending_tasks]
        return running, pending

    def get_history(self):
        """Get task history"""
        return self.history_tasks

    def mark_done(self, task_index: int, item: QueueTaskItem, status: TaskExecutionStatus, result: str):
        """Mark a task as completed"""
        from comfyui_manager.data_models import TaskHistoryItem

        history_item = TaskHistoryItem(
            ui_id=item.ui_id,
            client_id=item.client_id,
            kind=item.kind.value if hasattr(item.kind, 'value') else str(item.kind),
            timestamp=datetime.now().isoformat(),
            result=result,
            status=status
        )

        self.history_tasks[item.ui_id] = history_item

    def finalize(self):
        """Finalize batch (simplified for testing)"""
        if self._history_dir and self.batch_id:
            batch_file = self._history_dir / f"{self.batch_id}.json"
            batch_record = {
                "batch_id": self.batch_id,
                "start_time": self.batch_start_time,
                "state_before": self.batch_state_before,
                "operations": []  # Simplified
            }
            with open(batch_file, 'w') as f:
                json.dump(batch_record, f, indent=2)


class TestTaskQueue:
    """Test suite for TaskQueue functionality"""

    @pytest.fixture
    def task_queue(self, tmp_path):
        """Create a clean TaskQueue instance for each test"""
        return MockTaskQueue(history_dir=tmp_path)

    @pytest.fixture
    def sample_task(self):
        """Create a sample task for testing"""
        return QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client",
            kind="install",
            params=InstallPackParams(
                id="test-node",
                version="1.0.0",
                selected_version="1.0.0",
                mode=ManagerDatabaseSource.cache,
                channel=ManagerChannel.dev
            )
        )

    def test_task_queue_initialization(self, task_queue):
        """Test TaskQueue initializes with correct default state"""
        assert task_queue.total_count() == 0
        assert task_queue.done_count() == 0
        assert not task_queue.is_processing()
        assert task_queue.batch_id is None
        assert len(task_queue.pending_tasks) == 0
        assert len(task_queue.running_tasks) == 0
        assert len(task_queue.history_tasks) == 0

    def test_put_task_starts_batch(self, task_queue, sample_task):
        """Test that adding first task starts a new batch"""
        assert task_queue.batch_id is None

        task_queue.put(sample_task)

        assert task_queue.batch_id is not None
        assert task_queue.batch_id.startswith("test_batch_")
        assert task_queue.batch_start_time is not None
        assert task_queue.total_count() == 1

    def test_put_multiple_tasks(self, task_queue, sample_task):
        """Test adding multiple tasks to queue"""
        task_queue.put(sample_task)

        # Create second task
        task2 = QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client_2",
            kind="install",
            params=sample_task.params
        )
        task_queue.put(task2)

        assert task_queue.total_count() == 2
        assert len(task_queue.pending_tasks) == 2

    def test_put_task_with_dict(self, task_queue):
        """Test adding task as dictionary gets converted to QueueTaskItem"""
        task_dict = {
            "ui_id": str(uuid.uuid4()),
            "client_id": "test_client",
            "kind": "install",
            "params": {
                "id": "test-node",
                "version": "1.0.0",
                "selected_version": "1.0.0",
                "mode": "cache",
                "channel": "dev"
            }
        }

        task_queue.put(task_dict)

        assert task_queue.total_count() == 1
        # Verify it was converted to QueueTaskItem
        item, _ = task_queue.get(timeout=0.1)
        assert isinstance(item, QueueTaskItem)
        assert item.ui_id == task_dict["ui_id"]

    def test_get_task_from_queue(self, task_queue, sample_task):
        """Test retrieving task from queue"""
        task_queue.put(sample_task)

        item, task_index = task_queue.get(timeout=0.1)

        assert item == sample_task
        assert isinstance(task_index, int)
        assert task_queue.total_count() == 0  # Should be removed from pending

    def test_get_task_timeout(self, task_queue):
        """Test get with timeout on empty queue returns None"""
        result = task_queue.get(timeout=0.1)
        assert result is None

    def test_start_stop_worker(self, task_queue):
        """Test worker thread lifecycle"""
        assert not task_queue.is_processing()

        # Mock worker that stops immediately
        stop_event = threading.Event()

        def mock_worker():
            stop_event.wait(0.1)  # Brief delay then stop

        started = task_queue.start_worker(mock_worker)
        assert started is True
        assert task_queue.is_processing()

        # Try to start again - should return False
        started_again = task_queue.start_worker(mock_worker)
        assert started_again is False

        # Wait for worker to finish
        stop_event.set()
        task_queue._worker_task.join(timeout=1.0)
        assert not task_queue.is_processing()

    def test_task_processing_integration(self, task_queue, sample_task):
        """Test full task processing workflow"""
        # Add task to queue
        task_queue.put(sample_task)
        assert task_queue.total_count() == 1

        # Start worker
        started = task_queue.start_worker()
        assert started is True

        # Wait for processing to complete
        for _ in range(50):  # Max 5 seconds
            if task_queue.done_count() > 0:
                break
            time.sleep(0.1)

        # Verify task was processed
        assert task_queue.done_count() == 1
        assert task_queue.total_count() == 0
        assert sample_task.ui_id in task_queue.history_tasks

        # Stop worker
        task_queue._worker_task.join(timeout=1.0)

    def test_get_current_state(self, task_queue, sample_task):
        """Test getting current queue state"""
        task_queue.put(sample_task)

        state = task_queue.get_current_state()

        assert isinstance(state, TaskStateMessage)
        assert len(state.pending_queue) == 1
        assert len(state.running_queue) == 0
        assert state.pending_queue[0] == sample_task

    def test_batch_finalization(self, task_queue, tmp_path):
        """Test batch history is saved correctly"""
        task_queue.put(QueueTaskItem(
            ui_id=str(uuid.uuid4()),
            client_id="test_client",
            kind="install",
            params=InstallPackParams(
                id="test-node",
                version="1.0.0",
                selected_version="1.0.0",
                mode=ManagerDatabaseSource.cache,
                channel=ManagerChannel.dev
            )
        ))

        batch_id = task_queue.batch_id
        task_queue.finalize()

        # Check batch file was created
        batch_file = tmp_path / f"{batch_id}.json"
        assert batch_file.exists()

        # Verify content
        with open(batch_file) as f:
            batch_data = json.load(f)

        assert batch_data["batch_id"] == batch_id
        assert "start_time" in batch_data
        assert "state_before" in batch_data

    def test_concurrent_access(self, task_queue):
        """Test thread-safe concurrent access to queue"""
        num_tasks = 10
        added_tasks = []

        def add_tasks():
            for i in range(num_tasks):
                task = QueueTaskItem(
                    ui_id=f"task_{i}",
                    client_id=f"client_{i}",
                    kind="install",
                    params=InstallPackParams(
                        id=f"node_{i}",
                        version="1.0.0",
                        selected_version="1.0.0",
                        mode=ManagerDatabaseSource.cache,
                        channel=ManagerChannel.dev
                    )
                )
                task_queue.put(task)
                added_tasks.append(task)

        # Start multiple threads adding tasks
        threads = []
        for _ in range(3):
            thread = threading.Thread(target=add_tasks)
            threads.append(thread)
            thread.start()

        # Wait for all threads to complete
        for thread in threads:
            thread.join()

        # Verify all tasks were added
        assert task_queue.total_count() == num_tasks * 3
        assert len(added_tasks) == num_tasks * 3

    @pytest.mark.asyncio
    async def test_queue_state_updates_tracking(self, task_queue, sample_task):
        """Test that queue state updates are tracked properly"""
        # Mock the update tracking
        task_queue.send_queue_state_update("test-message", {"test": "data"}, "client1")

        # Verify update was tracked
        assert hasattr(task_queue, '_sent_updates')
        assert len(task_queue._sent_updates) == 1

        update = task_queue._sent_updates[0]
        assert update['msg'] == "test-message"
        assert update['update'] == {"test": "data"}
        assert update['client_id'] == "client1"


class TestTaskQueueEdgeCases:
    """Test edge cases and error conditions"""

    @pytest.fixture
    def task_queue(self):
        return MockTaskQueue()

    def test_empty_queue_operations(self, task_queue):
        """Test operations on empty queue"""
        assert task_queue.total_count() == 0
        assert task_queue.done_count() == 0

        # Getting from empty queue should timeout
        result = task_queue.get(timeout=0.1)
        assert result is None

        # State should be empty
        state = task_queue.get_current_state()
        assert len(state.pending_queue) == 0
        assert len(state.running_queue) == 0

    def test_invalid_task_data(self, task_queue):
        """Test handling of invalid task data"""
        # This should raise ValidationError due to missing required fields
        with pytest.raises(Exception):  # ValidationError from Pydantic
            task_queue.put({
                "ui_id": "test",
                # Missing required fields
            })

    def test_worker_cleanup_on_exception(self, task_queue):
        """Test worker cleanup when worker function raises exception"""
        exception_raised = threading.Event()

        def failing_worker():
            exception_raised.set()
            raise RuntimeError("Test exception")

        started = task_queue.start_worker(failing_worker)
        assert started is True

        # Wait for exception to be raised
        exception_raised.wait(timeout=1.0)

        # Worker should eventually stop
        task_queue._worker_task.join(timeout=1.0)
        assert not task_queue.is_processing()


if __name__ == "__main__":
    # Allow running tests directly
    pytest.main([__file__])