Compare commits: manager-v4...draft-v4-s (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 24ca0ab538 |  |
|  | 62da330182 |  |
.github/workflows/publish-to-pypi.yml (vendored, 32 lines changed)

@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - manager-v4
+      - main
     paths:
       - "pyproject.toml"

@@ -21,7 +21,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: '3.x'
+          python-version: '3.9'

       - name: Install build dependencies
         run: |
@@ -31,28 +31,28 @@ jobs:
       - name: Get current version
         id: current_version
         run: |
-          CURRENT_VERSION=$(grep -oP '^version = "\K[^"]+' pyproject.toml)
+          CURRENT_VERSION=$(grep -oP 'version = "\K[^"]+' pyproject.toml)
          echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          echo "Current version: $CURRENT_VERSION"

       - name: Build package
         run: python -m build

-      # - name: Create GitHub Release
-      #   id: create_release
-      #   uses: softprops/action-gh-release@v2
-      #   env:
-      #     GITHUB_TOKEN: ${{ github.token }}
-      #   with:
-      #     files: dist/*
-      #     tag_name: v${{ steps.current_version.outputs.version }}
-      #     draft: false
-      #     prerelease: false
-      #     generate_release_notes: true
+      - name: Create GitHub Release
+        id: create_release
+        uses: softprops/action-gh-release@v2
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          files: dist/*
+          tag_name: v${{ steps.current_version.outputs.version }}
+          draft: false
+          prerelease: false
+          generate_release_notes: true

       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc
+        uses: pypa/gh-action-pypi-publish@release/v1
         with:
           password: ${{ secrets.PYPI_TOKEN }}
           skip-existing: true
           verbose: true
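The only behavioral difference in the version-extraction step is the `^` anchor on the grep pattern. A small self-contained sketch of the same idea in Python, since the rest of this diff is Python; the pyproject contents below are invented for illustration, and the workflow itself uses `grep -oP` rather than the `re` module:

```python
import re

# Invented pyproject.toml contents for illustration only.
pyproject = '''[project]
name = "comfyui-manager"
version = "4.0.3"

[tool.example-plugin]
schema_version = "1.2"
'''

# Anchored: only a line that *starts* with `version = "..."` can match.
anchored = re.search(r'^version = "([^"]+)"', pyproject, re.M)

# Unanchored: the first line *containing* `version = "..."` matches, so a line
# such as `schema_version = "1.2"` could be picked up if it appeared first.
unanchored = re.search(r'version = "([^"]+)"', pyproject, re.M)

print(anchored.group(1), unanchored.group(1))  # here both print 4.0.3
```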
.github/workflows/publish.yml (vendored, new file, 25 lines)

@@ -0,0 +1,25 @@
+name: Publish to Comfy registry
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - main-blocked
+    paths:
+      - "pyproject.toml"
+
+permissions:
+  issues: write
+
+jobs:
+  publish-node:
+    name: Publish Custom Node to registry
+    runs-on: ubuntu-latest
+    if: ${{ github.repository_owner == 'ltdrdata' || github.repository_owner == 'Comfy-Org' }}
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v4
+      - name: Publish Custom Node
+        uses: Comfy-Org/publish-node-action@v1
+        with:
+          ## Add your own personal access token to your Github Repository secrets and reference it here.
+          personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
README.md (50 lines changed)

@@ -89,20 +89,20 @@


 ## Paths
-In `ComfyUI-Manager` V4.0.3b4 and later, configuration files and dynamically generated files are located under `<USER_DIRECTORY>/__manager/`.
+In `ComfyUI-Manager` V3.0 and later, configuration files and dynamically generated files are located under `<USER_DIRECTORY>/default/ComfyUI-Manager/`.

 * <USER_DIRECTORY>
   * If executed without any options, the path defaults to ComfyUI/user.
   * It can be set using --user-directory <USER_DIRECTORY>.

-* Basic config files: `<USER_DIRECTORY>/__manager/config.ini`
-* Configurable channel lists: `<USER_DIRECTORY>/__manager/channels.ini`
-* Configurable pip overrides: `<USER_DIRECTORY>/__manager/pip_overrides.json`
-* Configurable pip blacklist: `<USER_DIRECTORY>/__manager/pip_blacklist.list`
-* Configurable pip auto fix: `<USER_DIRECTORY>/__manager/pip_auto_fix.list`
-* Saved snapshot files: `<USER_DIRECTORY>/__manager/snapshots`
-* Startup script files: `<USER_DIRECTORY>/__manager/startup-scripts`
-* Component files: `<USER_DIRECTORY>/__manager/components`
+* Basic config files: `<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini`
+* Configurable channel lists: `<USER_DIRECTORY>/default/ComfyUI-Manager/channels.ini`
+* Configurable pip overrides: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_overrides.json`
+* Configurable pip blacklist: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_blacklist.list`
+* Configurable pip auto fix: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_auto_fix.list`
+* Saved snapshot files: `<USER_DIRECTORY>/default/ComfyUI-Manager/snapshots`
+* Startup script files: `<USER_DIRECTORY>/default/ComfyUI-Manager/startup-scripts`
+* Component files: `<USER_DIRECTORY>/default/ComfyUI-Manager/components`


 ## `extra_model_paths.yaml` Configuration
@@ -115,17 +115,17 @@ The following settings are applied based on the section marked as `is_default`.

 ## Snapshot-Manager
 * When you press `Save snapshot` or use `Update All` on `Manager Menu`, the current installation status snapshot is saved.
-  * Snapshot file dir: `<USER_DIRECTORY>/__manager/snapshots`
+  * Snapshot file dir: `<USER_DIRECTORY>/default/ComfyUI-Manager/snapshots`
   * You can rename snapshot file.
 * Press the "Restore" button to revert to the installation status of the respective snapshot.
   * However, for custom nodes not managed by Git, snapshot support is incomplete.
 * When you press `Restore`, it will take effect on the next ComfyUI startup.
-  * The selected snapshot file is saved in `<USER_DIRECTORY>/__manager/startup-scripts/restore-snapshot.json`, and upon restarting ComfyUI, the snapshot is applied and then deleted.
+  * The selected snapshot file is saved in `<USER_DIRECTORY>/default/ComfyUI-Manager/startup-scripts/restore-snapshot.json`, and upon restarting ComfyUI, the snapshot is applied and then deleted.

 


-## cm-cli: command line tools for power users
+## cm-cli: command line tools for power user
 * A tool is provided that allows you to use the features of ComfyUI-Manager without running ComfyUI.
 * For more details, please refer to the [cm-cli documentation](docs/en/cm-cli.md).

@@ -169,12 +169,12 @@ The following settings are applied based on the section marked as `is_default`.
 }
 ```
 * `<current timestamp>` Ensure that the timestamp is always unique.
-  * "components" should have the same structure as the content of the file stored in `<USER_DIRECTORY>/__manager/components`.
+  * "components" should have the same structure as the content of the file stored in `<USER_DIRECTORY>/default/ComfyUI-Manager/components`.
 * `<component name>`: The name should be in the format `<prefix>::<node name>`.
-  * `<component node data>`: In the node data of the group node.
+  * `<compnent nodeata>`: In the nodedata of the group node.
 * `<version>`: Only two formats are allowed: `major.minor.patch` or `major.minor`. (e.g. `1.0`, `2.2.1`)
 * `<datetime>`: Saved time
-* `<packname>`: If the packname is not empty, the category becomes packname/workflow, and it is saved in the <packname>.pack file in `<USER_DIRECTORY>/__manager/components`.
+* `<packname>`: If the packname is not empty, the category becomes packname/workflow, and it is saved in the <packname>.pack file in `<USER_DIRECTORY>/default/ComfyUI-Manager/components`.
 * `<category>`: If there is neither a category nor a packname, it is saved in the components category.
 ```
 "version":"1.0",
@@ -189,7 +189,7 @@ The following settings are applied based on the section marked as `is_default`.
 * Dragging and dropping or pasting a single component will add a node. However, when adding multiple components, nodes will not be added.


-## Support for installing missing nodes
+## Support of missing nodes installation

 

@@ -229,10 +229,10 @@ The following settings are applied based on the section marked as `is_default`.
 * Logging to file feature
   * This feature is enabled by default and can be disabled by setting `file_logging = False` in the `config.ini`.

-* Fix node (recreate): When right-clicking on a node and selecting `Fix node (recreate)`, you can recreate the node. The widget's values are reset, while the connections maintain those with the same names.
+* Fix node(recreate): When right-clicking on a node and selecting `Fix node (recreate)`, you can recreate the node. The widget's values are reset, while the connections maintain those with the same names.
   * It is used to correct errors in nodes of old workflows created before, which are incompatible with the version changes of custom nodes.

-* Double-Click Node Title: You can set the double-click behavior of nodes in the ComfyUI-Manager menu.
+* Double-Click Node Title: You can set the double click behavior of nodes in the ComfyUI-Manager menu.
   * `Copy All Connections`, `Copy Input Connections`: Double-clicking a node copies the connections of the nearest node.
     * This action targets the nearest node within a straight-line distance of 1000 pixels from the center of the node.
     * In the case of `Copy All Connections`, it duplicates existing outputs, but since it does not allow duplicate connections, the existing output connections of the original node are disconnected.
@@ -298,17 +298,17 @@ When you run the `scan.sh` script:

 * It updates the `github-stats.json`.
   * This uses the GitHub API, so set your token with `export GITHUB_TOKEN=your_token_here` to avoid quickly reaching the rate limit and malfunctioning.
-  * To skip this step, add the `--skip-stat-update` option.
+  * To skip this step, add the `--skip-update-stat` option.

 * The `--skip-all` option applies both `--skip-update` and `--skip-stat-update`.


 ## Troubleshooting
-* If your `git.exe` is installed in a specific location other than system git, please install ComfyUI-Manager and run ComfyUI. Then, specify the path including the file name in `git_exe = ` in the `<USER_DIRECTORY>/__manager/config.ini` file that is generated.
+* If your `git.exe` is installed in a specific location other than system git, please install ComfyUI-Manager and run ComfyUI. Then, specify the path including the file name in `git_exe = ` in the `<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini` file that is generated.
 * If updating ComfyUI-Manager itself fails, please go to the **ComfyUI-Manager** directory and execute the command `git update-ref refs/remotes/origin/main a361cc1 && git fetch --all && git pull`.
-* If you encounter the error message `Overlapped Object has pending operation at deallocation on ComfyUI Manager load` under Windows
+* If you encounter the error message `Overlapped Object has pending operation at deallocation on Comfyui Manager load` under Windows
   * Edit `config.ini` file: add `windows_selector_event_loop_policy = True`
-* If the `SSL: CERTIFICATE_VERIFY_FAILED` error occurs.
+* if `SSL: CERTIFICATE_VERIFY_FAILED` error is occured.
   * Edit `config.ini` file: add `bypass_ssl = True`

check.sh (4 lines changed)

@@ -37,7 +37,7 @@ find ~/.tmp/default -name "*.py" -print0 | xargs -0 grep -E "crypto|^_A="

 echo
 echo CHECK3
-find ~/.tmp/default -name "requirements.txt" | xargs grep "^\s*[^#]*https\?:"
-find ~/.tmp/default -name "requirements.txt" | xargs grep "^\s*[^#].*\.whl"
+find ~/.tmp/default -name "requirements.txt" | xargs grep "^\s*https\\?:"
+find ~/.tmp/default -name "requirements.txt" | xargs grep "\.whl"

 echo
@@ -15,7 +15,7 @@ def start():
     logging.info('[START] ComfyUI-Manager')
     from .common import cm_global  # noqa: F401

-    if args.enable_manager:
+    if not args.disable_manager:
         if args.enable_manager_legacy_ui:
             try:
                 from .legacy import manager_server  # noqa: F401
@@ -42,7 +42,7 @@ def should_be_disabled(fullpath:str) -> bool:
    1. Disables the legacy ComfyUI-Manager.
    2. The blocklist can be expanded later based on policies.
    """
-    if args.enable_manager:
+    if not args.disable_manager:
        # In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
        # It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
        dir_name = os.path.basename(fullpath).lower()
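Both hunks flip the same gate from an opt-in flag to an opt-out flag. A minimal argparse sketch of the two defaults; only the flag names come from the diff, the argparse wiring here is assumed:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--enable-manager", action="store_true")    # left-hand side: manager is opt-in
parser.add_argument("--disable-manager", action="store_true")   # right-hand side: manager is opt-out

args = parser.parse_args([])             # no flags passed on the command line
print(args.enable_manager)               # False -> `if args.enable_manager:` skips the manager
print(not args.disable_manager)          # True  -> `if not args.disable_manager:` starts the manager
```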
@@ -11,7 +11,6 @@ from . import manager_util

 import requests
 import toml
-import logging

 base_url = "https://api.comfy.org"

@@ -24,7 +23,7 @@ async def get_cnr_data(cache_mode=True, dont_wait=True):
     try:
         return await _get_cnr_data(cache_mode, dont_wait)
     except asyncio.TimeoutError:
-        logging.info("A timeout occurred during the fetch process from ComfyRegistry.")
+        print("A timeout occurred during the fetch process from ComfyRegistry.")
         return await _get_cnr_data(cache_mode=True, dont_wait=True)  # timeout fallback

 async def _get_cnr_data(cache_mode=True, dont_wait=True):
@@ -80,12 +79,12 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
                 full_nodes[x['id']] = x

             if page % 5 == 0:
-                logging.info(f"FETCH ComfyRegistry Data: {page}/{sub_json_obj['totalPages']}")
+                print(f"FETCH ComfyRegistry Data: {page}/{sub_json_obj['totalPages']}")

             page += 1
             time.sleep(0.5)

-        logging.info("FETCH ComfyRegistry Data [DONE]")
+        print("FETCH ComfyRegistry Data [DONE]")

         for v in full_nodes.values():
             if 'latest_version' not in v:
@@ -101,7 +100,7 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
             if cache_state == 'not-cached':
                 return {}
             else:
-                logging.info("[ComfyUI-Manager] The ComfyRegistry cache update is still in progress, so an outdated cache is being used.")
+                print("[ComfyUI-Manager] The ComfyRegistry cache update is still in progress, so an outdated cache is being used.")
                 with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
                     return json.load(json_file)['nodes']

@@ -115,7 +114,7 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
             return json_obj['nodes']
     except Exception:
         res = {}
-        logging.warning("Cannot connect to comfyregistry.")
+        print("Cannot connect to comfyregistry.")
     finally:
         if cache_mode:
             is_cache_loading = False
@@ -212,7 +211,6 @@ def read_cnr_info(fullpath):

         project = data.get('project', {})
         name = project.get('name').strip().lower()
-        original_name = project.get('name')

         # normalize version
         # for example: 2.5 -> 2.5.0
@@ -224,7 +222,6 @@ def read_cnr_info(fullpath):
         if name and version:  # repository is optional
             return {
                 "id": name,
-                "original_name": original_name,
                 "version": version,
                 "url": repository
             }
@@ -241,7 +238,7 @@ def generate_cnr_id(fullpath, cnr_id):
         with open(cnr_id_path, "w") as f:
             return f.write(cnr_id)
     except Exception:
-        logging.error(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
+        print(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")


 def read_cnr_id(fullpath):
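The `read_cnr_info()` hunks above read the `[project]` table of a node pack's `pyproject.toml` (the module imports `toml` a few lines earlier). A minimal sketch of just the `name`/`version` handling visible in the hunks; the example values are invented:

```python
import toml

# Invented pyproject.toml fragment, shaped like the data read_cnr_info() expects.
data = toml.loads('''
[project]
name = "Example-Pack"
version = "2.5"
''')

project = data.get('project', {})
name = project.get('name').strip().lower()

print(name)                    # example-pack
print(project.get('version'))  # 2.5 -- the surrounding code then normalizes this to 2.5.0
```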
@@ -34,7 +34,7 @@ manager_pip_blacklist_path = None
 manager_components_path = None
 manager_batch_history_path = None

-def update_user_directory(manager_dir):
+def update_user_directory(user_dir):
     global manager_files_path
     global manager_config_path
     global manager_channel_list_path
@@ -45,7 +45,7 @@ def update_user_directory(manager_dir):
     global manager_components_path
     global manager_batch_history_path

-    manager_files_path = manager_dir
+    manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
     if not os.path.exists(manager_files_path):
         os.makedirs(manager_files_path)

@@ -73,7 +73,7 @@ def update_user_directory(manager_dir):

 try:
     import folder_paths
-    update_user_directory(folder_paths.get_system_user_directory("manager"))
+    update_user_directory(folder_paths.get_user_directory())

 except Exception:
     # fallback:
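The two sides resolve the manager directory differently: the left-hand side receives a dedicated directory from `folder_paths.get_system_user_directory("manager")` and uses it as-is (which, per the README hunk earlier, lands under `<USER_DIRECTORY>/__manager/`), while the right-hand side derives it from the ComfyUI user directory. A sketch of the right-hand derivation with an illustrative path:

```python
import os

user_dir = "/opt/ComfyUI/user"   # stand-in for whatever folder_paths.get_user_directory() returns

manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
print(manager_files_path)        # /opt/ComfyUI/user/default/ComfyUI-Manager
```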
@@ -15,8 +15,7 @@ import re
 import logging
 import platform
 import shlex
-from functools import lru_cache
+from packaging import version


 cache_lock = threading.Lock()
 session_lock = threading.Lock()
@@ -39,64 +38,18 @@ def add_python_path_to_env():
     os.environ['PATH'] = os.path.dirname(sys.executable)+sep+os.environ['PATH']


-@lru_cache(maxsize=2)
-def get_pip_cmd(force_uv=False):
-    """
-    Get the base pip command, with automatic fallback to uv if pip is unavailable.
-
-    Args:
-        force_uv (bool): If True, use uv directly without trying pip
-
-    Returns:
-        list: Base command for pip operations
-    """
-    embedded = 'python_embeded' in sys.executable
-
-    # Try pip first (unless forcing uv)
-    if not force_uv:
-        try:
-            test_cmd = [sys.executable] + (['-s'] if embedded else []) + ['-m', 'pip', '--version']
-            subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
-            return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'pip']
-        except Exception:
-            logging.warning("[ComfyUI-Manager] python -m pip not available. Falling back to uv.")
-
-    # Try uv (either forced or pip failed)
-    import shutil
-
-    # Try uv as Python module
-    try:
-        test_cmd = [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv', '--version']
-        subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
-        logging.info("[ComfyUI-Manager] Using uv as Python module for pip operations.")
-        return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv', 'pip']
-    except Exception:
-        pass
-
-    # Try standalone uv
-    if shutil.which('uv'):
-        logging.info("[ComfyUI-Manager] Using standalone uv for pip operations.")
-        return ['uv', 'pip']
-
-    # Nothing worked
-    logging.error("[ComfyUI-Manager] Neither python -m pip nor uv are available. Cannot proceed with package operations.")
-    raise Exception("Neither pip nor uv are available for package management")
-
-
 def make_pip_cmd(cmd):
-    """
-    Create a pip command by combining the cached base pip command with the given arguments.
-
-    Args:
-        cmd (list): List of pip command arguments (e.g., ['install', 'package'])
-
-    Returns:
-        list: Complete command list ready for subprocess execution
-    """
-    global use_uv
-    base_cmd = get_pip_cmd(force_uv=use_uv)
-    return base_cmd + cmd
+    if 'python_embeded' in sys.executable:
+        if use_uv:
+            return [sys.executable, '-s', '-m', 'uv', 'pip'] + cmd
+        else:
+            return [sys.executable, '-s', '-m', 'pip'] + cmd
+    else:
+        # FIXED: https://github.com/ltdrdata/ComfyUI-Manager/issues/1667
+        if use_uv:
+            return [sys.executable, '-m', 'uv', 'pip'] + cmd
+        else:
+            return [sys.executable, '-m', 'pip'] + cmd


 # DON'T USE StrictVersion - cannot handle pre_release version
 # try:
@@ -105,62 +58,32 @@ def make_pip_cmd(cmd):
 #     print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
 class StrictVersion:
     def __init__(self, version_string):
+        self.obj = version.parse(version_string)
         self.version_string = version_string
-        self.major = 0
-        self.minor = 0
-        self.patch = 0
-        self.pre_release = None
-        self.parse_version_string()
-
-    def parse_version_string(self):
-        parts = self.version_string.split('.')
-        if not parts:
-            raise ValueError("Version string must not be empty")
-
-        self.major = int(parts[0])
-        self.minor = int(parts[1]) if len(parts) > 1 else 0
-        self.patch = int(parts[2]) if len(parts) > 2 else 0
-
-        # Handling pre-release versions if present
-        if len(parts) > 3:
-            self.pre_release = parts[3]
+        self.major = self.obj.major
+        self.minor = self.obj.minor
+        self.patch = self.obj.micro

     def __str__(self):
-        version = f"{self.major}.{self.minor}.{self.patch}"
-        if self.pre_release:
-            version += f"-{self.pre_release}"
-        return version
+        return self.version_string

     def __eq__(self, other):
-        return (self.major, self.minor, self.patch, self.pre_release) == \
-               (other.major, other.minor, other.patch, other.pre_release)
+        return self.obj == other.obj

     def __lt__(self, other):
-        if (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch):
-            return self.pre_release_compare(self.pre_release, other.pre_release) < 0
-        return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
-
-    @staticmethod
-    def pre_release_compare(pre1, pre2):
-        if pre1 == pre2:
-            return 0
-        if pre1 is None:
-            return 1
-        if pre2 is None:
-            return -1
-        return -1 if pre1 < pre2 else 1
+        return self.obj < other.obj

     def __le__(self, other):
-        return self == other or self < other
+        return self.obj == other.obj or self.obj < other.obj

     def __gt__(self, other):
-        return not self <= other
+        return not self.obj <= other.obj

     def __ge__(self, other):
-        return not self < other
+        return not self.obj < other.obj

     def __ne__(self, other):
-        return not self == other
+        return not self.obj == other.obj


 def simple_hash(input_string):
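The right-hand `StrictVersion` delegates to `packaging.version`, which already defines the pre-release ordering that the left-hand hand-rolled parser had to approximate. A quick standalone check of that ordering, using `packaging` directly rather than the wrapper class:

```python
from packaging import version

print(version.parse("4.0.3") < version.parse("4.1"))        # True
print(version.parse("2.0.0rc1") < version.parse("2.0.0"))   # True: a pre-release sorts before its release
print(version.parse("2.5") == version.parse("2.5.0"))       # True: trailing zeros are normalized
```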
comfyui_manager/common/snapshot_util.py (new file, 136 lines)

@@ -0,0 +1,136 @@
+from . import manager_util
+from . import git_utils
+import json
+import yaml
+import logging
+
+
+def read_snapshot(snapshot_path):
+    try:
+        with open(snapshot_path, 'r', encoding="UTF-8") as snapshot_file:
+            if snapshot_path.endswith('.json'):
+                info = json.load(snapshot_file)
+            elif snapshot_path.endswith('.yaml'):
+                info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
+                info = info['custom_nodes']
+
+            return info
+    except Exception as e:
+        logging.warning(f"Failed to read snapshot file: {snapshot_path}\nError: {e}")
+
+        return None
+
+
+def diff_snapshot(a, b):
+    if not a or not b:
+        return None
+
+    nodepack_diff = {
+        'added': {},
+        'removed': [],
+        'upgraded': {},
+        'downgraded': {},
+        'changed': []
+    }
+
+    pip_diff = {
+        'added': {},
+        'upgraded': {},
+        'downgraded': {}
+    }
+
+    # check: comfyui
+    if a.get('comfyui') != b.get('comfyui'):
+        nodepack_diff['changed'].append('comfyui')
+
+    # check: cnr nodes
+    a_cnrs = a.get('cnr_custom_nodes', {})
+    b_cnrs = b.get('cnr_custom_nodes', {})
+
+    if 'comfyui-manager' in a_cnrs:
+        del a_cnrs['comfyui-manager']
+    if 'comfyui-manager' in b_cnrs:
+        del b_cnrs['comfyui-manager']
+
+    for k, v in a_cnrs.items():
+        if k not in b_cnrs.keys():
+            nodepack_diff['removed'].append(k)
+        elif a_cnrs[k] != b_cnrs[k]:
+            a_ver = manager_util.StrictVersion(a_cnrs[k])
+            b_ver = manager_util.StrictVersion(b_cnrs[k])
+            if a_ver < b_ver:
+                nodepack_diff['upgraded'][k] = {'from': a_cnrs[k], 'to': b_cnrs[k]}
+            elif a_ver > b_ver:
+                nodepack_diff['downgraded'][k] = {'from': a_cnrs[k], 'to': b_cnrs[k]}
+
+    added_cnrs = set(b_cnrs.keys()) - set(a_cnrs.keys())
+    for k in added_cnrs:
+        nodepack_diff['added'][k] = b_cnrs[k]
+
+    # check: git custom nodes
+    a_gits = a.get('git_custom_nodes', {})
+    b_gits = b.get('git_custom_nodes', {})
+
+    a_gits = {git_utils.normalize_url(k): v for k, v in a_gits.items() if k.lower() != 'comfyui-manager'}
+    b_gits = {git_utils.normalize_url(k): v for k, v in b_gits.items() if k.lower() != 'comfyui-manager'}
+
+    for k, v in a_gits.items():
+        if k not in b_gits.keys():
+            nodepack_diff['removed'].append(k)
+        elif not v['disabled'] and b_gits[k]['disabled']:
+            nodepack_diff['removed'].append(k)
+        elif v['disabled'] and not b_gits[k]['disabled']:
+            nodepack_diff['added'].append(k)
+        elif v['hash'] != b_gits[k]['hash']:
+            a_date = v.get('commit_timestamp')
+            b_date = b_gits[k].get('commit_timestamp')
+            if a_date is not None and b_date is not None:
+                if a_date < b_date:
+                    nodepack_diff['upgraded'].append(k)
+                elif a_date > b_date:
+                    nodepack_diff['downgraded'].append(k)
+            else:
+                nodepack_diff['changed'].append(k)
+
+    # check: pip packages
+    a_pip = a.get('pips', {})
+    b_pip = b.get('pips', {})
+    for k, v in a_pip.items():
+        if '==' in k:
+            package_name, version = k.split('==', 1)
+        else:
+            package_name, version = k, None
+
+        for k2, v2 in b_pip.items():
+            if '==' in k2:
+                package_name2, version2 = k2.split('==', 1)
+            else:
+                package_name2, version2 = k2, None
+
+            if package_name.lower() == package_name2.lower():
+                if version != version2:
+                    a_ver = manager_util.StrictVersion(version) if version else None
+                    b_ver = manager_util.StrictVersion(version2) if version2 else None
+                    if a_ver and b_ver:
+                        if a_ver < b_ver:
+                            pip_diff['upgraded'][package_name] = {'from': version, 'to': version2}
+                        elif a_ver > b_ver:
+                            pip_diff['downgraded'][package_name] = {'from': version, 'to': version2}
+                    elif not a_ver and b_ver:
+                        pip_diff['added'][package_name] = version2
+
+    a_pip_names = {k.split('==', 1)[0].lower() for k in a_pip.keys()}
+
+    for k in b_pip.keys():
+        if '==' in k:
+            package_name = k.split('==', 1)[0]
+            package_version = k.split('==', 1)[1]
+        else:
+            package_name = k
+            package_version = None
+
+        if package_name.lower() not in a_pip_names:
+            if package_version:
+                pip_diff['added'][package_name] = package_version
+
+    return {'nodepack_diff': nodepack_diff, 'pip_diff': pip_diff}
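A hedged usage sketch of the new module: the snapshot dictionaries below follow the keys that `diff_snapshot()` itself reads (`comfyui`, `cnr_custom_nodes`, `git_custom_nodes`, `pips`), the concrete package names and versions are invented, and the import assumes ComfyUI-Manager is importable as the `comfyui_manager` package shown in the file path.

```python
from comfyui_manager.common import snapshot_util

old = {
    'comfyui': 'v0.3.0',
    'cnr_custom_nodes': {'comfyui-impact-pack': '8.0.0'},
    'git_custom_nodes': {},
    'pips': {'numpy==1.26.0': ''},
}
new = {
    'comfyui': 'v0.3.1',
    'cnr_custom_nodes': {'comfyui-impact-pack': '8.1.0'},
    'git_custom_nodes': {},
    'pips': {'numpy==2.0.0': ''},
}

result = snapshot_util.diff_snapshot(old, new)
# Expected shape, following the code above:
#   result['nodepack_diff']['changed']  -> ['comfyui']
#   result['nodepack_diff']['upgraded'] -> {'comfyui-impact-pack': {'from': '8.0.0', 'to': '8.1.0'}}
#   result['pip_diff']['upgraded']      -> {'numpy': {'from': '1.26.0', 'to': '2.0.0'}}
print(result)
```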
@@ -30,12 +30,6 @@ from .generated_models import (
     InstalledModelInfo,
     ComfyUIVersionInfo,

-    # Import Fail Info Models
-    ImportFailInfoBulkRequest,
-    ImportFailInfoBulkResponse,
-    ImportFailInfoItem,
-    ImportFailInfoItem1,
-
     # Other models
     OperationType,
     OperationResult,
@@ -94,12 +88,6 @@ __all__ = [
     "InstalledModelInfo",
     "ComfyUIVersionInfo",

-    # Import Fail Info Models
-    "ImportFailInfoBulkRequest",
-    "ImportFailInfoBulkResponse",
-    "ImportFailInfoItem",
-    "ImportFailInfoItem1",
-
     # Other models
     "OperationType",
     "OperationResult",
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename: openapi.yaml
-#   timestamp: 2025-07-31T04:52:26+00:00
+#   timestamp: 2025-06-27T04:01:45+00:00

 from __future__ import annotations

@@ -454,24 +454,6 @@ class BatchExecutionRecord(BaseModel):
     )


-class ImportFailInfoBulkRequest(BaseModel):
-    cnr_ids: Optional[List[str]] = Field(
-        None, description="A list of CNR IDs to check."
-    )
-    urls: Optional[List[str]] = Field(
-        None, description="A list of repository URLs to check."
-    )
-
-
-class ImportFailInfoItem1(BaseModel):
-    error: Optional[str] = None
-    traceback: Optional[str] = None
-
-
-class ImportFailInfoItem(RootModel[Optional[ImportFailInfoItem1]]):
-    root: Optional[ImportFailInfoItem1]
-
-
 class QueueTaskItem(BaseModel):
     ui_id: str = Field(..., description="Unique identifier for the task")
     client_id: str = Field(..., description="Client identifier that initiated the task")
@@ -555,7 +537,3 @@ class HistoryResponse(BaseModel):
     history: Optional[Dict[str, TaskHistoryItem]] = Field(
         None, description="Map of task IDs to their history items"
     )
-
-
-class ImportFailInfoBulkResponse(RootModel[Optional[Dict[str, ImportFailInfoItem]]]):
-    root: Optional[Dict[str, ImportFailInfoItem]] = None
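For reference, the removed request model and the removed `/v2/customnode/import_fail_info_bulk` handler further down worked together roughly as in this self-contained sketch; the class body and the `model_validate` call are copied from the hunks, only the sample payload is invented.

```python
from typing import List, Optional
from pydantic import BaseModel, Field


class ImportFailInfoBulkRequest(BaseModel):
    cnr_ids: Optional[List[str]] = Field(
        None, description="A list of CNR IDs to check."
    )
    urls: Optional[List[str]] = Field(
        None, description="A list of repository URLs to check."
    )


# Validate an incoming JSON payload the way the removed endpoint did.
req = ImportFailInfoBulkRequest.model_validate({"cnr_ids": ["comfyui-impact-pack"]})
print(req.cnr_ids, req.urls)   # ['comfyui-impact-pack'] None
```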
@@ -41,12 +41,11 @@ from ..common.enums import NetworkMode, SecurityLevel, DBMode
 from ..common import context


-version_code = [4, 0, 3]
+version_code = [4, 0]
 version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')


 DEFAULT_CHANNEL = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"
-DEFAULT_CHANNEL_LEGACY = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"


 default_custom_nodes_path = None
@@ -154,8 +153,14 @@ def check_invalid_nodes():
 cached_config = None
 js_path = None

+comfy_ui_required_revision = 1930
+comfy_ui_required_commit_datetime = datetime(2024, 1, 24, 0, 0, 0)
+
+comfy_ui_revision = "Unknown"
+comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0)
+
 channel_dict = None
-valid_channels = {'default', 'local', DEFAULT_CHANNEL, DEFAULT_CHANNEL_LEGACY}
+valid_channels = {'default', 'local'}
 channel_list = None

@@ -1003,6 +1008,7 @@ class UnifiedManager:
         """

         result = ManagedResult('enable')
+
         if 'comfyui-manager' in node_id.lower():
             return result.fail(f"ignored: enabling '{node_id}'")

@@ -1473,7 +1479,7 @@ def identify_node_pack_from_path(fullpath):
         # cnr
         cnr = cnr_utils.read_cnr_info(fullpath)
         if cnr is not None:
-            return module_name, cnr['version'], cnr['original_name'], None
+            return module_name, cnr['version'], cnr['id'], None

         return None
     else:
@@ -1523,10 +1529,7 @@ def get_installed_node_packs():
         if info is None:
             continue

-        # NOTE: don't add disabled nodepack if there is enabled nodepack
-        original_name = info[0].split('@')[0]
-        if original_name not in res:
-            res[info[0]] = { 'ver': info[1], 'cnr_id': info[2], 'aux_id': info[3], 'enabled': False }
+        res[info[0]] = { 'ver': info[1], 'cnr_id': info[2], 'aux_id': info[3], 'enabled': False }

     return res

@@ -1783,6 +1786,16 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
     print(f"\n## ComfyUI-Manager: EXECUTE => {install_cmd}")
     code = manager_funcs.run_script(install_cmd, cwd=repo_path)

+    if platform.system() != "Windows":
+        try:
+            if not os.environ.get('__COMFYUI_DESKTOP_VERSION__') and comfy_ui_commit_datetime.date() < comfy_ui_required_commit_datetime.date():
+                print("\n\n###################################################################")
+                print(f"[WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version.")
+                print("[WARN] The extension installation feature may not work properly in the current installed ComfyUI version on Windows environment.")
+                print("###################################################################\n\n")
+        except Exception:
+            pass
+
     if code != 0:
         if url is None:
             url = os.path.dirname(repo_path)
@@ -1901,27 +1914,6 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
     return True


-def install_manager_requirements(repo_path):
-    """
-    Install packages from manager_requirements.txt if it exists.
-    This is specifically for ComfyUI's manager_requirements.txt.
-    """
-    manager_requirements_path = os.path.join(repo_path, "manager_requirements.txt")
-    if not os.path.exists(manager_requirements_path):
-        return
-
-    logging.info("[ComfyUI-Manager] Installing manager_requirements.txt")
-    with open(manager_requirements_path, "r") as f:
-        for line in f:
-            line = line.strip()
-            if line and not line.startswith('#'):
-                if '#' in line:
-                    line = line.split('#')[0].strip()
-                if line:
-                    install_cmd = manager_util.make_pip_cmd(["install", line])
-                    subprocess.run(install_cmd)
-
-
 def git_repo_update_check_with(path, do_fetch=False, do_update=False, no_deps=False):
     """

@@ -2455,7 +2447,6 @@ def update_to_stable_comfyui(repo_path):
             else:
                 logging.info(f"[ComfyUI-Manager] Updating ComfyUI: {current_tag} -> {latest_tag}")
                 repo.git.checkout(latest_tag)
-                execute_install_script("ComfyUI", repo_path, instant_execution=False, no_deps=False)
                 return 'updated', latest_tag
     except Exception:
         traceback.print_exc()
@@ -2655,8 +2646,8 @@ async def get_current_snapshot(custom_nodes_only = False):
                 commit_hash = git_utils.get_commit_hash(fullpath)
                 url = git_utils.git_url(fullpath)
                 git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled)
-            except Exception:
-                print(f"Failed to extract snapshots for the custom node '{path}'.")
+            except Exception as e:
+                print(f"Failed to extract snapshots for the custom node '{path}'. / {e}")

         elif path.endswith('.py'):
             is_disabled = path.endswith(".py.disabled")
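A quick evaluation of the `version_str` expression from the first hunk for both values of `version_code`; the expression is copied verbatim, and the loop is only for illustration:

```python
for version_code in ([4, 0, 3], [4, 0]):
    version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
    print(version_str)   # V4.0.3 for the left-hand side, V4.0 for the right-hand side
```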
@@ -47,7 +47,7 @@ from ..common import manager_util
 from ..common import cm_global
 from ..common import manager_downloader
 from ..common import context
+from ..common import snapshot_util


 from ..data_models import (
@@ -61,7 +61,6 @@ from ..data_models import (
     ManagerMessageName,
     BatchExecutionRecord,
     ComfyUISystemState,
-    ImportFailInfoBulkRequest,
     BatchOperation,
     InstalledNodeInfo,
     ComfyUIVersionInfo,
@@ -968,8 +967,6 @@ async def task_worker():
             logging.error("ComfyUI update failed")
             return "fail"
         elif res == "updated":
-            core.install_manager_requirements(repo_path)
-
             if is_stable:
                 logging.info("ComfyUI is updated to latest stable version.")
                 return "success-stable-" + latest_tag
@@ -1596,6 +1593,46 @@ async def save_snapshot(request):
         return web.Response(status=400)


+@routes.get("/v2/snapshot/diff")
+async def get_snapshot_diff(request):
+    try:
+        from_id = request.rel_url.query.get("from")
+        to_id = request.rel_url.query.get("to")
+
+        if (from_id is not None and '..' in from_id) or (to_id is not None and '..' in to_id):
+            logging.error("/v2/snapshot/diff: invalid 'from' or 'to' parameter.")
+            return web.Response(status=400)
+
+        if from_id is None:
+            from_json = await core.get_current_snapshot()
+        else:
+            from_path = os.path.join(context.manager_snapshot_path, f"{from_id}.json")
+            if not os.path.exists(from_path):
+                logging.error(f"/v2/snapshot/diff: 'from' parameter file not found: {from_path}")
+                return web.Response(status=400)
+
+            from_json = snapshot_util.read_snapshot(from_path)
+
+        if to_id is None:
+            logging.error("/v2/snapshot/diff: 'to' parameter is required.")
+            return web.Response(status=401)
+        else:
+            to_path = os.path.join(context.manager_snapshot_path, f"{to_id}.json")
+            if not os.path.exists(to_path):
+                logging.error(f"/v2/snapshot/diff: 'to' parameter file not found: {to_path}")
+                return web.Response(status=400)
+
+            to_json = snapshot_util.read_snapshot(to_path)
+
+        return web.json_response(snapshot_util.diff_snapshot(from_json, to_json), content_type='application/json')
+
+    except Exception as e:
+        logging.error(f"[ComfyUI-Manager] Error in /v2/snapshot/diff: {e}")
+        traceback.print_exc()
+        # Return a generic error response
+        return web.Response(status=400)
+
+
 def unzip_install(files):
     temp_filename = "manager-temp.zip"
     for url in files:
@@ -1659,67 +1696,6 @@ async def import_fail_info(request):
         return web.Response(status=500, text="Internal server error")


-@routes.post("/v2/customnode/import_fail_info_bulk")
-async def import_fail_info_bulk(request):
-    try:
-        json_data = await request.json()
-
-        # Validate input using Pydantic model
-        request_data = ImportFailInfoBulkRequest.model_validate(json_data)
-
-        # Ensure we have either cnr_ids or urls
-        if not request_data.cnr_ids and not request_data.urls:
-            return web.Response(
-                status=400, text="Either 'cnr_ids' or 'urls' field is required"
-            )
-
-        await core.unified_manager.reload('cache')
-        await core.unified_manager.get_custom_nodes('default', 'cache')
-
-        results = {}
-
-        if request_data.cnr_ids:
-            for cnr_id in request_data.cnr_ids:
-                module_name = core.unified_manager.get_module_name(cnr_id)
-                if module_name is not None:
-                    info = cm_global.error_dict.get(module_name)
-                    if info is not None:
-                        # Convert error_dict format to API spec format
-                        results[cnr_id] = {
-                            'error': info.get('msg', ''),
-                            'traceback': info.get('traceback', '')
-                        }
-                    else:
-                        results[cnr_id] = None
-                else:
-                    results[cnr_id] = None
-
-        if request_data.urls:
-            for url in request_data.urls:
-                module_name = core.unified_manager.get_module_name(url)
-                if module_name is not None:
-                    info = cm_global.error_dict.get(module_name)
-                    if info is not None:
-                        # Convert error_dict format to API spec format
-                        results[url] = {
-                            'error': info.get('msg', ''),
-                            'traceback': info.get('traceback', '')
-                        }
-                    else:
-                        results[url] = None
-                else:
-                    results[url] = None
-
-        # Return results directly as JSON
-        return web.json_response(results, content_type="application/json")
-    except ValidationError as e:
-        logging.error(f"[ComfyUI-Manager] Invalid request data: {e}")
-        return web.Response(status=400, text=f"Invalid request data: {e}")
-    except Exception as e:
-        logging.error(f"[ComfyUI-Manager] Error processing bulk import fail info: {e}")
-        return web.Response(status=500, text="Internal server error")
-
-
 @routes.get("/v2/manager/queue/reset")
 async def reset_queue(request):
     logging.debug("[ComfyUI-Manager] Queue reset requested")
@@ -2047,7 +2023,10 @@ async def default_cache_update():
         )
         traceback.print_exc()

-    if core.get_config()["network_mode"] != "offline":
+    if (
+        core.get_config()["network_mode"] != "offline"
+        and not manager_util.is_manager_pip_package()
+    ):
         a = get_cache("custom-node-list.json")
         b = get_cache("extension-node-map.json")
         c = get_cache("model-list.json")
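A hedged sketch of calling the new `/v2/snapshot/diff` route added above. The query parameters and status codes come from the handler itself; the full URL (ComfyUI's default `127.0.0.1:8188` with the route mounted at the server root) and the snapshot file names are assumptions.

```python
import requests

resp = requests.get(
    "http://127.0.0.1:8188/v2/snapshot/diff",
    params={"from": "2024-01-01_00-00-00", "to": "2024-02-01_00-00-00"},
)

# Per the handler: 400 if a parameter contains '..' or a snapshot file is missing,
# 401 if 'to' is omitted, otherwise a JSON body shaped like
# {'nodepack_diff': {...}, 'pip_diff': {...}}.
print(resp.status_code)
if resp.ok:
    print(resp.json())
```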
@@ -11,15 +11,6 @@ import hashlib
 import folder_paths
 from server import PromptServer
 import logging
-import sys
-
-
-try:
-    from nio import AsyncClient, LoginResponse, UploadResponse
-    matrix_nio_is_available = True
-except Exception:
-    logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
-    matrix_nio_is_available = False


 def extract_model_file_names(json_data):
@@ -202,14 +193,6 @@ async def get_esheep_workflow_and_images(request):
     return web.Response(status=200, text=json.dumps(data))


-@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
-async def get_matrix_dep_status(request):
-    if matrix_nio_is_available:
-        return web.Response(status=200, text='available')
-    else:
-        return web.Response(status=200, text='unavailable')
-
-
 def set_matrix_auth(json_data):
     homeserver = json_data['homeserver']
     username = json_data['username']
@@ -349,12 +332,15 @@ async def share_art(request):
         workflowId = upload_workflow_json["workflowId"]

     # check if the user has provided Matrix credentials
-    if matrix_nio_is_available and "matrix" in share_destinations:
+    if "matrix" in share_destinations:
         comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
         filename = os.path.basename(asset_filepath)
         content_type = assetFileType

         try:
+            from matrix_client.api import MatrixHttpApi
+            from matrix_client.client import MatrixClient
+
             homeserver = 'matrix.org'
             if matrix_auth:
                 homeserver = matrix_auth.get('homeserver', 'matrix.org')
@@ -362,35 +348,20 @@ async def share_art(request):
             if not homeserver.startswith("https://"):
                 homeserver = "https://" + homeserver

-            client = AsyncClient(homeserver, matrix_auth['username'])
-
-            # Login
-            login_resp = await client.login(matrix_auth['password'])
-            if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
-                await client.close()
-                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
-
-            # Upload asset
-            with open(asset_filepath, 'rb') as f:
-                upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
-                asset_data = f.seek(0) or f.read()  # get size for info below
-            if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
-                await client.close()
-                return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
-            mxc_url = upload_resp.content_uri
-
-            # Upload workflow JSON
-            import io
-            workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
-            workflow_io = io.BytesIO(workflow_json_bytes)
-            upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
-            workflow_io.seek(0)
-            if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
-                await client.close()
-                return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
-            workflow_json_mxc_url = upload_workflow_resp.content_uri
-
-            # Send text message
+            client = MatrixClient(homeserver)
+            try:
+                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
+                if not token:
+                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
+            except Exception:
+                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
+
+            matrix = MatrixHttpApi(homeserver, token=token)
+            with open(asset_filepath, 'rb') as f:
+                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']
+
+            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']
+
             text_content = ""
             if title:
                 text_content += f"{title}\n"
@@ -398,47 +369,11 @@ async def share_art(request):
                 text_content += f"{description}\n"
             if credits:
                 text_content += f"\ncredits: {credits}\n"
-            await client.room_send(
-                room_id=comfyui_share_room_id,
-                message_type="m.room.message",
-                content={"msgtype": "m.text", "body": text_content}
-            )
-
-            # Send image
-            await client.room_send(
-                room_id=comfyui_share_room_id,
-                message_type="m.room.message",
-                content={
-                    "msgtype": "m.image",
-                    "body": filename,
-                    "url": mxc_url,
-                    "info": {
-                        "mimetype": content_type,
-                        "size": len(asset_data)
-                    }
-                }
-            )
-
-            # Send workflow JSON file
-            await client.room_send(
-                room_id=comfyui_share_room_id,
-                message_type="m.room.message",
-                content={
-                    "msgtype": "m.file",
-                    "body": "workflow.json",
-                    "url": workflow_json_mxc_url,
-                    "info": {
-                        "mimetype": "application/json",
-                        "size": len(workflow_json_bytes)
-                    }
-                }
-            )
-
-            await client.close()
-
-        except:
-            import traceback
-            traceback.print_exc()
+            matrix.send_message(comfyui_share_room_id, text_content)
+            matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
+            matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
+        except Exception:
+            logging.exception("An error occurred")
             return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)

         return web.json_response({
@@ -13,7 +13,7 @@ This directory contains the JavaScript frontend implementation for ComfyUI-Manag
|
|||||||
## Sharing Components
|
## Sharing Components
|
||||||
|
|
||||||
- **comfyui-share-common.js**: Base functionality for workflow sharing features.
|
- **comfyui-share-common.js**: Base functionality for workflow sharing features.
|
||||||
- **comfyui-share-copus.js**: Integration with the ComfyUI Copus sharing platform.
|
- **comfyui-share-copus.js**: Integration with the ComfyUI Opus sharing platform.
|
||||||
- **comfyui-share-openart.js**: Integration with the OpenArt sharing platform.
|
- **comfyui-share-openart.js**: Integration with the OpenArt sharing platform.
|
||||||
- **comfyui-share-youml.js**: Integration with the YouML sharing platform.
|
- **comfyui-share-youml.js**: Integration with the YouML sharing platform.
|
||||||
|
|
||||||
@@ -47,4 +47,4 @@ CSS files are included for specific components:
|
|||||||
- **custom-nodes-manager.css**: Styling for the node management UI
|
- **custom-nodes-manager.css**: Styling for the node management UI
|
||||||
- **model-manager.css**: Styling for the model management UI
|
- **model-manager.css**: Styling for the model management UI
|
||||||
|
|
||||||
This frontend implementation provides a comprehensive yet user-friendly interface for managing the ComfyUI ecosystem.
|
This frontend implementation provides a comprehensive yet user-friendly interface for managing the ComfyUI ecosystem.
|
||||||
@@ -1514,6 +1514,8 @@ app.registerExtension({
|
|||||||
tooltip: "Share"
|
tooltip: "Share"
|
||||||
}).element
|
}).element
|
||||||
);
|
);
|
||||||
|
|
||||||
|
app.menu?.settingsGroup.element.before(cmGroup.element);
|
||||||
}
|
}
|
||||||
catch(exception) {
|
catch(exception) {
|
||||||
console.log('ComfyUI is outdated. New style menu based features are disabled.');
|
console.log('ComfyUI is outdated. New style menu based features are disabled.');
|
||||||
|
|||||||
@@ -552,20 +552,6 @@ export class ShareDialog extends ComfyDialog {
|
|||||||
this.matrix_destination_checkbox.style.color = "var(--fg-color)";
|
this.matrix_destination_checkbox.style.color = "var(--fg-color)";
|
||||||
this.matrix_destination_checkbox.checked = this.share_option === 'matrix'; //true;
|
this.matrix_destination_checkbox.checked = this.share_option === 'matrix'; //true;
|
||||||
|
|
||||||
try {
|
|
||||||
api.fetchApi(`/v2/manager/get_matrix_dep_status`)
|
|
||||||
.then(response => response.text())
|
|
||||||
.then(data => {
|
|
||||||
if(data == 'unavailable') {
|
|
||||||
matrix_destination_checkbox_text.style.textDecoration = "line-through";
|
|
||||||
this.matrix_destination_checkbox.disabled = true;
|
|
||||||
this.matrix_destination_checkbox.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
|
|
||||||
matrix_destination_checkbox_text.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.catch(error => {});
|
|
||||||
} catch (error) {}
|
|
||||||
|
|
||||||
this.comfyworkflows_destination_checkbox = $el("input", { type: 'checkbox', id: "comfyworkflows_destination" }, [])
|
this.comfyworkflows_destination_checkbox = $el("input", { type: 'checkbox', id: "comfyworkflows_destination" }, [])
|
||||||
const comfyworkflows_destination_checkbox_text = $el("label", {}, [" ComfyWorkflows.com"])
|
const comfyworkflows_destination_checkbox_text = $el("label", {}, [" ComfyWorkflows.com"])
|
||||||
this.comfyworkflows_destination_checkbox.style.color = "var(--fg-color)";
|
this.comfyworkflows_destination_checkbox.style.color = "var(--fg-color)";
|
||||||
|
|||||||
@@ -201,15 +201,13 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
});
|
});
|
||||||
this.LockInput = $el("input", {
|
this.LockInput = $el("input", {
|
||||||
type: "text",
|
type: "text",
|
||||||
placeholder: "0",
|
placeholder: "",
|
||||||
style: {
|
style: {
|
||||||
width: "100px",
|
width: "100px",
|
||||||
padding: "7px",
|
padding: "7px",
|
||||||
paddingLeft: "30px",
|
|
||||||
borderRadius: "4px",
|
borderRadius: "4px",
|
||||||
border: "1px solid #ddd",
|
border: "1px solid #ddd",
|
||||||
boxSizing: "border-box",
|
boxSizing: "border-box",
|
||||||
position: "relative",
|
|
||||||
},
|
},
|
||||||
oninput: (event) => {
|
oninput: (event) => {
|
||||||
let input = event.target.value;
|
let input = event.target.value;
|
||||||
@@ -377,7 +375,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const blockChainSection_lock = $el("div", { style: sectionStyle }, [
|
const blockChainSection_lock = $el("div", { style: sectionStyle }, [
|
||||||
$el("label", { style: labelStyle }, ["6️⃣ Download threshold"]),
|
$el("label", { style: labelStyle }, ["6️⃣ Pay to download"]),
|
||||||
$el(
|
$el(
|
||||||
"label",
|
"label",
|
||||||
{
|
{
|
||||||
@@ -397,7 +395,6 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
marginLeft: "5px",
|
marginLeft: "5px",
|
||||||
display: "flex",
|
display: "flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
position: "relative",
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
[
|
[
|
||||||
@@ -411,18 +408,8 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
color: "#fff",
|
color: "#fff",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
["Unlock with"]
|
["Price US$"]
|
||||||
),
|
),
|
||||||
$el("img", {
|
|
||||||
style: {
|
|
||||||
width: "16px",
|
|
||||||
height: "16px",
|
|
||||||
position: "absolute",
|
|
||||||
right: "75px",
|
|
||||||
zIndex: "100",
|
|
||||||
},
|
|
||||||
src: "https://static.copus.io/images/admin/202507/prod/e2919a1d8f3c2d99d3b8fe27ff94b841.png",
|
|
||||||
}),
|
|
||||||
this.LockInput,
|
this.LockInput,
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
@@ -433,7 +420,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
|
||||||
[
|
[
|
||||||
this.radioButtonsCheckOff_lock,
|
this.radioButtonsCheckOff_lock,
|
||||||
$el(
|
$el(
|
||||||
"div",
|
"div",
|
||||||
{
|
{
|
||||||
style: {
|
style: {
|
||||||
@@ -442,7 +429,9 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
[$el("span", { style: { marginLeft: "5px" } }, ["OFF"])]
|
[
|
||||||
|
$el("span", { style: { marginLeft: "5px" } }, ["OFF"]),
|
||||||
|
]
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
@@ -451,6 +440,7 @@ export class CopusShareDialog extends ComfyDialog {
|
|||||||
"p",
|
"p",
|
||||||
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
|
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
|
||||||
[
|
[
|
||||||
|
"Get paid from your workflow. You can change the price and withdraw your earnings on Copus.",
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
]);
|
]);
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
.cn-manager {
|
.cn-manager {
|
||||||
--grid-font: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
|
--grid-font: -apple-system, BlinkMacSystemFont, "Segue UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
|
||||||
z-index: 1099;
|
z-index: 1099;
|
||||||
width: 80%;
|
width: 80%;
|
||||||
height: 80%;
|
height: 80%;
|
||||||
|
|||||||
@@ -1626,35 +1626,17 @@ export class CustomNodesManager {
|
|||||||
getNodesInWorkflow() {
|
getNodesInWorkflow() {
|
||||||
let usedGroupNodes = new Set();
|
let usedGroupNodes = new Set();
|
||||||
let allUsedNodes = {};
|
let allUsedNodes = {};
|
||||||
const visitedGraphs = new Set();
|
|
||||||
|
|
||||||
const visitGraph = (graph) => {
|
for(let k in app.graph._nodes) {
|
||||||
if (!graph || visitedGraphs.has(graph)) return;
|
let node = app.graph._nodes[k];
|
||||||
visitedGraphs.add(graph);
|
|
||||||
|
|
||||||
const nodes = graph._nodes || graph.nodes || [];
|
if(node.type.startsWith('workflow>')) {
|
||||||
for(let k in nodes) {
|
usedGroupNodes.add(node.type.slice(9));
|
||||||
let node = nodes[k];
|
continue;
|
||||||
if (!node) continue;
|
|
||||||
|
|
||||||
// If it's a SubgraphNode, recurse into its graph and continue searching
|
|
||||||
if (node.isSubgraphNode?.() && node.subgraph) {
|
|
||||||
visitGraph(node.subgraph);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!node.type) continue;
|
|
||||||
|
|
||||||
// Group nodes / components
|
|
||||||
if(typeof node.type === 'string' && node.type.startsWith('workflow>')) {
|
|
||||||
usedGroupNodes.add(node.type.slice(9));
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
allUsedNodes[node.type] = node;
|
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
|
||||||
visitGraph(app.graph);
|
allUsedNodes[node.type] = node;
|
||||||
|
}
|
||||||
|
|
||||||
for(let k of usedGroupNodes) {
|
for(let k of usedGroupNodes) {
|
||||||
let subnodes = app.graph.extra.groupNodes[k]?.nodes;
|
let subnodes = app.graph.extra.groupNodes[k]?.nodes;
|
||||||
|
|||||||
@@ -41,12 +41,11 @@ from ..common.enums import NetworkMode, SecurityLevel, DBMode
|
|||||||
from ..common import context
|
from ..common import context
|
||||||
|
|
||||||
|
|
||||||
version_code = [4, 0, 3]
|
version_code = [4, 0]
|
||||||
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
|
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_CHANNEL = "https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main"
|
DEFAULT_CHANNEL = "https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main"
|
||||||
DEFAULT_CHANNEL_LEGACY = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"
|
|
||||||
|
|
||||||
|
|
||||||
default_custom_nodes_path = None
|
default_custom_nodes_path = None
|
||||||
@@ -161,7 +160,7 @@ comfy_ui_revision = "Unknown"
|
|||||||
comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0)
|
comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0)
|
||||||
|
|
||||||
channel_dict = None
|
channel_dict = None
|
||||||
valid_channels = {'default', 'local', DEFAULT_CHANNEL, DEFAULT_CHANNEL_LEGACY}
|
valid_channels = {'default', 'local'}
|
||||||
channel_list = None
|
channel_list = None
|
||||||
|
|
||||||
|
|
||||||
@@ -1391,7 +1390,6 @@ class UnifiedManager:
|
|||||||
return ManagedResult('skip')
|
return ManagedResult('skip')
|
||||||
elif self.is_disabled(node_id):
|
elif self.is_disabled(node_id):
|
||||||
return self.unified_enable(node_id)
|
return self.unified_enable(node_id)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
version_spec = self.resolve_unspecified_version(node_id)
|
version_spec = self.resolve_unspecified_version(node_id)
|
||||||
|
|
||||||
@@ -1913,27 +1911,6 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def install_manager_requirements(repo_path):
|
|
||||||
"""
|
|
||||||
Install packages from manager_requirements.txt if it exists.
|
|
||||||
This is specifically for ComfyUI's manager_requirements.txt.
|
|
||||||
"""
|
|
||||||
manager_requirements_path = os.path.join(repo_path, "manager_requirements.txt")
|
|
||||||
if not os.path.exists(manager_requirements_path):
|
|
||||||
return
|
|
||||||
|
|
||||||
logging.info("[ComfyUI-Manager] Installing manager_requirements.txt")
|
|
||||||
with open(manager_requirements_path, "r") as f:
|
|
||||||
for line in f:
|
|
||||||
line = line.strip()
|
|
||||||
if line and not line.startswith('#'):
|
|
||||||
if '#' in line:
|
|
||||||
line = line.split('#')[0].strip()
|
|
||||||
if line:
|
|
||||||
install_cmd = manager_util.make_pip_cmd(["install", line])
|
|
||||||
subprocess.run(install_cmd)
|
|
||||||
|
|
||||||
|
|
||||||
def git_repo_update_check_with(path, do_fetch=False, do_update=False, no_deps=False):
|
def git_repo_update_check_with(path, do_fetch=False, do_update=False, no_deps=False):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -2453,7 +2430,6 @@ def update_to_stable_comfyui(repo_path):
|
|||||||
else:
|
else:
|
||||||
logging.info(f"[ComfyUI-Manager] Updating ComfyUI: {current_tag} -> {latest_tag}")
|
logging.info(f"[ComfyUI-Manager] Updating ComfyUI: {current_tag} -> {latest_tag}")
|
||||||
repo.git.checkout(latest_tag)
|
repo.git.checkout(latest_tag)
|
||||||
execute_install_script("ComfyUI", repo_path, instant_execution=False, no_deps=False)
|
|
||||||
return 'updated', latest_tag
|
return 'updated', latest_tag
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
@@ -2585,13 +2561,9 @@ def check_state_of_git_node_pack_single(item, do_fetch=False, do_update_check=Tr
|
|||||||
|
|
||||||
|
|
||||||
def get_installed_pip_packages():
|
def get_installed_pip_packages():
|
||||||
try:
|
# extract pip package infos
|
||||||
# extract pip package infos
|
cmd = manager_util.make_pip_cmd(['freeze'])
|
||||||
cmd = manager_util.make_pip_cmd(['freeze'])
|
pips = subprocess.check_output(cmd, text=True).split('\n')
|
||||||
pips = subprocess.check_output(cmd, text=True).split('\n')
|
|
||||||
except Exception as e:
|
|
||||||
logging.warning("[ComfyUI-Manager] Could not enumerate pip packages for snapshot: %s", e)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
res = {}
|
res = {}
|
||||||
for x in pips:
|
for x in pips:
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ from ..common import cm_global
|
|||||||
from ..common import manager_downloader
|
from ..common import manager_downloader
|
||||||
from ..common import context
|
from ..common import context
|
||||||
from ..common import manager_security
|
from ..common import manager_security
|
||||||
|
from ..common import snapshot_util
|
||||||
|
|
||||||
|
|
||||||
logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
|
logging.info(f"### Loading: ComfyUI-Manager ({core.version_str})")
|
||||||
@@ -561,8 +562,6 @@ async def task_worker():
|
|||||||
logging.error("ComfyUI update failed")
|
logging.error("ComfyUI update failed")
|
||||||
return "fail"
|
return "fail"
|
||||||
elif res == "updated":
|
elif res == "updated":
|
||||||
core.install_manager_requirements(repo_path)
|
|
||||||
|
|
||||||
if is_stable:
|
if is_stable:
|
||||||
logging.info("ComfyUI is updated to latest stable version.")
|
logging.info("ComfyUI is updated to latest stable version.")
|
||||||
return "success-stable-"+latest_tag
|
return "success-stable-"+latest_tag
|
||||||
@@ -1074,15 +1073,12 @@ async def fetch_customnode_list(request):
|
|||||||
if channel != 'local':
|
if channel != 'local':
|
||||||
found = 'custom'
|
found = 'custom'
|
||||||
|
|
||||||
if channel == core.DEFAULT_CHANNEL or channel == core.DEFAULT_CHANNEL_LEGACY:
|
for name, url in core.get_channel_dict().items():
|
||||||
channel = 'default'
|
if url == channel:
|
||||||
else:
|
found = name
|
||||||
for name, url in core.get_channel_dict().items():
|
break
|
||||||
if url == channel:
|
|
||||||
found = name
|
|
||||||
break
|
|
||||||
|
|
||||||
channel = found
|
channel = found
|
||||||
|
|
||||||
result = dict(channel=channel, node_packs=node_packs.to_dict())
|
result = dict(channel=channel, node_packs=node_packs.to_dict())
|
||||||
|
|
||||||
@@ -1173,7 +1169,7 @@ async def fetch_externalmodel_list(request):
|
|||||||
return web.json_response(json_obj, content_type='application/json')
|
return web.json_response(json_obj, content_type='application/json')
|
||||||
|
|
||||||
|
|
||||||
@PromptServer.instance.routes.get("/v2/snapshot/getlist")
|
@routes.get("/v2/snapshot/getlist")
|
||||||
async def get_snapshot_list(request):
|
async def get_snapshot_list(request):
|
||||||
items = [f[:-5] for f in os.listdir(context.manager_snapshot_path) if f.endswith('.json')]
|
items = [f[:-5] for f in os.listdir(context.manager_snapshot_path) if f.endswith('.json')]
|
||||||
items.sort(reverse=True)
|
items.sort(reverse=True)
|
||||||
@@ -1241,6 +1237,46 @@ async def save_snapshot(request):
|
|||||||
return web.Response(status=400)
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
|
||||||
|
@routes.get("/v2/snapshot/diff")
|
||||||
|
async def get_snapshot_diff(request):
|
||||||
|
try:
|
||||||
|
from_id = request.rel_url.query.get("from")
|
||||||
|
to_id = request.rel_url.query.get("to")
|
||||||
|
|
||||||
|
if (from_id is not None and '..' in from_id) or (to_id is not None and '..' in to_id):
|
||||||
|
logging.error("/v2/snapshot/diff: invalid 'from' or 'to' parameter.")
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
if from_id is None:
|
||||||
|
from_json = await core.get_current_snapshot()
|
||||||
|
else:
|
||||||
|
from_path = os.path.join(context.manager_snapshot_path, f"{from_id}.json")
|
||||||
|
if not os.path.exists(from_path):
|
||||||
|
logging.error(f"/v2/snapshot/diff: 'from' parameter file not found: {from_path}")
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
from_json = snapshot_util.read_snapshot(from_path)
|
||||||
|
|
||||||
|
if to_id is None:
|
||||||
|
logging.error("/v2/snapshot/diff: 'to' parameter is required.")
|
||||||
|
return web.Response(status=401)
|
||||||
|
else:
|
||||||
|
to_path = os.path.join(context.manager_snapshot_path, f"{to_id}.json")
|
||||||
|
if not os.path.exists(to_path):
|
||||||
|
logging.error(f"/v2/snapshot/diff: 'to' parameter file not found: {to_path}")
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
to_json = snapshot_util.read_snapshot(to_path)
|
||||||
|
|
||||||
|
return web.json_response(snapshot_util.diff_snapshot(from_json, to_json), content_type='application/json')
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"[ComfyUI-Manager] Error in /v2/snapshot/diff: {e}")
|
||||||
|
traceback.print_exc()
|
||||||
|
# Return a generic error response
|
||||||
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
|
||||||
def unzip_install(files):
|
def unzip_install(files):
|
||||||
temp_filename = 'manager-temp.zip'
|
temp_filename = 'manager-temp.zip'
|
||||||
for url in files:
|
for url in files:
|
||||||
@@ -1313,65 +1349,6 @@ async def import_fail_info(request):
|
|||||||
return web.Response(status=400)
|
return web.Response(status=400)
|
||||||
|
|
||||||
|
|
||||||
@routes.post("/v2/customnode/import_fail_info_bulk")
|
|
||||||
async def import_fail_info_bulk(request):
|
|
||||||
try:
|
|
||||||
json_data = await request.json()
|
|
||||||
|
|
||||||
# Basic validation - ensure we have either cnr_ids or urls
|
|
||||||
if not isinstance(json_data, dict):
|
|
||||||
return web.Response(status=400, text="Request body must be a JSON object")
|
|
||||||
|
|
||||||
if "cnr_ids" not in json_data and "urls" not in json_data:
|
|
||||||
return web.Response(
|
|
||||||
status=400, text="Either 'cnr_ids' or 'urls' field is required"
|
|
||||||
)
|
|
||||||
|
|
||||||
await core.unified_manager.reload('cache')
|
|
||||||
await core.unified_manager.get_custom_nodes('default', 'cache')
|
|
||||||
|
|
||||||
results = {}
|
|
||||||
|
|
||||||
if "cnr_ids" in json_data:
|
|
||||||
if not isinstance(json_data["cnr_ids"], list):
|
|
||||||
return web.Response(status=400, text="'cnr_ids' must be an array")
|
|
||||||
for cnr_id in json_data["cnr_ids"]:
|
|
||||||
if not isinstance(cnr_id, str):
|
|
||||||
results[cnr_id] = {"error": "cnr_id must be a string"}
|
|
||||||
continue
|
|
||||||
module_name = core.unified_manager.get_module_name(cnr_id)
|
|
||||||
if module_name is not None:
|
|
||||||
info = cm_global.error_dict.get(module_name)
|
|
||||||
if info is not None:
|
|
||||||
results[cnr_id] = info
|
|
||||||
else:
|
|
||||||
results[cnr_id] = None
|
|
||||||
else:
|
|
||||||
results[cnr_id] = None
|
|
||||||
|
|
||||||
if "urls" in json_data:
|
|
||||||
if not isinstance(json_data["urls"], list):
|
|
||||||
return web.Response(status=400, text="'urls' must be an array")
|
|
||||||
for url in json_data["urls"]:
|
|
||||||
if not isinstance(url, str):
|
|
||||||
results[url] = {"error": "url must be a string"}
|
|
||||||
continue
|
|
||||||
module_name = core.unified_manager.get_module_name(url)
|
|
||||||
if module_name is not None:
|
|
||||||
info = cm_global.error_dict.get(module_name)
|
|
||||||
if info is not None:
|
|
||||||
results[url] = info
|
|
||||||
else:
|
|
||||||
results[url] = None
|
|
||||||
else:
|
|
||||||
results[url] = None
|
|
||||||
|
|
||||||
return web.json_response(results)
|
|
||||||
except Exception as e:
|
|
||||||
logging.error(f"[ComfyUI-Manager] Error processing bulk import fail info: {e}")
|
|
||||||
return web.Response(status=500, text="Internal server error")
|
|
||||||
|
|
||||||
|
|
||||||
@routes.post("/v2/manager/queue/reinstall")
|
@routes.post("/v2/manager/queue/reinstall")
|
||||||
async def reinstall_custom_node(request):
|
async def reinstall_custom_node(request):
|
||||||
await uninstall_custom_node(request)
|
await uninstall_custom_node(request)
|
||||||
|
|||||||
@@ -10,16 +10,6 @@ import hashlib
|
|||||||
|
|
||||||
import folder_paths
|
import folder_paths
|
||||||
from server import PromptServer
|
from server import PromptServer
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
from nio import AsyncClient, LoginResponse, UploadResponse
|
|
||||||
matrix_nio_is_available = True
|
|
||||||
except Exception:
|
|
||||||
logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
|
|
||||||
matrix_nio_is_available = False
|
|
||||||
|
|
||||||
|
|
||||||
def extract_model_file_names(json_data):
|
def extract_model_file_names(json_data):
|
||||||
@@ -202,14 +192,6 @@ async def get_esheep_workflow_and_images(request):
|
|||||||
return web.Response(status=200, text=json.dumps(data))
|
return web.Response(status=200, text=json.dumps(data))
|
||||||
|
|
||||||
|
|
||||||
@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
|
|
||||||
async def get_matrix_dep_status(request):
|
|
||||||
if matrix_nio_is_available:
|
|
||||||
return web.Response(status=200, text='available')
|
|
||||||
else:
|
|
||||||
return web.Response(status=200, text='unavailable')
|
|
||||||
|
|
||||||
|
|
||||||
def set_matrix_auth(json_data):
|
def set_matrix_auth(json_data):
|
||||||
homeserver = json_data['homeserver']
|
homeserver = json_data['homeserver']
|
||||||
username = json_data['username']
|
username = json_data['username']
|
||||||
@@ -349,12 +331,14 @@ async def share_art(request):
|
|||||||
workflowId = upload_workflow_json["workflowId"]
|
workflowId = upload_workflow_json["workflowId"]
|
||||||
|
|
||||||
# check if the user has provided Matrix credentials
|
# check if the user has provided Matrix credentials
|
||||||
if matrix_nio_is_available and "matrix" in share_destinations:
|
if "matrix" in share_destinations:
|
||||||
comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
|
comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
|
||||||
filename = os.path.basename(asset_filepath)
|
filename = os.path.basename(asset_filepath)
|
||||||
content_type = assetFileType
|
content_type = assetFileType
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
from nio import AsyncClient, LoginResponse, UploadResponse
|
||||||
|
|
||||||
homeserver = 'matrix.org'
|
homeserver = 'matrix.org'
|
||||||
if matrix_auth:
|
if matrix_auth:
|
||||||
homeserver = matrix_auth.get('homeserver', 'matrix.org')
|
homeserver = matrix_auth.get('homeserver', 'matrix.org')
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ cm_global.register_api('cm.is_import_failed_extension', is_import_failed_extensi
|
|||||||
comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
|
comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
|
||||||
custom_nodes_base_path = folder_paths.get_folder_paths('custom_nodes')[0]
|
custom_nodes_base_path = folder_paths.get_folder_paths('custom_nodes')[0]
|
||||||
manager_files_path = folder_paths.get_system_user_directory("manager")
|
manager_files_path = os.path.abspath(os.path.join(folder_paths.get_user_directory(), 'default', 'ComfyUI-Manager'))
|
||||||
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
|
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
|
||||||
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
|
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
|
||||||
restore_snapshot_path = os.path.join(manager_files_path, "startup-scripts", "restore-snapshot.json")
|
restore_snapshot_path = os.path.join(manager_files_path, "startup-scripts", "restore-snapshot.json")
|
||||||
@@ -483,7 +483,7 @@ check_bypass_ssl()
|
|||||||
|
|
||||||
# Perform install
|
# Perform install
|
||||||
processed_install = set()
|
processed_install = set()
|
||||||
script_list_path = os.path.join(manager_files_path, "startup-scripts", "install-scripts.txt")
|
script_list_path = os.path.join(folder_paths.user_directory, "default", "ComfyUI-Manager", "startup-scripts", "install-scripts.txt")
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
8069
comfyui_manager/custom-node-list.json → custom-node-list.json
Normal file → Executable file
8069
comfyui_manager/custom-node-list.json → custom-node-list.json
Normal file → Executable file
File diff suppressed because it is too large
Load Diff
@@ -139,9 +139,9 @@ You can set whether to use ComfyUI-Manager solely via CLI.
|
|||||||
`restore-dependencies`
|
`restore-dependencies`
|
||||||
|
|
||||||
* This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
|
* This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
|
||||||
* It is useful when starting a new cloud instance, like Colab, where dependencies need to be reinstalled and installation scripts re-executed.
|
* It is useful when starting a new cloud instance, like colab, where dependencies need to be reinstalled and installation scripts re-executed.
|
||||||
* It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.
|
* It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.
|
||||||
|
|
||||||
### 7. Clear
|
### 7. Clear
|
||||||
|
|
||||||
In the GUI, installations, updates, or snapshot restorations are scheduled to execute the next time ComfyUI is launched. The `clear` command clears this scheduled state, ensuring no pre-execution actions are applied.
|
In the GUI, installations, updates, or snapshot restorations are scheduled to execute the next time ComfyUI is launched. The `clear` command clears this scheduled state, ensuring no pre-execution actions are applied.
|
||||||
@@ -23,13 +23,13 @@ OPTIONS:
|
|||||||
## How To Use?
|
## How To Use?
|
||||||
* `python cm-cli.py` 를 통해서 실행 시킬 수 있습니다.
|
* `python cm-cli.py` 를 통해서 실행 시킬 수 있습니다.
|
||||||
* 예를 들어 custom node를 모두 업데이트 하고 싶다면
|
* 예를 들어 custom node를 모두 업데이트 하고 싶다면
|
||||||
* ComfyUI-Manager 경로에서 `python cm-cli.py update all` 명령을 실행할 수 있습니다.
|
* ComfyUI-Manager경로 에서 `python cm-cli.py update all` 를 command를 실행할 수 있습니다.
|
||||||
* ComfyUI 경로에서 실행한다면, `python custom_nodes/ComfyUI-Manager/cm-cli.py update all` 와 같이 cm-cli.py 의 경로를 지정할 수도 있습니다.
|
* ComfyUI 경로에서 실행한다면, `python custom_nodes/ComfyUI-Manager/cm-cli.py update all` 와 같이 cm-cli.py 의 경로를 지정할 수도 있습니다.
|
||||||
|
|
||||||
## Prerequisite
|
## Prerequisite
|
||||||
* ComfyUI 를 실행하는 python과 동일한 python 환경에서 실행해야 합니다.
|
* ComfyUI 를 실행하는 python과 동일한 python 환경에서 실행해야 합니다.
|
||||||
* venv를 사용할 경우 해당 venv를 activate 한 상태에서 실행해야 합니다.
|
* venv를 사용할 경우 해당 venv를 activate 한 상태에서 실행해야 합니다.
|
||||||
* portable 버전을 사용할 경우 run_nvidia_gpu.bat 파일이 있는 경로인 경우, 다음과 같은 방식으로 명령을 실행해야 합니다.
|
* portable 버전을 사용할 경우 run_nvidia_gpu.bat 파일이 있는 경로인 경우, 다음과 같은 방식으로 코맨드를 실행해야 합니다.
|
||||||
`.\python_embeded\python.exe ComfyUI\custom_nodes\ComfyUI-Manager\cm-cli.py update all`
|
`.\python_embeded\python.exe ComfyUI\custom_nodes\ComfyUI-Manager\cm-cli.py update all`
|
||||||
* ComfyUI 의 경로는 COMFYUI_PATH 환경 변수로 설정할 수 있습니다. 만약 생략할 경우 다음과 같은 경고 메시지가 나타나며, ComfyUI-Manager가 설치된 경로를 기준으로 상대 경로로 설정됩니다.
|
* ComfyUI 의 경로는 COMFYUI_PATH 환경 변수로 설정할 수 있습니다. 만약 생략할 경우 다음과 같은 경고 메시지가 나타나며, ComfyUI-Manager가 설치된 경로를 기준으로 상대 경로로 설정됩니다.
|
||||||
```
|
```
|
||||||
@@ -40,8 +40,8 @@ OPTIONS:
|
|||||||
|
|
||||||
### 1. --channel, --mode
|
### 1. --channel, --mode
|
||||||
* 정보 보기 기능과 커스텀 노드 관리 기능의 경우는 --channel과 --mode를 통해 정보 DB를 설정할 수 있습니다.
|
* 정보 보기 기능과 커스텀 노드 관리 기능의 경우는 --channel과 --mode를 통해 정보 DB를 설정할 수 있습니다.
|
||||||
* 예를 들어 `python cm-cli.py update all --channel recent --mode remote`와 같은 명령을 실행할 경우, 현재 ComfyUI-Manager repo에 내장된 로컬의 정보가 아닌 remote의 최신 정보를 기준으로 동작하며, recent channel에 있는 목록을 대상으로만 동작합니다.
|
* 예들 들어 `python cm-cli.py update all --channel recent --mode remote`와 같은 command를 실행할 경우, 현재 ComfyUI-Manager repo에 내장된 로컬의 정보가 아닌 remote의 최신 정보를 기준으로 동작하며, recent channel에 있는 목록을 대상으로만 동작합니다.
|
||||||
* --channel, --mode 는 `simple-show, show, install, uninstall, update, disable, enable, fix` 명령에서만 사용 가능합니다.
|
* --channel, --mode 는 `simple-show, show, install, uninstall, update, disable, enable, fix` command에서만 사용 가능합니다.
|
||||||
|
|
||||||
### 2. 관리 정보 보기
|
### 2. 관리 정보 보기
|
||||||
|
|
||||||
@@ -51,7 +51,7 @@ OPTIONS:
|
|||||||
* `[show|simple-show]` - `show`는 상세하게 정보를 보여주며, `simple-show`는 간단하게 정보를 보여줍니다.
|
* `[show|simple-show]` - `show`는 상세하게 정보를 보여주며, `simple-show`는 간단하게 정보를 보여줍니다.
|
||||||
|
|
||||||
|
|
||||||
`python cm-cli.py show installed` 와 같은 명령을 실행하면 설치된 커스텀 노드의 정보를 상세하게 보여줍니다.
|
`python cm-cli.py show installed` 와 같은 코맨드를 실행하면 설치된 커스텀 노드의 정보를 상세하게 보여줍니다.
|
||||||
```
|
```
|
||||||
-= ComfyUI-Manager CLI (V2.24) =-
|
-= ComfyUI-Manager CLI (V2.24) =-
|
||||||
|
|
||||||
@@ -67,7 +67,7 @@ FETCH DATA from: https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main
|
|||||||
[ DISABLED ] ComfyUI-Loopchain (author: Fannovel16)
|
[ DISABLED ] ComfyUI-Loopchain (author: Fannovel16)
|
||||||
```
|
```
|
||||||
|
|
||||||
`python cm-cli.py simple-show installed` 와 같은 명령을 이용해서 설치된 커스텀 노드의 정보를 간단하게 보여줍니다.
|
`python cm-cli.py simple-show installed` 와 같은 코맨드를 이용해서 설치된 커스텀 노드의 정보를 간단하게 보여줍니다.
|
||||||
|
|
||||||
```
|
```
|
||||||
-= ComfyUI-Manager CLI (V2.24) =-
|
-= ComfyUI-Manager CLI (V2.24) =-
|
||||||
@@ -89,7 +89,7 @@ ComfyUI-Loopchain
|
|||||||
* `installed`: enable, disable 여부와 상관없이 설치된 모든 노드를 보여줍니다
|
* `installed`: enable, disable 여부와 상관없이 설치된 모든 노드를 보여줍니다
|
||||||
* `not-installed`: 설치되지 않은 커스텀 노드의 목록을 보여줍니다.
|
* `not-installed`: 설치되지 않은 커스텀 노드의 목록을 보여줍니다.
|
||||||
* `all`: 모든 커스텀 노드의 목록을 보여줍니다.
|
* `all`: 모든 커스텀 노드의 목록을 보여줍니다.
|
||||||
* `snapshot`: 현재 설치된 커스텀 노드의 snapshot 정보를 보여줍니다. `show`를 통해서 볼 경우는 json 출력 형태로 보여주며, `simple-show`를 통해서 볼 경우는 간단하게, 커밋 해시와 함께 보여줍니다.
|
* `snapshot`: 현재 설치된 커스텀 노드의 snapshot 정보를 보여줍니다. `show`롤 통해서 볼 경우는 json 출력 형태로 보여주며, `simple-show`를 통해서 볼 경우는 간단하게, 커밋 해시와 함께 보여줍니다.
|
||||||
* `snapshot-list`: ComfyUI-Manager/snapshots 에 저장된 snapshot 파일의 목록을 보여줍니다.
|
* `snapshot-list`: ComfyUI-Manager/snapshots 에 저장된 snapshot 파일의 목록을 보여줍니다.
|
||||||
|
|
||||||
### 3. 커스텀 노드 관리 하기
|
### 3. 커스텀 노드 관리 하기
|
||||||
@@ -98,7 +98,7 @@ ComfyUI-Loopchain
|
|||||||
|
|
||||||
* `python cm-cli.py install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack ComfyUI_experiments` 와 같이 커스텀 노드의 이름을 나열해서 관리 기능을 적용할 수 있습니다.
|
* `python cm-cli.py install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack ComfyUI_experiments` 와 같이 커스텀 노드의 이름을 나열해서 관리 기능을 적용할 수 있습니다.
|
||||||
* 커스텀 노드의 이름은 `show`를 했을 때 보여주는 이름이며, git repository의 이름입니다.
|
* 커스텀 노드의 이름은 `show`를 했을 때 보여주는 이름이며, git repository의 이름입니다.
|
||||||
(추후 nickname을 사용 가능하도록 업데이트할 예정입니다.)
|
(추후 nickname 을 사용가능하돌고 업데이트 할 예정입니다.)
|
||||||
|
|
||||||
`[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]`
|
`[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]`
|
||||||
|
|
||||||
@@ -124,7 +124,7 @@ ComfyUI-Loopchain
|
|||||||
* `--pip-non-local-url`: web URL에 등록된 pip 패키지들에 대해서 복구를 수행
|
* `--pip-non-local-url`: web URL에 등록된 pip 패키지들에 대해서 복구를 수행
|
||||||
* `--pip-local-url`: local 경로를 지정하고 있는 pip 패키지들에 대해서 복구를 수행
|
* `--pip-local-url`: local 경로를 지정하고 있는 pip 패키지들에 대해서 복구를 수행
|
||||||
* `--user-directory`: 사용자 디렉토리 설정
|
* `--user-directory`: 사용자 디렉토리 설정
|
||||||
* `--restore-to`: 복구될 커스텀 노드가 설치될 경로. (이 옵션을 적용할 경우 오직 대상 경로에 설치된 custom nodes만 설치된 것으로 인식함.)
|
* `--restore-to`: 복구될 커스텀 노드가 설치될 경로. (이 옵션을 적용할 경우 오직 대상 경로에 설치된 custom nodes 만 설치된 것으로 인식함.)
|
||||||
|
|
||||||
### 5. CLI only mode
|
### 5. CLI only mode
|
||||||
|
|
||||||
@@ -133,7 +133,7 @@ ComfyUI-Manager를 CLI로만 사용할 것인지를 설정할 수 있습니다.
|
|||||||
`cli-only-mode [enable|disable]`
|
`cli-only-mode [enable|disable]`
|
||||||
|
|
||||||
* security 혹은 policy 의 이유로 GUI 를 통한 ComfyUI-Manager 사용을 제한하고 싶은 경우 이 모드를 사용할 수 있습니다.
|
* security 혹은 policy 의 이유로 GUI 를 통한 ComfyUI-Manager 사용을 제한하고 싶은 경우 이 모드를 사용할 수 있습니다.
|
||||||
* CLI only mode를 적용할 경우 ComfyUI-Manager 가 매우 제한된 상태로 로드되어, 내부적으로 제공하는 web API가 비활성화되며, 메인 메뉴에서도 Manager 버튼이 표시되지 않습니다.
|
* CLI only mode를 적용할 경우 ComfyUI-Manager 가 매우 제한된 상태로 로드되어, 내부적으로 제공하는 web API가 비활성화 되며, 메인 메뉴에서도 Manager 버튼이 표시되지 않습니다.
|
||||||
|
|
||||||
|
|
||||||
### 6. 의존성 설치
|
### 6. 의존성 설치
|
||||||
@@ -141,10 +141,10 @@ ComfyUI-Manager를 CLI로만 사용할 것인지를 설정할 수 있습니다.
|
|||||||
`restore-dependencies`
|
`restore-dependencies`
|
||||||
|
|
||||||
* `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
|
* `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
|
||||||
* Colab과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행되어야 하는 경우 사용합니다.
|
* colab 과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행 되어야 하는 경우 사용합니다.
|
||||||
* ComfyUI를 재설치할 경우, custom_nodes 경로만 백업했다가 재설치할 경우 활용 가능합니다.
|
* ComfyUI을 재설치할 경우, custom_nodes 경로만 백업했다가 재설치 할 경우 활용 가능합니다.
|
||||||
|
|
||||||
|
|
||||||
### 7. clear
|
### 7. clear
|
||||||
|
|
||||||
GUI에서 install, update를 하거나 snapshot을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.
|
GUI에서 install, update를 하거나 snapshot 을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -5045,105 +5045,6 @@
|
|||||||
"size": "1.26GB"
|
"size": "1.26GB"
|
||||||
},
|
},
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
|
||||||
"size": "28.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"size": "14.3GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
|
||||||
"size": "28.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"size": "14.3GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
|
||||||
"size": "28.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"size": "14.3GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
|
||||||
"size": "28.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
|
||||||
"size": "14.3GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Wan2.2",
|
|
||||||
"save_path": "diffusion_models/Wan2.2",
|
|
||||||
"description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
|
||||||
"filename": "wan2.2_ti2v_5B_fp16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
|
|
||||||
"size": "10.0GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "Comfy-Org/umt5_xxl_fp16.safetensors",
|
"name": "Comfy-Org/umt5_xxl_fp16.safetensors",
|
||||||
@@ -5355,317 +5256,6 @@
|
|||||||
"filename": "LBM_relighting.safetensors",
|
"filename": "LBM_relighting.safetensors",
|
||||||
"url": "https://huggingface.co/jasperai/LBM_relighting/resolve/main/model.safetensors",
|
"url": "https://huggingface.co/jasperai/LBM_relighting/resolve/main/model.safetensors",
|
||||||
"size": "5.02GB"
|
"size": "5.02GB"
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image VAE",
|
|
||||||
"type": "VAE",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "vae/qwen-image",
|
|
||||||
"description": "VAE model for Qwen-Image",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
|
||||||
"filename": "qwen_image_vae.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
|
|
||||||
"size": "335MB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen 2.5 VL 7B Text Encoder (fp8_scaled)",
|
|
||||||
"type": "clip",
|
|
||||||
"base": "Qwen-2.5-VL",
|
|
||||||
"save_path": "text_encoders/qwen",
|
|
||||||
"description": "Qwen 2.5 VL 7B text encoder model (fp8_scaled)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
|
||||||
"filename": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
|
||||||
"size": "3.75GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen 2.5 VL 7B Text Encoder",
|
|
||||||
"type": "clip",
|
|
||||||
"base": "Qwen-2.5-VL",
|
|
||||||
"save_path": "text_encoders/qwen",
|
|
||||||
"description": "Qwen 2.5 VL 7B text encoder model",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
|
||||||
"filename": "qwen_2.5_vl_7b.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b.safetensors",
|
|
||||||
"size": "7.51GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image Diffusion Model (fp8_e4m3fn)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "diffusion_models/qwen-image",
|
|
||||||
"description": "Qwen-Image diffusion model (fp8_e4m3fn)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
|
||||||
"filename": "qwen_image_fp8_e4m3fn.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
|
|
||||||
"size": "4.89GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image Diffusion Model (bf16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "diffusion_models/qwen-image",
|
|
||||||
"description": "Qwen-Image diffusion model (bf16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
|
|
||||||
"filename": "qwen_image_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_bf16.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit 2509 Diffusion Model (fp8_e4m3fn)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "diffusion_models/qwen-image-edit",
|
|
||||||
"description": "Qwen-Image-Edit 2509 diffusion model (fp8_e4m3fn)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
|
||||||
"filename": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
|
||||||
"size": "4.89GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit 2509 Diffusion Model (bf16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "diffusion_models/qwen-image-edit",
|
|
||||||
"description": "Qwen-Image-Edit 2509 diffusion model (bf16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
|
||||||
"filename": "qwen_image_edit_2509_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_bf16.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit Diffusion Model (fp8_e4m3fn)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "diffusion_models/qwen-image-edit",
|
|
||||||
"description": "Qwen-Image-Edit diffusion model (fp8_e4m3fn)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
|
||||||
"filename": "qwen_image_edit_fp8_e4m3fn.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_fp8_e4m3fn.safetensors",
|
|
||||||
"size": "4.89GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit Diffusion Model (bf16)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "diffusion_models/qwen-image-edit",
|
|
||||||
"description": "Qwen-Image-Edit diffusion model (bf16)",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
|
|
||||||
"filename": "qwen_image_edit_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_bf16.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 8steps V1.0",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 8-step LoRA model V1.0",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-8steps-V1.0.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 4steps V1.0",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 4-step LoRA model V1.0",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 4steps V1.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 4-step LoRA model V1.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-4steps-V1.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 4steps V2.0",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 4-step LoRA model V2.0",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-4steps-V2.0.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V2.0.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 4steps V2.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 4-step LoRA model V2.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-4steps-V2.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V2.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 8steps V1.1",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 8-step LoRA model V1.1",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-8steps-V1.1.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 8steps V1.1 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 8-step LoRA model V1.1 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-8steps-V1.1-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 8steps V2.0",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 8-step LoRA model V2.0",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Lightning 8steps V2.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "loras/qwen-image-lightning",
|
|
||||||
"description": "Qwen-Image-Lightning 8-step LoRA model V2.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Lightning-8steps-V2.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-Lightning 4steps V1.0",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-Lightning 4-step LoRA model V1.0",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-Lightning-4steps-V1.0.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-4steps-V1.0.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-Lightning 4steps V1.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-Lightning 4-step LoRA model V1.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-Lightning-4steps-V1.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-4steps-V1.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-Lightning 8steps V1.0",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-Lightning 8-step LoRA model V1.0",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-Lightning-8steps-V1.0.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-8steps-V1.0.safetensors",
|
|
||||||
"size": "9.78GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-Lightning 8steps V1.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-Lightning 8-step LoRA model V1.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-Lightning-8steps-V1.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-8steps-V1.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-2509-Lightning 4steps V1.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-2509-Lightning 4-step LoRA model V1.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-2509-Lightning 4steps V1.0 (fp32)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-2509-Lightning 4-step LoRA model V1.0 (fp32)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-fp32.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-fp32.safetensors",
|
|
||||||
"size": "39.1GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-2509-Lightning 8steps V1.0 (bf16)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-2509-Lightning 8-step LoRA model V1.0 (bf16)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-2509-Lightning-8steps-V1.0-bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-8steps-V1.0-bf16.safetensors",
|
|
||||||
"size": "19.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image-Edit-2509-Lightning 8steps V1.0 (fp32)",
|
|
||||||
"type": "lora",
|
|
||||||
"base": "Qwen-Image-Edit",
|
|
||||||
"save_path": "loras/qwen-image-edit-lightning",
|
|
||||||
"description": "Qwen-Image-Edit-2509-Lightning 8-step LoRA model V1.0 (fp32)",
|
|
||||||
"reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
|
|
||||||
"filename": "Qwen-Image-Edit-2509-Lightning-8steps-V1.0-fp32.safetensors",
|
|
||||||
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-8steps-V1.0-fp32.safetensors",
|
|
||||||
"size": "39.1GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image InstantX ControlNet Union",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "controlnet/qwen-image/instantx",
|
|
||||||
"description": "Qwen-Image InstantX ControlNet Union model",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets",
|
|
||||||
"filename": "Qwen-Image-InstantX-ControlNet-Union.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Union.safetensors",
|
|
||||||
"size": "2.54GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Qwen-Image InstantX ControlNet Inpainting",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "Qwen-Image",
|
|
||||||
"save_path": "controlnet/qwen-image/instantx",
|
|
||||||
"description": "Qwen-Image InstantX ControlNet Inpainting model",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets",
|
|
||||||
"filename": "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
|
|
||||||
"size": "2.54GB"
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
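Each entry in this model-list diff pairs a download `url` with a `save_path` and `filename` under the ComfyUI models directory. A minimal sketch of how such an entry could be fetched is shown below; the `models_root` default and the use of `requests` are assumptions for illustration, not part of the list format or of ComfyUI-Manager's own downloader.

```python
import os
import requests

def download_model_entry(entry: dict, models_root: str = "ComfyUI/models") -> str:
    """Download one model-list entry to <models_root>/<save_path>/<filename>."""
    target_dir = os.path.join(models_root, entry["save_path"])
    os.makedirs(target_dir, exist_ok=True)
    target_path = os.path.join(target_dir, entry["filename"])
    if os.path.exists(target_path):
        return target_path  # already present, skip re-download
    with requests.get(entry["url"], stream=True, timeout=30) as r:
        r.raise_for_status()
        with open(target_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=1 << 20):
                f.write(chunk)
    return target_path

entry = {
    "save_path": "loras/qwen-image-lightning",
    "filename": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0.safetensors",
}
# download_model_entry(entry)  # ~9.78GB, streamed to disk rather than held in memory
```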
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,15 +1,5 @@
 {
 "custom_nodes": [
- {
- "author": "synchronicity-labs",
- "title": "ComfyUI Sync Lipsync Node",
- "reference": "https://github.com/synchronicity-labs/sync-comfyui",
- "files": [
- "https://github.com/synchronicity-labs/sync-comfyui"
- ],
- "install_type": "git-clone",
- "description": "This custom node allows you to perform audio-video lip synchronization inside ComfyUI using a simple interface."
- },
 {
 "author": "joaomede",
 "title": "ComfyUI-Unload-Model-Fork",
@@ -169,16 +159,6 @@
 ],
 "install_type": "git-clone",
 "description": "A fork of KJNodes for ComfyUI.\nVarious quality of life -nodes for ComfyUI, mostly just visual stuff to improve usability"
- },
- {
- "author": "huixingyun",
- "title": "ComfyUI-SoundFlow",
- "reference": "https://github.com/huixingyun/ComfyUI-SoundFlow",
- "files": [
- "https://github.com/huixingyun/ComfyUI-SoundFlow"
- ],
- "install_type": "git-clone",
- "description": "forked from https://github.com/fredconex/ComfyUI-SoundFlow (removed)"
 }
 ]
 }
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,106 +1,5 @@
 {
 "models": [
-
- {
- "name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
- "size": "28.6GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
- "size": "14.3GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
- "size": "28.6GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
- "size": "14.3GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
- "size": "28.6GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
- "size": "14.3GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
- "size": "28.6GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
- "size": "14.3GB"
- },
- {
- "name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
- "type": "diffusion_model",
- "base": "Wan2.2",
- "save_path": "diffusion_models/Wan2.2",
- "description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
- "reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
- "filename": "wan2.2_ti2v_5B_fp16.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
- "size": "10.0GB"
- },
-
 {
 "name": "sam2.1_hiera_tiny.pt",
 "type": "sam2.1",
@@ -687,6 +586,109 @@
 "filename": "llava_llama3_fp16.safetensors",
 "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp16.safetensors",
 "size": "16.1GB"
+ },
+
+ {
+ "name": "PixArt-Sigma-XL-2-512-MS.safetensors (diffusion)",
+ "type": "diffusion_model",
+ "base": "pixart-sigma",
+ "save_path": "diffusion_models/PixArt-Sigma",
+ "description": "PixArt-Sigma Diffusion model",
+ "reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS",
+ "filename": "PixArt-Sigma-XL-2-512-MS.safetensors",
+ "url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
+ "size": "2.44GB"
+ },
+ {
+ "name": "PixArt-Sigma-XL-2-1024-MS.safetensors (diffusion)",
+ "type": "diffusion_model",
+ "base": "pixart-sigma",
+ "save_path": "diffusion_models/PixArt-Sigma",
+ "description": "PixArt-Sigma Diffusion model",
+ "reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS",
+ "filename": "PixArt-Sigma-XL-2-1024-MS.safetensors",
+ "url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
+ "size": "2.44GB"
+ },
+ {
+ "name": "PixArt-XL-2-1024-MS.safetensors (diffusion)",
+ "type": "diffusion_model",
+ "base": "pixart-alpha",
+ "save_path": "diffusion_models/PixArt-Alpha",
+ "description": "PixArt-Alpha Diffusion model",
+ "reference": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS",
+ "filename": "PixArt-XL-2-1024-MS.safetensors",
+ "url": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
+ "size": "2.45GB"
+ },
+
+ {
+ "name": "Comfy-Org/hunyuan_video_t2v_720p_bf16.safetensors",
+ "type": "diffusion_model",
+ "base": "Hunyuan Video",
+ "save_path": "diffusion_models/hunyuan_video",
+ "description": "Huyuan Video diffusion model. repackaged version.",
+ "reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
+ "filename": "hunyuan_video_t2v_720p_bf16.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors",
+ "size": "25.6GB"
+ },
+ {
+ "name": "Comfy-Org/hunyuan_video_vae_bf16.safetensors",
+ "type": "VAE",
+ "base": "Hunyuan Video",
+ "save_path": "VAE",
+ "description": "Huyuan Video VAE model. repackaged version.",
+ "reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
+ "filename": "hunyuan_video_vae_bf16.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors",
+ "size": "493MB"
+ },
+
+ {
+ "name": "LTX-Video 2B v0.9.1 Checkpoint",
+ "type": "checkpoint",
+ "base": "LTX-Video",
+ "save_path": "checkpoints/LTXV",
+ "description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
+ "reference": "https://huggingface.co/Lightricks/LTX-Video",
+ "filename": "ltx-video-2b-v0.9.1.safetensors",
+ "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.1.safetensors",
+ "size": "5.72GB"
+ },
+
+ {
+ "name": "XLabs-AI/flux-canny-controlnet-v3.safetensors",
+ "type": "controlnet",
+ "base": "FLUX.1",
+ "save_path": "xlabs/controlnets",
+ "description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
+ "reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
+ "filename": "flux-canny-controlnet-v3.safetensors",
+ "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-canny-controlnet-v3.safetensors",
+ "size": "1.49GB"
+ },
+ {
+ "name": "XLabs-AI/flux-depth-controlnet-v3.safetensors",
+ "type": "controlnet",
+ "base": "FLUX.1",
+ "save_path": "xlabs/controlnets",
+ "description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
+ "reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
+ "filename": "flux-depth-controlnet-v3.safetensors",
+ "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-depth-controlnet-v3.safetensors",
+ "size": "1.49GB"
+ },
+ {
+ "name": "XLabs-AI/flux-hed-controlnet-v3.safetensors",
+ "type": "controlnet",
+ "base": "FLUX.1",
+ "save_path": "xlabs/controlnets",
+ "description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
+ "reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
+ "filename": "flux-hed-controlnet-v3.safetensors",
+ "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
+ "size": "1.49GB"
 }
 ]
 }
@@ -10,16 +10,6 @@
 "install_type": "git-clone",
 "description": "A minimal template for creating React/TypeScript frontend extensions for ComfyUI, with complete boilerplate setup including internationalization and unit testing."
 },
- {
- "author": "comfyui-wiki",
- "title": "ComfyUI-i18n-demo",
- "reference": "https://github.com/comfyui-wiki/ComfyUI-i18n-demo",
- "files": [
- "https://github.com/comfyui-wiki/ComfyUI-i18n-demo"
- ],
- "install_type": "git-clone",
- "description": "ComfyUI custom node develop i18n support demo "
- },
 {
 "author": "Suzie1",
 "title": "Guide To Making Custom Nodes in ComfyUI",
@@ -351,26 +341,6 @@
 ],
 "install_type": "git-clone",
 "description": "A minimal test suite demonstrating how remote COMBO inputs behave in ComfyUI, with and without force_input"
- },
- {
- "author": "J1mB091",
- "title": "ComfyUI-J1mB091 Custom Nodes",
- "reference": "https://github.com/J1mB091/ComfyUI-J1mB091",
- "files": [
- "https://github.com/J1mB091/ComfyUI-J1mB091"
- ],
- "install_type": "git-clone",
- "description": "Vibe Coded ComfyUI Custom Nodes"
- },
- {
- "author": "aiforhumans",
- "title": "XDev Nodes - Complete Toolkit",
- "reference": "https://github.com/aiforhumans/comfyui-xdev-nodes",
- "files": [
- "https://github.com/aiforhumans/comfyui-xdev-nodes"
- ],
- "install_type": "git-clone",
- "description": "Complete ComfyUI development toolkit with 8 professional nodes including VAE tools, universal type testing, and comprehensive debugging infrastructure."
 }
 ]
 }
142 openapi.yaml
@@ -782,39 +782,6 @@ components:
           minimum: 0
           default: 0
       required: [batch_id, start_time, state_before]
-
-    ImportFailInfoBulkRequest:
-      type: object
-      properties:
-        cnr_ids:
-          type: array
-          items:
-            type: string
-          description: A list of CNR IDs to check.
-        urls:
-          type: array
-          items:
-            type: string
-          description: A list of repository URLs to check.
-
-    ImportFailInfoBulkResponse:
-      type: object
-      additionalProperties:
-        $ref: '#/components/schemas/ImportFailInfoItem'
-      description: >-
-        A dictionary where each key is a cnr_id or url from the request,
-        and the value is the corresponding error info.
-
-    ImportFailInfoItem:
-      oneOf:
-        - type: object
-          properties:
-            error:
-              type: string
-            traceback:
-              type: string
-        - type: "null"
-
   securitySchemes:
     securityLevel:
       type: apiKey
@@ -1050,32 +1017,6 @@ paths:
          description: Processing started
        '201':
          description: Processing already in progress
-
-  /v2/customnode/import_fail_info_bulk:
-    post:
-      summary: Get import failure info for multiple nodes
-      description: Retrieves recorded import failure information for a list of custom nodes.
-      tags:
-        - customnode
-      requestBody:
-        description: A list of CNR IDs or repository URLs to check.
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/ImportFailInfoBulkRequest'
-      responses:
-        '200':
-          description: A dictionary containing the import failure information.
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ImportFailInfoBulkResponse'
-        '400':
-          description: Bad Request. The request body is invalid.
-        '500':
-          description: Internal Server Error.
-
  /v2/manager/queue/reset:
    get:
      summary: Reset queue
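The removed `/v2/customnode/import_fail_info_bulk` route accepts a JSON body with `cnr_ids` and/or `urls` and returns a mapping from each requested key to its recorded failure info, or null when no failure was recorded. A minimal client sketch against a locally running manager-v4 instance follows; the base URL and the example identifiers are assumptions, not values from this spec.

```python
import requests

BASE = "http://127.0.0.1:8188"  # assumed local ComfyUI + Manager instance

payload = {
    "cnr_ids": ["comfyui-example-pack"],                 # hypothetical CNR id
    "urls": ["https://github.com/example-owner/example-repo"],  # hypothetical repo URL
}
resp = requests.post(f"{BASE}/v2/customnode/import_fail_info_bulk", json=payload, timeout=10)
resp.raise_for_status()

# Per the ImportFailInfoItem schema, each value is either null or an object
# carrying "error" and "traceback" strings.
for key, info in resp.json().items():
    print(key, "->", "no recorded failure" if info is None else info.get("error"))
```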
@@ -1268,6 +1209,89 @@ paths:
          description: Snapshot saved successfully
        '400':
          description: Error saving snapshot
+
+  /v2/snapshot/diff:
+    get:
+      summary: Get snapshot diff
+      description: Returns the changes that would occur when restoring from the 'from' snapshot to the 'to' snapshot.
+      parameters:
+        - name: from
+          in: query
+          required: false
+          description: This parameter refers to the existing snapshot; if omitted, it defaults to the current snapshot.
+          schema:
+            type: string
+        - name: to
+          in: query
+          required: true
+          description: This parameter is the snapshot to compare against the existing snapshot.
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Successful operation
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  nodepack_diff:
+                    type: object
+                    properties:
+                      added:
+                        type: object
+                        additionalProperties:
+                          type: string
+                      removed:
+                        type: array
+                        items:
+                          type: string
+                      upgraded:
+                        type: object
+                        additionalProperties:
+                          type: object
+                          properties:
+                            from:
+                              type: string
+                            to:
+                              type: string
+                      downgraded:
+                        type: object
+                        additionalProperties:
+                          type: object
+                          properties:
+                            from:
+                              type: string
+                            to:
+                              type: string
+                      changed:
+                        type: array
+                        items:
+                          type: string
+                  pip_diff:
+                    type: object
+                    properties:
+                      added:
+                        type: object
+                        additionalProperties:
+                          type: string
+                      upgraded:
+                        type: object
+                        additionalProperties:
+                          type: object
+                          properties:
+                            from:
+                              type: string
+                            to:
+                              type: string
+                      downgraded:
+                        type: object
+                        additionalProperties:
+                          type: object
+                          properties:
+                            from:
+                              type: string
+                            to:
+                              type: string
+
   # ComfyUI Management Endpoints (v2)
   /v2/comfyui_manager/comfyui_versions:
     get:
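The added `/v2/snapshot/diff` route takes a required `to` snapshot and an optional `from` snapshot and reports node-pack and pip-package changes. A quick usage sketch, assuming a local instance and a hypothetical snapshot name:

```python
import requests

BASE = "http://127.0.0.1:8188"  # assumed local ComfyUI + Manager instance

params = {"to": "2024-01-01_12-00-00_snapshot.json"}  # hypothetical snapshot name; 'from' defaults to the current state
diff = requests.get(f"{BASE}/v2/snapshot/diff", params=params, timeout=10).json()

# nodepack_diff.added maps pack name -> version string per the schema above.
for pack, version in diff.get("nodepack_diff", {}).get("added", {}).items():
    print(f"restore would add node pack {pack} @ {version}")
# pip_diff.upgraded maps package -> {"from": ..., "to": ...}.
for pkg, change in diff.get("pip_diff", {}).get("upgraded", {}).items():
    print(f"pip upgrade {pkg}: {change['from']} -> {change['to']}")
```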
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "comfyui-manager"
 license = { text = "GPL-3.0-only" }
-version = "4.0.3b4"
+version = "4.0.0-beta.10"
 requires-python = ">= 3.9"
 description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
 readme = "README.md"
@@ -19,7 +19,7 @@ maintainers = [
 ]

 classifiers = [
-    "Development Status :: 5 - Production/Stable",
+    "Development Status :: 4 - Beta",
     "Intended Audience :: Developers",
     "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
 ]
@@ -27,7 +27,7 @@ classifiers = [
 dependencies = [
     "GitPython",
     "PyGithub",
-    # "matrix-nio",
+    "matrix-client==0.4.0",
     "transformers",
     "huggingface-hub>0.20",
     "typer",
@@ -1,8 +1,8 @@
 GitPython
 PyGithub
-# matrix-nio
+matrix-nio
 transformers
-huggingface-hub
+huggingface-hub>0.20
 typer
 rich
 typing-extensions
@@ -9,4 +9,4 @@ lint.select = [
     "F",
 ]

-exclude = ["*.ipynb", "tests"]
+exclude = ["*.ipynb"]
410 scanner.py
@@ -7,15 +7,13 @@ import concurrent
 import datetime
 import concurrent.futures
 import requests
-import warnings
-import argparse

 builtin_nodes = set()

 import sys

 from urllib.parse import urlparse
-from github import Github, Auth
+from github import Github


 def download_url(url, dest_folder, filename=None):
@@ -41,51 +39,26 @@ def download_url(url, dest_folder, filename=None):
         raise Exception(f"Failed to download file from {url}")


-def parse_arguments():
-    """Parse command-line arguments"""
-    parser = argparse.ArgumentParser(
-        description='ComfyUI Manager Node Scanner',
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog='''
-Examples:
-  # Standard mode
-  python3 scanner.py
-  python3 scanner.py --skip-update
-
-  # Scan-only mode
-  python3 scanner.py --scan-only temp-urls-clean.list
-  python3 scanner.py --scan-only urls.list --temp-dir /custom/temp
-  python3 scanner.py --scan-only urls.list --skip-update
-'''
-    )
-
-    parser.add_argument('--scan-only', type=str, metavar='URL_LIST_FILE',
-                        help='Scan-only mode: provide URL list file (one URL per line)')
-    parser.add_argument('--temp-dir', type=str, metavar='DIR',
-                        help='Temporary directory for cloned repositories')
-    parser.add_argument('--skip-update', action='store_true',
-                        help='Skip git clone/pull operations')
-    parser.add_argument('--skip-stat-update', action='store_true',
-                        help='Skip GitHub stats collection')
-    parser.add_argument('--skip-all', action='store_true',
-                        help='Skip all update operations')
-
-    # Backward compatibility: positional argument for temp_dir
-    parser.add_argument('temp_dir_positional', nargs='?', metavar='TEMP_DIR',
-                        help='(Legacy) Temporary directory path')
-
-    args = parser.parse_args()
-    return args
-
-
-# Module-level variables (will be set in main if running as script)
-args = None
-scan_only_mode = False
-url_list_file = None
-temp_dir = None
-skip_update = False
-skip_stat_update = True
-g = None
+# prepare temp dir
+if len(sys.argv) > 1:
+    temp_dir = sys.argv[1]
+else:
+    temp_dir = os.path.join(os.getcwd(), ".tmp")
+
+if not os.path.exists(temp_dir):
+    os.makedirs(temp_dir)
+
+skip_update = '--skip-update' in sys.argv or '--skip-all' in sys.argv
+skip_stat_update = '--skip-stat-update' in sys.argv or '--skip-all' in sys.argv
+
+if not skip_stat_update:
+    g = Github(os.environ.get('GITHUB_TOKEN'))
+else:
+    g = None
+
+print(f"TEMP DIR: {temp_dir}")


 parse_cnt = 0
@@ -100,22 +73,12 @@ def extract_nodes(code_text):
         parse_cnt += 1

         code_text = re.sub(r'\\[^"\']', '', code_text)
-        with warnings.catch_warnings():
-            warnings.filterwarnings('ignore', category=SyntaxWarning)
-            warnings.filterwarnings('ignore', category=DeprecationWarning)
-            parsed_code = ast.parse(code_text)
-
-        # Support both ast.Assign and ast.AnnAssign (for type-annotated assignments)
-        assignments = (node for node in parsed_code.body if isinstance(node, (ast.Assign, ast.AnnAssign)))
+        parsed_code = ast.parse(code_text)
+
+        assignments = (node for node in parsed_code.body if isinstance(node, ast.Assign))

         for assignment in assignments:
-            # Handle ast.AnnAssign (e.g., NODE_CLASS_MAPPINGS: Type = {...})
-            if isinstance(assignment, ast.AnnAssign):
-                if isinstance(assignment.target, ast.Name) and assignment.target.id in ['NODE_CONFIG', 'NODE_CLASS_MAPPINGS']:
-                    node_class_mappings = assignment.value
-                    break
-            # Handle ast.Assign (e.g., NODE_CLASS_MAPPINGS = {...})
-            elif isinstance(assignment.targets[0], ast.Name) and assignment.targets[0].id in ['NODE_CONFIG', 'NODE_CLASS_MAPPINGS']:
+            if isinstance(assignment.targets[0], ast.Name) and assignment.targets[0].id in ['NODE_CONFIG', 'NODE_CLASS_MAPPINGS']:
                 node_class_mappings = assignment.value
                 break
         else:
@@ -127,7 +90,7 @@ def extract_nodes(code_text):
             for key in node_class_mappings.keys:
                 if key is not None and isinstance(key.value, str):
                     s.add(key.value.strip())

             return s
         else:
             return set()
@@ -135,99 +98,6 @@ def extract_nodes(code_text):
         return set()


-def has_comfy_node_base(class_node):
-    """Check if class inherits from io.ComfyNode or ComfyNode"""
-    for base in class_node.bases:
-        # Case 1: ComfyNode
-        if isinstance(base, ast.Name) and base.id == 'ComfyNode':
-            return True
-        # Case 2: io.ComfyNode
-        elif isinstance(base, ast.Attribute):
-            if base.attr == 'ComfyNode':
-                return True
-    return False
-
-
-def extract_keyword_value(call_node, keyword):
-    """
-    Extract string value of keyword argument
-    Schema(node_id="MyNode") -> "MyNode"
-    """
-    for kw in call_node.keywords:
-        if kw.arg == keyword:
-            # ast.Constant (Python 3.8+)
-            if isinstance(kw.value, ast.Constant):
-                if isinstance(kw.value.value, str):
-                    return kw.value.value
-            # ast.Str (Python 3.7-) - suppress deprecation warning
-            else:
-                with warnings.catch_warnings():
-                    warnings.filterwarnings('ignore', category=DeprecationWarning)
-                    if hasattr(ast, 'Str') and isinstance(kw.value, ast.Str):
-                        return kw.value.s
-    return None
-
-
-def is_schema_call(call_node):
-    """Check if ast.Call is io.Schema() or Schema()"""
-    func = call_node.func
-    if isinstance(func, ast.Name) and func.id == 'Schema':
-        return True
-    elif isinstance(func, ast.Attribute) and func.attr == 'Schema':
-        return True
-    return False
-
-
-def extract_node_id_from_schema(class_node):
-    """
-    Extract node_id from define_schema() method
-    """
-    for item in class_node.body:
-        if isinstance(item, ast.FunctionDef) and item.name == 'define_schema':
-            # Walk through function body
-            for stmt in ast.walk(item):
-                if isinstance(stmt, ast.Call):
-                    # Check if it's Schema() call
-                    if is_schema_call(stmt):
-                        node_id = extract_keyword_value(stmt, 'node_id')
-                        if node_id:
-                            return node_id
-    return None
-
-
-def extract_v3_nodes(code_text):
-    """
-    Extract V3 node IDs using AST parsing
-    Returns: set of node_id strings
-    """
-    global parse_cnt
-
-    try:
-        if parse_cnt % 100 == 0:
-            print(".", end="", flush=True)
-        parse_cnt += 1
-
-        with warnings.catch_warnings():
-            warnings.filterwarnings('ignore', category=SyntaxWarning)
-            warnings.filterwarnings('ignore', category=DeprecationWarning)
-            tree = ast.parse(code_text)
-    except (SyntaxError, UnicodeDecodeError):
-        return set()
-
-    nodes = set()
-
-    # Find io.ComfyNode subclasses
-    for node in ast.walk(tree):
-        if isinstance(node, ast.ClassDef):
-            # Check if inherits from ComfyNode
-            if has_comfy_node_base(node):
-                node_id = extract_node_id_from_schema(node)
-                if node_id:
-                    nodes.add(node_id)
-
-    return nodes
-
-
 # scan
 def scan_in_file(filename, is_builtin=False):
     global builtin_nodes
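For reference, the removed V3 detection walks the AST for classes that inherit from `ComfyNode` (or `io.ComfyNode`) and pulls `node_id` out of the `Schema(...)` call inside `define_schema()`. A minimal source snippet of the shape it matches is sketched below; the import path and keyword arguments are illustrative assumptions, not a verified ComfyUI API, and the scanner only inspects the text with `ast.parse()` so the module is never actually imported.

```python
# example_node.py -- scanned as text; the import does not need to resolve.
from comfy_api.latest import io  # assumed import path

class ExampleNode(io.ComfyNode):
    @classmethod
    def define_schema(cls):
        # extract_v3_nodes() would locate this Schema() call and record node_id.
        return io.Schema(node_id="ExampleNode", display_name="Example Node")
```

Given this file's text, `extract_v3_nodes()` on the manager-v4 side should return `{"ExampleNode"}`.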
@@ -235,18 +105,13 @@ def scan_in_file(filename, is_builtin=False):
     with open(filename, encoding='utf-8', errors='ignore') as file:
         code = file.read()

-    # Support type annotations (e.g., NODE_CLASS_MAPPINGS: Type = {...}) and line continuations (\)
-    pattern = r"_CLASS_MAPPINGS\s*(?::\s*\w+\s*)?=\s*(?:\\\s*)?{([^}]*)}"
+    pattern = r"_CLASS_MAPPINGS\s*=\s*{([^}]*)}"
     regex = re.compile(pattern, re.MULTILINE | re.DOTALL)

     nodes = set()
     class_dict = {}

-    # V1 nodes detection
     nodes |= extract_nodes(code)
-
-    # V3 nodes detection
-    nodes |= extract_v3_nodes(code)
     code = re.sub(r'^#.*?$', '', code, flags=re.MULTILINE)

     def extract_keys(pattern, code):
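The regex swap above is the crux of this hunk: the manager-v4 pattern also tolerates a type annotation or a line continuation before the mapping literal, while the draft pattern only matches a plain assignment. A small self-contained check (the sample source strings are made up for illustration):

```python
import re

pattern_v4 = r"_CLASS_MAPPINGS\s*(?::\s*\w+\s*)?=\s*(?:\\\s*)?{([^}]*)}"
pattern_draft = r"_CLASS_MAPPINGS\s*=\s*{([^}]*)}"

samples = [
    'NODE_CLASS_MAPPINGS = {"NodeA": NodeA}',        # plain assignment: both patterns match
    'NODE_CLASS_MAPPINGS: dict = {"NodeB": NodeB}',  # annotated assignment: only the v4 pattern matches
]
for src in samples:
    print(bool(re.search(pattern_v4, src, re.DOTALL)),
          bool(re.search(pattern_draft, src, re.DOTALL)))
# expected output:
# True True
# True False
```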
@@ -343,53 +208,6 @@ def get_nodes(target_dir):
     return py_files, directories


-def get_urls_from_list_file(list_file):
-    """
-    Read URLs from list file for scan-only mode
-
-    Args:
-        list_file (str): Path to URL list file (one URL per line)
-
-    Returns:
-        list of tuples: [(url, "", None, None), ...]
-        Format: (url, title, preemptions, nodename_pattern)
-        - title: Empty string
-        - preemptions: None
-        - nodename_pattern: None
-
-    File format:
-        https://github.com/owner/repo1
-        https://github.com/owner/repo2
-        # Comments starting with # are ignored
-
-    Raises:
-        FileNotFoundError: If list_file does not exist
-    """
-    if not os.path.exists(list_file):
-        raise FileNotFoundError(f"URL list file not found: {list_file}")
-
-    urls = []
-    with open(list_file, 'r', encoding='utf-8') as f:
-        for line_num, line in enumerate(f, 1):
-            line = line.strip()
-
-            # Skip empty lines and comments
-            if not line or line.startswith('#'):
-                continue
-
-            # Validate URL format (basic check)
-            if not (line.startswith('http://') or line.startswith('https://')):
-                print(f"WARNING: Line {line_num} is not a valid URL: {line}")
-                continue
-
-            # Add URL with empty metadata
-            # (url, title, preemptions, nodename_pattern)
-            urls.append((line, "", None, None))
-
-    print(f"Loaded {len(urls)} URLs from {list_file}")
-    return urls
-
-
 def get_git_urls_from_json(json_file):
     with open(json_file, encoding='utf-8') as file:
         data = json.load(file)
@@ -446,43 +264,13 @@ def clone_or_pull_git_repository(git_url):
         print(f"Failed to clone '{repo_name}': {e}")


-def update_custom_nodes(scan_only_mode=False, url_list_file=None):
-    """
-    Update custom nodes by cloning/pulling repositories
-
-    Args:
-        scan_only_mode (bool): If True, use URL list file instead of custom-node-list.json
-        url_list_file (str): Path to URL list file (required if scan_only_mode=True)
-
-    Returns:
-        dict: node_info mapping {repo_name: (url, title, preemptions, node_pattern)}
-    """
+def update_custom_nodes():
     if not os.path.exists(temp_dir):
         os.makedirs(temp_dir)

     node_info = {}

-    # Select URL source based on mode
-    if scan_only_mode:
-        if not url_list_file:
-            raise ValueError("url_list_file is required in scan-only mode")
-
-        git_url_titles_preemptions = get_urls_from_list_file(url_list_file)
-        print("\n[Scan-Only Mode]")
-        print(f" - URL source: {url_list_file}")
-        print(" - GitHub stats: DISABLED")
-        print(f" - Git clone/pull: {'ENABLED' if not skip_update else 'DISABLED'}")
-        print(" - Metadata: EMPTY")
-    else:
-        if not os.path.exists('custom-node-list.json'):
-            raise FileNotFoundError("custom-node-list.json not found")
-
-        git_url_titles_preemptions = get_git_urls_from_json('custom-node-list.json')
-        print("\n[Standard Mode]")
-        print(" - URL source: custom-node-list.json")
-        print(f" - GitHub stats: {'ENABLED' if not skip_stat_update else 'DISABLED'}")
-        print(f" - Git clone/pull: {'ENABLED' if not skip_update else 'DISABLED'}")
-        print(" - Metadata: FULL")
+    git_url_titles_preemptions = get_git_urls_from_json('custom-node-list.json')

     def process_git_url_title(url, title, preemptions, node_pattern):
         name = os.path.basename(url)
@@ -594,59 +382,46 @@ def update_custom_nodes(scan_only_mode=False, url_list_file=None):
     if not skip_stat_update:
         process_git_stats(git_url_titles_preemptions)

-    # Git clone/pull for all repositories
     with concurrent.futures.ThreadPoolExecutor(11) as executor:
         for url, title, preemptions, node_pattern in git_url_titles_preemptions:
             executor.submit(process_git_url_title, url, title, preemptions, node_pattern)

-    # .py file download (skip in scan-only mode - only process git repos)
-    if not scan_only_mode:
-        py_url_titles_and_pattern = get_py_urls_from_json('custom-node-list.json')
+    py_url_titles_and_pattern = get_py_urls_from_json('custom-node-list.json')

     def download_and_store_info(url_title_preemptions_and_pattern):
         url, title, preemptions, node_pattern = url_title_preemptions_and_pattern
         name = os.path.basename(url)
         if name.endswith(".py"):
             node_info[name] = (url, title, preemptions, node_pattern)

         try:
             download_url(url, temp_dir)
         except Exception:
             print(f"[ERROR] Cannot download '{url}'")

     with concurrent.futures.ThreadPoolExecutor(10) as executor:
         executor.map(download_and_store_info, py_url_titles_and_pattern)

     return node_info


-def gen_json(node_info, scan_only_mode=False):
-    """
-    Generate extension-node-map.json from scanned node information
-
-    Args:
-        node_info (dict): Repository metadata mapping
-        scan_only_mode (bool): If True, exclude metadata from output
-    """
+def gen_json(node_info):
     # scan from .py file
     node_files, node_dirs = get_nodes(temp_dir)

     comfyui_path = os.path.abspath(os.path.join(temp_dir, "ComfyUI"))
-    # Only reorder if ComfyUI exists in the list
-    if comfyui_path in node_dirs:
-        node_dirs.remove(comfyui_path)
-        node_dirs = [comfyui_path] + node_dirs
+    node_dirs.remove(comfyui_path)
+    node_dirs = [comfyui_path] + node_dirs

     data = {}
     for dirname in node_dirs:
         py_files = get_py_file_paths(dirname)
         metadata = {}

         nodes = set()
         for py in py_files:
             nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
             nodes.update(nodes_in_file)
-            # Include metadata from .py files in both modes
             metadata.update(metadata_in_file)

         dirname = os.path.basename(dirname)
@@ -661,28 +436,17 @@ def gen_json(node_info, scan_only_mode=False):
         if dirname in node_info:
             git_url, title, preemptions, node_pattern = node_info[dirname]

-            # Conditionally add metadata based on mode
-            if not scan_only_mode:
-                # Standard mode: include all metadata
-                metadata['title_aux'] = title
+            metadata['title_aux'] = title

             if preemptions is not None:
                 metadata['preemptions'] = preemptions

             if node_pattern is not None:
                 metadata['nodename_pattern'] = node_pattern
-            # Scan-only mode: metadata remains empty

             data[git_url] = (nodes, metadata)
         else:
-            # Scan-only mode: Repository not in node_info (expected behavior)
-            # Construct URL from dirname (author_repo format)
-            if '_' in dirname:
-                parts = dirname.split('_', 1)
-                git_url = f"https://github.com/{parts[0]}/{parts[1]}"
-                data[git_url] = (nodes, metadata)
-            else:
-                print(f"WARN: {dirname} is removed from custom-node-list.json")
+            print(f"WARN: {dirname} is removed from custom-node-list.json")

     for file in node_files:
         nodes, metadata = scan_in_file(file)
@@ -695,16 +459,13 @@ def gen_json(node_info, scan_only_mode=False):

         if file in node_info:
             url, title, preemptions, node_pattern = node_info[file]
+            metadata['title_aux'] = title

-            # Conditionally add metadata based on mode
-            if not scan_only_mode:
-                metadata['title_aux'] = title
-
-                if preemptions is not None:
-                    metadata['preemptions'] = preemptions
-
-                if node_pattern is not None:
-                    metadata['nodename_pattern'] = node_pattern
+            if preemptions is not None:
+                metadata['preemptions'] = preemptions
+
+            if node_pattern is not None:
+                metadata['nodename_pattern'] = node_pattern

             data[url] = (nodes, metadata)
         else:
@@ -716,10 +477,6 @@ def gen_json(node_info, scan_only_mode=False):
     for extension in extensions:
         node_list_json_path = os.path.join(temp_dir, extension, 'node_list.json')
         if os.path.exists(node_list_json_path):
-            # Skip if extension not in node_info (scan-only mode with limited URLs)
-            if extension not in node_info:
-                continue
-
             git_url, title, preemptions, node_pattern = node_info[extension]

             with open(node_list_json_path, 'r', encoding='utf-8') as f:
@@ -749,16 +506,14 @@ def gen_json(node_info, scan_only_mode=False):
                 print("------------------------------------------------------")
             node_list_json = {}

-            # Conditionally add metadata based on mode
-            if not scan_only_mode:
-                metadata_in_url['title_aux'] = title
+            metadata_in_url['title_aux'] = title

             if preemptions is not None:
-                metadata_in_url['preemptions'] = preemptions
+                metadata['preemptions'] = preemptions

-            if node_pattern is not None:
-                metadata_in_url['nodename_pattern'] = node_pattern
+            if node_pattern is not None:
+                metadata_in_url['nodename_pattern'] = node_pattern

             nodes = list(nodes)
             nodes.sort()
             data[git_url] = (nodes, metadata_in_url)
@@ -768,53 +523,12 @@ def gen_json(node_info, scan_only_mode=False):
         json.dump(data, file, indent=4, sort_keys=True)


-if __name__ == "__main__":
-    # Parse arguments
-    args = parse_arguments()
-
-    # Determine mode
-    scan_only_mode = args.scan_only is not None
-    url_list_file = args.scan_only if scan_only_mode else None
-
-    # Determine temp_dir
-    if args.temp_dir:
-        temp_dir = args.temp_dir
-    elif args.temp_dir_positional:
-        temp_dir = args.temp_dir_positional
-    else:
-        temp_dir = os.path.join(os.getcwd(), ".tmp")
-
-    if not os.path.exists(temp_dir):
-        os.makedirs(temp_dir)
-
-    # Determine skip flags
-    skip_update = args.skip_update or args.skip_all
-    skip_stat_update = args.skip_stat_update or args.skip_all or scan_only_mode
-
-    if not skip_stat_update:
-        auth = Auth.Token(os.environ.get('GITHUB_TOKEN'))
-        g = Github(auth=auth)
-    else:
-        g = None
-
-    print("### ComfyUI Manager Node Scanner ###")
-
-    if scan_only_mode:
-        print(f"\n# [Scan-Only Mode] Processing URL list: {url_list_file}\n")
-    else:
-        print("\n# [Standard Mode] Updating extensions\n")
-
-    # Update/clone repositories and collect node info
-    updated_node_info = update_custom_nodes(scan_only_mode, url_list_file)
-
-    print("\n# Generating 'extension-node-map.json'...\n")
-
-    # Generate extension-node-map.json
-    gen_json(updated_node_info, scan_only_mode)
-
-    print("\n✅ DONE.\n")
-
-    if scan_only_mode:
-        print("Output: extension-node-map.json (node mappings only)")
-    else:
-        print("Output: extension-node-map.json (full metadata)")
+print("### ComfyUI Manager Node Scanner ###")
+
+print("\n# Updating extensions\n")
+updated_node_info = update_custom_nodes()
+
+print("\n# 'extension-node-map.json' file is generated.\n")
+gen_json(updated_node_info)
+
+print("\nDONE.\n")