Compare commits


25 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Dr.Lt.Data | 285d0c067f | fix: message fix | 2024-11-14 18:34:50 +09:00 |
| Dr.Lt.Data | a6956ddafb | Merge branch 'main' into manager-ext | 2024-11-14 02:27:04 +09:00 |
| Dr.Lt.Data | f2bc05f5f1 | Merge branch 'feat/cnr' into manager-ext | 2024-11-11 16:52:03 +09:00 |
| Dr.Lt.Data | a534296d97 | Merge branch 'feat/cnr' into manager-ext | 2024-11-11 13:43:45 +09:00 |
| Dr.Lt.Data | 722b40df80 | add comfyui-manager to favorites | 2024-11-05 20:02:38 +09:00 |
| Dr.Lt.Data | 8c58353e25 | Merge branch 'feat/cnr' into manager-ext | 2024-11-05 19:52:28 +09:00 |
| Dr.Lt.Data | e3aea8c3e2 | move model manager backend code to manager-core | 2024-11-05 19:51:09 +09:00 |
| Dr.Lt.Data | 3b8b91d648 | Merge branch 'feat/cnr' into manager-ext | 2024-10-30 03:53:41 +09:00 |
| Dr.Lt.Data | a735f60790 | provide addtional channels to the manager-core | 2024-10-27 23:46:38 +09:00 |
| Dr.Lt.Data | 7bc1a944af | Merge branch 'main' into manager-ext | 2024-10-20 14:07:49 +09:00 |
| Dr.Lt.Data | 164f25fb43 | fix: robust config writer | 2024-10-16 12:38:44 +09:00 |
| Dr.Lt.Data | 57861e9cbd | Merge branch 'feat/cnr' into manager-ext | 2024-10-16 08:37:16 +09:00 |
| Dr.Lt.Data | fd62ada1a6 | Merge branch 'feat/cnr' into manager-ext | 2024-10-15 23:36:51 +09:00 |
| Dr.Lt.Data | 929453d105 | MODIFIED: remove config.ini to <user_path>/default/manager-ext.ini / FIXED: confusing of config handler with manager-core | 2024-10-14 00:31:16 +09:00 |
| Dr.Lt.Data | 559d4c1185 | Merge branch 'feat/cnr' into manager-ext | 2024-10-13 17:07:01 +09:00 |
| Dr.Lt.Data | c1fc8aabdc | Merge branch 'feat/cnr' into manager-ext | 2024-10-08 19:48:34 +09:00 |
| Dr.Lt.Data | 851742e5e7 | add migration code | 2024-10-05 16:50:26 +09:00 |
| Dr.Lt.Data | adf3f8d094 | Merge branch 'feat/cnr' into manager-ext | 2024-10-05 15:36:35 +09:00 |
| Dr.Lt.Data | 95df2ad212 | fix: invalid components path | 2024-09-26 10:17:28 +09:00 |
| Dr.Lt.Data | b333342d5f | Merge branch 'feat/cnr' into manager-ext | 2024-09-26 09:52:53 +09:00 |
| Dr.Lt.Data | e34b59f16d | Merge branch 'feat/cnr' into manager-ext | 2024-09-24 09:04:26 +09:00 |
| Dr.Lt.Data | f8306c0896 | remove legacy code | 2024-09-24 02:50:00 +09:00 |
| Dr.Lt.Data | f6572e3f5a | Merge branch 'feat/cnr' into manager-ext | 2024-09-24 02:21:11 +09:00 |
| Dr.Lt.Data | c694764c7a | update README | 2024-09-23 02:31:22 +09:00 |
| Dr.Lt.Data | da42eca04a | wip: remove manager core code | 2024-09-23 02:22:00 +09:00 |
145 changed files with 35988 additions and 150967 deletions


@@ -1 +0,0 @@
PYPI_TOKEN=your-pypi-token


@@ -1,70 +0,0 @@
name: CI

on:
  push:
    branches: [ main, feat/*, fix/* ]
  pull_request:
    branches: [ main ]

jobs:
  validate-openapi:
    name: Validate OpenAPI Specification
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Check if OpenAPI changed
        id: openapi-changed
        uses: tj-actions/changed-files@v44
        with:
          files: openapi.yaml
      - name: Setup Node.js
        if: steps.openapi-changed.outputs.any_changed == 'true'
        uses: actions/setup-node@v4
        with:
          node-version: '18'
      - name: Install Redoc CLI
        if: steps.openapi-changed.outputs.any_changed == 'true'
        run: |
          npm install -g @redocly/cli
      - name: Validate OpenAPI specification
        if: steps.openapi-changed.outputs.any_changed == 'true'
        run: |
          redocly lint openapi.yaml

  code-quality:
    name: Code Quality Checks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch all history for proper diff
      - name: Get changed Python files
        id: changed-py-files
        uses: tj-actions/changed-files@v44
        with:
          files: |
            **/*.py
          files_ignore: |
            comfyui_manager/legacy/**
      - name: Setup Python
        if: steps.changed-py-files.outputs.any_changed == 'true'
        uses: actions/setup-python@v5
        with:
          python-version: '3.9'
      - name: Install dependencies
        if: steps.changed-py-files.outputs.any_changed == 'true'
        run: |
          pip install ruff
      - name: Run ruff linting on changed files
        if: steps.changed-py-files.outputs.any_changed == 'true'
        run: |
          echo "Changed files: ${{ steps.changed-py-files.outputs.all_changed_files }}"
          echo "${{ steps.changed-py-files.outputs.all_changed_files }}" | xargs -r ruff check


@@ -1,58 +0,0 @@
name: Publish to PyPI

on:
  workflow_dispatch:
  push:
    branches:
      - manager-v4
    paths:
      - "pyproject.toml"

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'ltdrdata' || github.repository_owner == 'Comfy-Org' }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'
      - name: Install build dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install build twine
      - name: Get current version
        id: current_version
        run: |
          CURRENT_VERSION=$(grep -oP '^version = "\K[^"]+' pyproject.toml)
          echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          echo "Current version: $CURRENT_VERSION"
      - name: Build package
        run: python -m build
      # - name: Create GitHub Release
      #   id: create_release
      #   uses: softprops/action-gh-release@v2
      #   env:
      #     GITHUB_TOKEN: ${{ github.token }}
      #   with:
      #     files: dist/*
      #     tag_name: v${{ steps.current_version.outputs.version }}
      #     draft: false
      #     prerelease: false
      #     generate_release_notes: true
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc
        with:
          password: ${{ secrets.PYPI_TOKEN }}
          skip-existing: true
          verbose: true

.github/workflows/publish.yml (new file)

@@ -0,0 +1,21 @@
name: Publish to Comfy registry
on:
  workflow_dispatch:
  push:
    branches:
      - main
    paths:
      - "pyproject.toml"

jobs:
  publish-node:
    name: Publish Custom Node to registry
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Publish Custom Node
        uses: Comfy-Org/publish-node-action@main
        with:
          ## Add your own personal access token to your Github Repository secrets and reference it here.
          personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}


@@ -1,23 +0,0 @@
name: Python Linting
on: [push, pull_request]

jobs:
  ruff:
    name: Run Ruff
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.x
      - name: Install Ruff
        run: pip install ruff
      - name: Run Ruff
        run: ruff check .

.gitignore

@@ -17,8 +17,3 @@ github-stats-cache.json
pip_overrides.json
*.json
check2.sh
/venv/
build
dist
*.egg-info
.env


@@ -1,47 +0,0 @@
## Testing Changes
1. Activate the ComfyUI environment.
2. Build package locally after making changes.
```bash
# from inside the ComfyUI-Manager directory, with the ComfyUI environment activated
python -m build
```
3. Install the package locally in the ComfyUI environment.
```bash
# Uninstall existing package
pip uninstall comfyui-manager
# Install the local package
pip install dist/comfyui-manager-*.whl
```
4. Start ComfyUI.
```bash
# after navigating to the ComfyUI directory
python main.py
```
## Manually Publish Test Version to PyPI
1. Set the `PYPI_TOKEN` environment variable in the env file.
2. If manually publishing, you likely want to use a release candidate version, so set the version in [pyproject.toml](pyproject.toml) to something like `0.0.1rc1`.
3. Build the package.
```bash
python -m build
```
4. Upload the package to PyPI.
```bash
python -m twine upload dist/* --username __token__ --password $PYPI_TOKEN
```
5. View at https://pypi.org/project/comfyui-manager/


@@ -1,15 +0,0 @@
include comfyui_manager/js/*
include comfyui_manager/*.json
include comfyui_manager/glob/*
include LICENSE.txt
include README.md
include requirements.txt
include pyproject.toml
include custom-node-list.json
include extension-node-list.json
include extras.json
include github-stats.json
include model-list.json
include alter-list.json
include comfyui_manager/channels.list.template
include comfyui_manager/pip-policy.json

README.md

@@ -1,51 +1,60 @@
# ComfyUI Manager
# ComfyUI Manager (Extension)
**ComfyUI-Manager** is an extension designed to enhance the usability of [ComfyUI](https://github.com/comfyanonymous/ComfyUI). It offers management functions to **install, remove, disable, and enable** various custom nodes of ComfyUI. Furthermore, this extension provides a hub feature and convenience functions to access a wide range of information within ComfyUI.
**ComfyUI-Manager (Extension)** expands the functionality of [manager-core](https://github.com/Comfy-Org/manager-core), allowing users to access the existing ComfyUI-Manager features.
![menu](misc/menu.jpg)
![menu](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/refs/heads/Main/ComfyUI-Manager/images/dialog.jpg)
## NOTICE
* V4.0: Modify the structure to be installable via pip instead of using git clone.
* V3.16: Support for `uv` has been added. Set `use_uv` in `config.ini`.
* V3.10: `double-click feature` is removed
* This feature has been moved to https://github.com/ltdrdata/comfyui-connection-helper
* V3.3.2: Overhauled. Officially supports [https://registry.comfy.org/](https://registry.comfy.org/).
* You can see whole nodes info on [ComfyUI Nodes Info](https://ltdrdata.github.io/) page.
* V3.0: ComfyUI-Manager (Extension)
## Installation
* When installing the latest ComfyUI, it will be automatically installed as a dependency, so manual installation is no longer necessary.
### Installation [method 1]: via manager-core (Recommended)
* Manual installation of the nightly version:
* Clone to a temporary directory (**Note:** Do **not** clone into `ComfyUI/custom_nodes`.)
```
git clone https://github.com/Comfy-Org/ComfyUI-Manager
```
* Install via pip
```
cd ComfyUI-Manager
pip install .
```
* See also: https://github.com/Comfy-Org/comfy-cli
Search for ComfyUI-Manager in the custom node installation feature of manager-core and install it.
## Front-end
### Installation [method 2]: manual (Not Recommended)
* The built-in front-end of ComfyUI-Manager is the legacy front-end. The front-end for ComfyUI-Manager is now provided via [ComfyUI Frontend](https://github.com/Comfy-Org/ComfyUI_frontend).
* To enable the legacy front-end, set the environment variable `ENABLE_LEGACY_COMFYUI_MANAGER_FRONT` to `true` before running.
To install ComfyUI-Manager in addition to an existing installation of ComfyUI, follow these steps:
1. Go to the `ComfyUI/custom_nodes` directory in a terminal (cmd)
2. `git clone https://github.com/ltdrdata/ComfyUI-Manager.git`
3. Restart ComfyUI
### Installation Precautions
* **DO**: `ComfyUI-Manager` files must be accurately located in the path `ComfyUI/custom_nodes/ComfyUI-Manager`
* Installing in a compressed file format is not recommended.
* **DON'T**: Decompress directly into the `ComfyUI/custom_nodes` location, resulting in the Manager contents like `__init__.py` being placed directly in that directory.
* You have to remove all ComfyUI-Manager files from `ComfyUI/custom_nodes`
* **DON'T**: Decompress into a nested path such as `ComfyUI/custom_nodes/ComfyUI-Manager/ComfyUI-Manager`.
* You have to move `ComfyUI/custom_nodes/ComfyUI-Manager/ComfyUI-Manager` to `ComfyUI/custom_nodes/ComfyUI-Manager`
* **DON'T**: Decompress into a renamed path such as `ComfyUI/custom_nodes/ComfyUI-Manager-main`.
* In such cases, `ComfyUI-Manager` may run, but it won't be recognized by `ComfyUI-Manager` itself, updates cannot be performed, and there is a risk of duplicate installations.
* You have to rename `ComfyUI/custom_nodes/ComfyUI-Manager-main` to `ComfyUI/custom_nodes/ComfyUI-Manager`
## Colab Notebook
This repository provides Colab notebooks that allow you to install and use ComfyUI, including ComfyUI-Manager. To use ComfyUI, [click on this link](https://colab.research.google.com/github/ltdrdata/ComfyUI-Manager/blob/main/notebooks/comfyui_colab_with_manager.ipynb).
* Support for installing ComfyUI
* Support for basic installation of ComfyUI-Manager
* Support for automatically installing dependencies of custom nodes upon restarting Colab notebooks.
## How To Use
1. Click "Manager" button on main menu
![mainmenu](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/topbar.jpg)
![mainmenu](misc/main.jpg)
2. If you click on 'Install Custom Nodes' or 'Install Models', an installer dialog will open.
![menu](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/refs/heads/Main/ComfyUI-Manager/images/dialog.jpg)
![menu](misc/menu.jpg)
* There are three DB modes: `DB: Channel (1day cache)`, `DB: Local`, and `DB: Channel (remote)`.
* `Channel (1day cache)` utilizes Channel cache information with a validity period of one day to quickly display the list.
@@ -61,9 +70,9 @@
3. Click 'Install' or 'Try Install' button.
![node-install-dialog](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/custom-nodes.jpg)
![node-install-dialog](misc/custom-nodes.jpg)
![model-install-dialog](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/models.jpg)
![model-install-dialog](misc/models.png)
* Installed: This item is already installed.
* Install: Clicking this button will install the item.
@@ -73,56 +82,39 @@
* Channel settings have a broad impact, affecting not only the node list but also all functions like "Update all."
* Conflicted Nodes with a yellow background show a list of nodes conflicting with other extensions in the respective extension. This issue needs to be addressed by the developer, and users should be aware that due to these conflicts, some nodes may not function correctly and may need to be installed accordingly.
4. Share
![menu](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/topbar.jpg) ![share](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/share.jpg)
4. If you set the `Badge:` item in the menu as `Badge: Nickname`, `Badge: Nickname (hide built-in)`, `Badge: #ID Nickname`, `Badge: #ID Nickname (hide built-in)` the information badge will be displayed on the node.
* When selecting (hide built-in), it hides the 🦊 icon, which signifies built-in nodes.
* Nodes without any indication on the badge are custom nodes that Manager cannot recognize.
* `Badge: Nickname` displays the nickname of custom nodes, while `Badge: #ID Nickname` also includes the internal ID of the node.
![model-install-dialog](misc/nickname.jpg)
5. Share
![menu](misc/main.jpg) ![share](misc/share.jpg)
* You can share the workflow by clicking the Share button at the bottom of the main menu or selecting Share Output from the Context Menu of the Image node.
* Currently, it supports sharing via [https://comfyworkflows.com/](https://comfyworkflows.com/),
[https://openart.ai](https://openart.ai/workflows/dev), [https://youml.com](https://youml.com)
as well as through the Matrix channel.
![menu](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/share-setting.jpg)
![menu](misc/share-setting.jpg)
* Through the Share settings in the Manager menu, you can configure the behavior of the Share button in the Main menu or Share Output button on Context Menu.
* Through the Share settings in the Manager menu, you can configure the behavior of the Share button in the Main menu or Share Ouput button on Context Menu.
* `None`: hide from Main menu
* `All`: Show a dialog where the user can select a title for sharing.
## Paths
In `ComfyUI-Manager` V3.0 and later, configuration files and dynamically generated files are located under `<USER_DIRECTORY>/default/ComfyUI-Manager/`.
* <USER_DIRECTORY>
* If executed without any options, the path defaults to ComfyUI/user.
* It can be set using --user-directory <USER_DIRECTORY>.
* Basic config files: `<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini`
* Configurable channel lists: `<USER_DIRECTORY>/default/ComfyUI-Manager/channels.ini`
* Configurable pip overrides: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_overrides.json`
* Configurable pip blacklist: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_blacklist.list`
* Configurable pip auto fix: `<USER_DIRECTORY>/default/ComfyUI-Manager/pip_auto_fix.list`
* Saved snapshot files: `<USER_DIRECTORY>/default/ComfyUI-Manager/snapshots`
* Startup script files: `<USER_DIRECTORY>/default/ComfyUI-Manager/startup-scripts`
* Component files: `<USER_DIRECTORY>/default/ComfyUI-Manager/components`
## `extra_model_paths.yaml` Configuration
The following settings are applied based on the section marked as `is_default`.
* `custom_nodes`: Path for installing custom nodes
* Importing does not need to adhere to the path set as `is_default`, but this is the path where custom nodes are installed by the `ComfyUI Nodes Manager`.
* `download_model_base`: Path for downloading models
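Below is a minimal sketch, assuming a hand-written `extra_model_paths.yaml` and the PyYAML package, of how the section marked `is_default` could be resolved to the two paths described above. It is illustrative only, not the loader used by ComfyUI or ComfyUI-Manager.
```python
import yaml  # PyYAML

def resolve_default_paths(yaml_path="extra_model_paths.yaml"):
    """Illustrative: return (custom_nodes, download_model_base) from the section marked is_default."""
    with open(yaml_path, "r", encoding="utf-8") as f:
        config = yaml.safe_load(f) or {}

    for section_name, section in config.items():
        if isinstance(section, dict) and section.get("is_default"):
            # Only the section flagged as is_default is used for node installs and model downloads.
            return section.get("custom_nodes"), section.get("download_model_base")
    return None, None
```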
## Snapshot-Manager
* When you press `Save snapshot` or use `Update All` on `Manager Menu`, the current installation status snapshot is saved.
* Snapshot file dir: `<USER_DIRECTORY>/default/ComfyUI-Manager/snapshots`
* Snapshot file dir: `ComfyUI-Manager/snapshots`
* You can rename the snapshot file.
* Press the "Restore" button to revert to the installation status of the respective snapshot.
* However, for custom nodes not managed by Git, snapshot support is incomplete.
* When you press `Restore`, it will take effect on the next ComfyUI startup.
* The selected snapshot file is saved in `<USER_DIRECTORY>/default/ComfyUI-Manager/startup-scripts/restore-snapshot.json`, and upon restarting ComfyUI, the snapshot is applied and then deleted.
* The selected snapshot file is saved in `ComfyUI-Manager/startup-scripts/restore-snapshot.json`, and upon restarting ComfyUI, the snapshot is applied and then deleted.
![model-install-dialog](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/snapshot.jpg)
![model-install-dialog](misc/snapshot.jpg)
## cm-cli: command line tools for power user
@@ -139,18 +131,48 @@ The following settings are applied based on the section marked as `is_default`.
## Custom node support guide
* **NOTICE:**
- You should no longer assume that the GitHub repository name will match the subdirectory name under `custom_nodes`. The name of the subdirectory under `custom_nodes` will now use the normalized name from the `name` field in `pyproject.toml`.
- Avoid relying on directory names for imports whenever possible.
* Currently, the system operates by cloning the git repository and sequentially installing the dependencies listed in requirements.txt using pip, followed by invoking the install.py script. In the future, we plan to discuss and determine the specifications for supporting custom nodes.
* https://docs.comfy.org/registry/overview
* https://github.com/Comfy-Org/rfcs
* Please submit a pull request to update either the custom-node-list.json or model-list.json file.
**Special purpose files** (optional)
* `pyproject.toml` - Spec file for comfyregistry.
* The scanner currently provides a detection function for missing nodes, which is capable of detecting nodes described by the following two patterns.
```
NODE_CLASS_MAPPINGS = {
"ExecutionSwitch": ExecutionSwitch,
"ExecutionBlocker": ExecutionBlocker,
...
}
NODE_CLASS_MAPPINGS.update({
"UniFormer-SemSegPreprocessor": Uniformer_SemSegPreprocessor,
"SemSegPreprocessor": Uniformer_SemSegPreprocessor,
})
```
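As a rough, hypothetical illustration of how these two patterns can be detected statically (this is not the actual scanner), a simple regex pass over the source could collect the mapping keys:
```python
import re

# Hypothetical helper: collect node names from NODE_CLASS_MAPPINGS = {...} and NODE_CLASS_MAPPINGS.update({...}).
MAPPING_BLOCK = re.compile(r"NODE_CLASS_MAPPINGS(?:\.update\()?\s*=?\s*\{(.*?)\}", re.S)
KEY = re.compile(r"[\"']([^\"']+)[\"']\s*:")

def detect_nodes(source: str):
    names = []
    for block in MAPPING_BLOCK.findall(source):
        names.extend(KEY.findall(block))
    return names
```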
* Or you can manually provide a `node_list.json` file.
* When you write a docstring in the header of the .py file for the Node as follows, it will be used for managing the database in the Manager.
* Currently, only the `nickname` is being used, but other parts will also be utilized in the future.
* The `nickname` will be the name displayed on the badge of the node.
* If there is no `nickname`, the title will be truncated to 20 characters and used instead.
```
"""
@author: Dr.Lt.Data
@title: Impact Pack
@nickname: Impact Pack
@description: This extension offers various detector nodes and detailer nodes that allow you to configure a workflow that automatically enhances facial details. And provide iterative upscaler.
"""
```
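The following is a hedged sketch of how such a header could be read: it pulls `@nickname`, falling back to a 20-character truncation of `@title` as described above. The function is illustrative and is not the Manager's actual parser.
```python
import re

def read_badge_nickname(py_source: str):
    """Illustrative: extract @nickname (or a truncated @title) from a node pack's header docstring."""
    m = re.match(r'\s*("""|\'\'\')(.*?)\1', py_source, re.S)
    if not m:
        return None
    tags = dict(re.findall(r"@(\w+)\s*:\s*(.+)", m.group(2)))
    if tags.get("nickname"):
        return tags["nickname"].strip()
    title = tags.get("title", "").strip()
    return title[:20] if title else None  # fall back to the title truncated to 20 characters
```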
* **Special purpose files** (optional)
* `node_list.json` - When your custom nodes pattern of NODE_CLASS_MAPPINGS is not conventional, it is used to manually provide a list of nodes for reference. ([example](https://github.com/melMass/comfy_mtb/raw/main/node_list.json))
* `requirements.txt` - When installing, these pip requirements will be installed automatically
* `install.py` - When installing, it is automatically called
* `uninstall.py` - When uninstalling, it is automatically called
* `disable.py` - When disabled, it is automatically called
* If your custom node sets up a `.js` file during installation, it is recommended to provide this script so the node can be disabled cleanly.
* `enable.py` - When enabled, it is automatically called
* **All scripts are executed from the root path of the corresponding custom node.**
@@ -169,12 +191,12 @@ The following settings are applied based on the section marked as `is_default`.
}
```
* `<current timestamp>` Ensure that the timestamp is always unique.
* "components" should have the same structure as the content of the file stored in `<USER_DIRECTORY>/default/ComfyUI-Manager/components`.
* "components" should have the same structure as the content of the file stored in ComfyUI-Manager/components.
* `<component name>`: The name should be in the format `<prefix>::<node name>`.
* `<component nodedata>`: The nodedata of the group node.
* `<version>`: Only two formats are allowed: `major.minor.patch` or `major.minor`. (e.g. `1.0`, `2.2.1`)
* `<datetime>`: Saved time
* `<packname>`: If the packname is not empty, the category becomes packname/workflow, and it is saved in the <packname>.pack file in `<USER_DIRECTORY>/default/ComfyUI-Manager/components`.
* `<packname>`: If the packname is not empty, the category becomes packname/workflow, and it is saved in the <packname>.pack file in ComfyUI-Manager/components.
* `<category>`: If there is neither a category nor a packname, it is saved in the components category.
```
"version":"1.0",
@@ -191,38 +213,11 @@ The following settings are applied based on the section marked as `is_default`.
## Support of missing nodes installation
![missing-menu](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/missing-menu.jpg)
![missing-menu](misc/missing-menu.jpg)
* When you click on the ```Install Missing Custom Nodes``` button in the menu, it displays a list of extension nodes that contain nodes not currently present in the workflow.
![missing-list](https://raw.githubusercontent.com/ltdrdata/ComfyUI-extension-tutorials/Main/ComfyUI-Manager/images/missing-list.jpg)
# Config
* You can modify the `config.ini` file to apply the settings for ComfyUI-Manager.
* The path to the `config.ini` used by ComfyUI-Manager is displayed in the startup log messages.
* See also: [https://github.com/ltdrdata/ComfyUI-Manager#paths]
* Configuration options:
```
[default]
git_exe = <Manually specify the path to the git executable. If left empty, the default git executable path will be used.>
use_uv = <Use uv instead of pip for dependency installation.>
default_cache_as_channel_url = <Determines whether to retrieve the DB designated as channel_url at startup>
bypass_ssl = <Set to True if SSL errors occur to disable SSL.>
file_logging = <Configure whether to create a log file used by ComfyUI-Manager.>
windows_selector_event_loop_policy = <If an event loop error occurs on Windows, set this to True.>
model_download_by_agent = <When downloading models, use an agent instead of torchvision_download_url.>
downgrade_blacklist = <Set a list of packages to prevent downgrades. List them separated by commas.>
security_level = <Set the security level => strong|normal|normal-|weak>
always_lazy_install = <Whether to perform dependency installation on restart even in environments other than Windows.>
network_mode = <Set the network mode => public|private|offline|personal_cloud>
```
* network_mode:
- public: An environment that uses a typical public network.
- private: An environment that uses a closed network, where a private node DB is configured via `channel_url`. (Uses cache if available)
- offline: An environment that does not use any external connections when using an offline network. (Uses cache if available)
- personal_cloud: Applies relaxed security features in cloud environments such as Google Colab or Runpod, where strong security is not required.
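For reference, here is a minimal sketch of reading a few of these options with Python's `configparser`; the `[default]` section and key names follow the listing above, while the path is only a placeholder (the real location is printed in the startup log, see the Paths section).
```python
import configparser

# Placeholder path; the actual config.ini location is shown in the ComfyUI startup log.
CONFIG_PATH = "<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini"

parser = configparser.ConfigParser()
parser.read(CONFIG_PATH)
section = parser["default"] if parser.has_section("default") else {}

use_uv = str(section.get("use_uv", "False")).lower() == "true"
security_level = section.get("security_level", "normal")
network_mode = section.get("network_mode", "public")
```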
![missing-list](misc/missing-list.jpg)
## Additional Feature
@@ -253,40 +248,13 @@ The following settings are applied based on the section marked as `is_default`.
* Custom pip mapping
* When you create the `pip_overrides.json` file, it changes the installation of specific pip packages to installations defined by the user.
* Please refer to the `pip_overrides.json.template` file (a rough sketch of how the overrides and blacklist could be applied appears after this list).
* Prevent the installation of specific pip packages
* List the package names one per line in the `pip_blacklist.list` file.
* Automatically Restoring pip Installation
* If you list pip spec requirements in `pip_auto_fix.list`, similar to `requirements.txt`, it will automatically restore the specified versions when starting ComfyUI or when versions get mismatched during various custom node installations.
* `--index-url` can be used.
* Use `aria2` as downloader
* [howto](docs/en/use_aria2.md)
* If you add the item `skip_migration_check = True` to `config.ini`, it will not check whether there are nodes that can be migrated at startup.
* This option can be used if performance issues occur in a Colab+GDrive environment.
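Below is a hedged sketch, using hypothetical file contents and paths, of how the `pip_overrides.json` mapping and the `pip_blacklist.list` entries described above could be applied before installing a package list; it is not the Manager's actual installer code.
```python
import json
import os

# Hypothetical locations; the real files live under <USER_DIRECTORY>/default/ComfyUI-Manager/ (see Paths above).
OVERRIDES_PATH = "pip_overrides.json"
BLACKLIST_PATH = "pip_blacklist.list"

def resolve_pip_packages(requested):
    """Illustrative: apply user-defined overrides and drop blacklisted packages."""
    overrides = {}
    if os.path.exists(OVERRIDES_PATH):
        with open(OVERRIDES_PATH, encoding="utf-8") as f:
            overrides = json.load(f)  # e.g. {"torch": "torch==2.3.1"} (hypothetical content)

    blacklist = set()
    if os.path.exists(BLACKLIST_PATH):
        with open(BLACKLIST_PATH, encoding="utf-8") as f:
            blacklist = {line.strip() for line in f if line.strip()}

    resolved = []
    for pkg in requested:
        if pkg in blacklist:
            continue  # never install blacklisted packages
        resolved.append(overrides.get(pkg, pkg))  # substitute the user-defined spec when present
    return resolved
```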
## Environment Variables
The following features can be configured using environment variables:
* **COMFYUI_PATH**: The installation path of ComfyUI
* **GITHUB_ENDPOINT**: Reverse proxy configuration for environments with limited access to GitHub
* **HF_ENDPOINT**: Reverse proxy configuration for environments with limited access to Hugging Face
### Example 1:
Redirecting `https://github.com/ltdrdata/ComfyUI-Impact-Pack` to `https://mirror.ghproxy.com/https://github.com/ltdrdata/ComfyUI-Impact-Pack`
```
GITHUB_ENDPOINT=https://mirror.ghproxy.com/https://github.com
```
#### Example 2:
Changing `https://huggingface.co/path/to/somewhere` to `https://some-hf-mirror.com/path/to/somewhere`
```
HF_ENDPOINT=https://some-hf-mirror.com
```
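As a hedged illustration of the prefix rewriting these two variables imply (following Example 1 and Example 2 above; this is not the Manager's actual downloader code):
```python
import os

def apply_endpoint_overrides(url: str) -> str:
    """Illustrative: rewrite GitHub / Hugging Face URLs using GITHUB_ENDPOINT / HF_ENDPOINT."""
    github_endpoint = os.environ.get("GITHUB_ENDPOINT")
    hf_endpoint = os.environ.get("HF_ENDPOINT")
    if github_endpoint and url.startswith("https://github.com"):
        return github_endpoint + url[len("https://github.com"):]
    if hf_endpoint and url.startswith("https://huggingface.co"):
        return hf_endpoint + url[len("https://huggingface.co"):]
    return url

# With GITHUB_ENDPOINT=https://mirror.ghproxy.com/https://github.com:
#   https://github.com/ltdrdata/ComfyUI-Impact-Pack
#     -> https://mirror.ghproxy.com/https://github.com/ltdrdata/ComfyUI-Impact-Pack
```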
## Scanner
When you run the `scan.sh` script:
@@ -304,42 +272,43 @@ When you run the `scan.sh` script:
## Troubleshooting
* If your `git.exe` is installed in a specific location other than system git, please install ComfyUI-Manager and run ComfyUI. Then, specify the path including the file name in `git_exe = ` in the `<USER_DIRECTORY>/default/ComfyUI-Manager/config.ini` file that is generated.
* If your `git.exe` is installed in a specific location other than system git, please install ComfyUI-Manager and run ComfyUI. Then, specify the path including the file name in `git_exe = ` in the ComfyUI-Manager/config.ini file that is generated.
* If updating ComfyUI-Manager itself fails, please go to the **ComfyUI-Manager** directory and execute the command `git update-ref refs/remotes/origin/main a361cc1 && git fetch --all && git pull`.
* Alternatively, download the update-fix.py script from [update-fix.py](https://github.com/ltdrdata/ComfyUI-Manager/raw/main/scripts/update-fix.py) and place it in the ComfyUI-Manager directory. Then, run it using your Python command.
For the portable version, use `..\..\..\python_embeded\python.exe update-fix.py`.
* For cases where nodes like `PreviewTextNode` from `ComfyUI_Custom_Nodes_AlekPet` are only supported as front-end nodes, we currently do not provide missing nodes for them.
* Currently, `vid2vid` is not being updated, causing compatibility issues.
* If you encounter the error message `Overlapped Object has pending operation at deallocation on Comfyui Manager load` under Windows
* Edit `config.ini` file: add `windows_selector_event_loop_policy = True`
* If an `SSL: CERTIFICATE_VERIFY_FAILED` error occurs
* Edit `config.ini` file: add `bypass_ssl = True`
## Security policy
The security settings are applied based on whether the ComfyUI server's listener is non-local and whether the network mode is set to `personal_cloud`.
* **non-local**: When the server is launched with `--listen` and is bound to a network range other than the local `127.` range, allowing remote IP access.
* **personal\_cloud**: When the `network_mode` is set to `personal_cloud`.
### Risky Level Table
| Risky Level | features |
|-------------|---------------------------------------------------------------------------------------------------------------------------------------|
| high+ | * `Install via git url`, `pip install`<BR>* Installation of nodepack registered not in the `default channel`. |
| high | * Fix nodepack |
| middle+ | * Uninstall/Update<BR>* Installation of nodepack registered in the `default channel`.<BR>* Restore/Remove Snapshot<BR>* Install model |
| middle | * Restart |
| low | * Update ComfyUI |
### Security Level Table
| Security Level | local | non-local (personal_cloud) | non-local (not personal_cloud) |
|----------------|--------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------|--------------------------------|
| strong | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed |
| normal | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available
| normal- | * All features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available
| weak | * All features are available | * All features are available | * `high+` and `middle+` level risky features are not allowed<BR>* `high`, `middle` and `low` level risky features are available
* Edit `config.ini` file: add `security_level = <LEVEL>`
* `strong`
* doesn't allow `high` and `middle` level risky features
* `normal`
* doesn't allow `high` level risky features
* `middle` level risky features are available
* `normal-`
* doesn't allow `high` level risky features if `--listen` is specified and does not start with `127.`
* `middle` level risky features are available
* `weak`
* all features are available
* `high` level risky features
* `Install via git url`, `pip install`
* Installation of custom nodes registered not in the `default channel`.
* Fix custom nodes
* `middle` level risky features
* Uninstall/Update
* Installation of custom nodes registered in the `default channel`.
* Restore/Remove Snapshot
* Restart
* `low` level risky features
* Update ComfyUI
# Disclaimer

__init__.py (new file)

@@ -0,0 +1,37 @@
legacy_manager_core_path = None
manager_core_path = None


def is_manager_core_exists():
    global legacy_manager_core_path
    global manager_core_path

    import os
    import folder_paths

    comfy_path = os.path.dirname(folder_paths.__file__)
    legacy_manager_core_path = os.path.join(comfy_path, 'custom_nodes', 'manager-core')
    manager_core_path = legacy_manager_core_path

    manager_core_path_file = os.path.join(comfy_path, 'manager_core_path.txt')
    if os.path.exists(manager_core_path_file):
        with open(manager_core_path_file, 'r') as f:
            manager_core_path = os.path.join(f.read().strip(), 'manager-core')

    return os.path.exists(manager_core_path) or os.path.exists(legacy_manager_core_path)


if not is_manager_core_exists():
    from .modules import migration_server
    migration_server.manager_core_path = manager_core_path

    WEB_DIRECTORY = "migration_js"
    NODE_CLASS_MAPPINGS = {}
else:
    # Main code
    from .modules import manager_ext_server
    from .modules import share_3rdparty

    WEB_DIRECTORY = "js"
    NODE_CLASS_MAPPINGS = {}


__all__ = ['NODE_CLASS_MAPPINGS']

channels.list.template (new file)

@@ -0,0 +1,4 @@
legacy::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/legacy
forked::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/forked
dev::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/dev
tutorial::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/tutorial


@@ -1,49 +0,0 @@
# ComfyUI-Manager: Core Backend (glob)
This directory contains the Python backend modules that power ComfyUI-Manager, handling the core functionality of node management, downloading, security, and server operations.
## Directory Structure
- **glob/** - code for new cacheless ComfyUI-Manager
- **legacy/** - code for legacy ComfyUI-Manager
## Core Modules
- **manager_core.py**: The central implementation of management functions, handling configuration, installation, updates, and node management.
- **manager_server.py**: Implements server functionality and API endpoints for the web interface to interact with the backend.
## Specialized Modules
- **share_3rdparty.py**: Manages integration with third-party sharing platforms.
## Architecture
The backend follows a modular design pattern with clear separation of concerns:
1. **Core Layer**: Manager modules provide the primary API and business logic
2. **Utility Layer**: Helper modules provide specialized functionality
3. **Integration Layer**: Modules that connect to external systems
## Security Model
The system implements a comprehensive security framework with multiple levels:
- **Block**: Highest security - blocks most remote operations
- **High**: Allows only specific trusted operations
- **Middle**: Standard security for most users
- **Normal-**: More permissive for advanced users
- **Weak**: Lowest security for development environments
## Implementation Details
- The backend is designed to work seamlessly with ComfyUI
- Asynchronous task queuing is implemented for background operations
- The system supports multiple installation modes
- Error handling and risk assessment are integrated throughout the codebase
## API Integration
The backend exposes a REST API via `manager_server.py` that enables:
- Custom node management (install, update, disable, remove)
- Model downloading and organization
- System configuration
- Snapshot management
- Workflow component handling


@@ -1,104 +0,0 @@
import os
import logging
from aiohttp import web
from .common.manager_security import HANDLER_POLICY
from .common import manager_security
from comfy.cli_args import args
def prestartup():
from . import prestartup_script # noqa: F401
logging.info('[PRE] ComfyUI-Manager')
def start():
logging.info('[START] ComfyUI-Manager')
from .common import cm_global # noqa: F401
if args.enable_manager:
if args.enable_manager_legacy_ui:
try:
from .legacy import manager_server # noqa: F401
from .legacy import share_3rdparty # noqa: F401
from .legacy import manager_core as core
import nodes
logging.info("[ComfyUI-Manager] Legacy UI is enabled.")
nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
except Exception as e:
print("Error enabling legacy ComfyUI Manager frontend:", e)
core = None
else:
from .glob import manager_server # noqa: F401
from .glob import share_3rdparty # noqa: F401
from .glob import manager_core as core
if core is not None:
manager_security.is_personal_cloud_mode = core.get_config()['network_mode'].lower() == 'personal_cloud'
def should_be_disabled(fullpath:str) -> bool:
"""
1. Disables the legacy ComfyUI-Manager.
2. The blocklist can be expanded later based on policies.
"""
if args.enable_manager:
# In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
# It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
dir_name = os.path.basename(fullpath).lower()
if 'comfyui-manager' in dir_name:
return True
return False
def get_client_ip(request):
peername = request.transport.get_extra_info("peername")
if peername is not None:
host, port = peername
return host
return "unknown"
def create_middleware():
connected_clients = set()
is_local_mode = manager_security.is_loopback(args.listen)
@web.middleware
async def manager_middleware(request: web.Request, handler):
nonlocal connected_clients
# security policy for remote environments
prev_client_count = len(connected_clients)
client_ip = get_client_ip(request)
connected_clients.add(client_ip)
next_client_count = len(connected_clients)
if prev_client_count == 1 and next_client_count > 1:
manager_security.multiple_remote_alert()
policy = manager_security.get_handler_policy(handler)
is_banned = False
# policy check
if len(connected_clients) > 1:
if is_local_mode:
if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NON_LOCAL in policy:
is_banned = True
if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD in policy:
is_banned = not manager_security.is_personal_cloud_mode
if HANDLER_POLICY.BANNED in policy:
is_banned = True
if is_banned:
logging.warning(f"[Manager] Banning request from {client_ip}: {request.path}")
response = web.Response(text="[Manager] This request is banned.", status=403)
else:
response: web.Response = await handler(request)
return response
return manager_middleware


@@ -1,6 +0,0 @@
default::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main
recent::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/new
legacy::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/legacy
forked::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/forked
dev::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/dev
tutorial::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/tutorial

(File diff suppressed because it is too large.)


@@ -1,16 +0,0 @@
# ComfyUI-Manager: Core Backend (glob)
This directory contains the Python backend modules that power ComfyUI-Manager, handling the core functionality of node management, downloading, security, and server operations.
## Core Modules
- **manager_downloader.py**: Handles downloading operations for models, extensions, and other resources.
- **manager_util.py**: Provides utility functions used throughout the system.
## Specialized Modules
- **cm_global.py**: Maintains global variables and state management across the system.
- **cnr_utils.py**: Helper utilities for interacting with the custom node registry (CNR).
- **git_utils.py**: Git-specific utilities for repository operations.
- **node_package.py**: Handles the packaging and installation of node extensions.
- **security_check.py**: Implements the multi-level security system for installation safety.


@@ -1,117 +0,0 @@
import traceback
#
# Global Var
#
# Usage:
# import cm_global
# cm_global.variables['comfyui.revision'] = 1832
# print(f"log mode: {cm_global.variables['logger.enabled']}")
#
variables = {}
#
# Global API
#
# Usage:
# [register API]
# import cm_global
#
# def api_hello(msg):
# print(f"hello: {msg}")
# return msg
#
# cm_global.register_api('hello', api_hello)
#
# [use API]
# import cm_global
#
# test = cm_global.try_call(api='hello', msg='an example')
# print(f"'{test}' is returned")
#
APIs = {}
def register_api(k, f):
global APIs
APIs[k] = f
def try_call(**kwargs):
if 'api' in kwargs:
api_name = kwargs['api']
try:
api = APIs.get(api_name)
if api is not None:
del kwargs['api']
return api(**kwargs)
else:
print(f"WARN: The '{kwargs['api']}' API has not been registered.")
except Exception as e:
print(f"ERROR: An exception occurred while calling the '{api_name}' API.")
raise e
else:
return None
#
# Extension Info
#
# Usage:
# import cm_global
#
# cm_global.extension_infos['my_extension'] = {'version': [0, 1], 'name': 'me', 'description': 'example extension', }
#
extension_infos = {}
on_extension_registered_handlers = {}
def register_extension(extension_name, v):
global extension_infos
global on_extension_registered_handlers
extension_infos[extension_name] = v
if extension_name in on_extension_registered_handlers:
for k, f in on_extension_registered_handlers[extension_name]:
try:
f(extension_name, v)
except Exception:
print(f"[ERROR] '{k}' on_extension_registered_handlers")
traceback.print_exc()
del on_extension_registered_handlers[extension_name]
def add_on_extension_registered(k, extension_name, f):
global on_extension_registered_handlers
if extension_name in extension_infos:
try:
v = extension_infos[extension_name]
f(extension_name, v)
except Exception:
print(f"[ERROR] '{k}' on_extension_registered_handler")
traceback.print_exc()
else:
if extension_name not in on_extension_registered_handlers:
on_extension_registered_handlers[extension_name] = []
on_extension_registered_handlers[extension_name].append((k, f))
def add_on_revision_detected(k, f):
if 'comfyui.revision' in variables:
try:
f(variables['comfyui.revision'])
except Exception:
print(f"[ERROR] '{k}' on_revision_detected_handler")
traceback.print_exc()
else:
variables['cm.on_revision_detected_handler'].append((k, f))
error_dict = {}
disable_front = False


@@ -1,256 +0,0 @@
import asyncio
import json
import os
import platform
import time
from dataclasses import dataclass
from typing import List
from . import context
from . import manager_util
import requests
import toml
base_url = "https://api.comfy.org"
lock = asyncio.Lock()
is_cache_loading = False
async def get_cnr_data(cache_mode=True, dont_wait=True):
try:
return await _get_cnr_data(cache_mode, dont_wait)
except asyncio.TimeoutError:
print("A timeout occurred during the fetch process from ComfyRegistry.")
return await _get_cnr_data(cache_mode=True, dont_wait=True) # timeout fallback
async def _get_cnr_data(cache_mode=True, dont_wait=True):
global is_cache_loading
uri = f'{base_url}/nodes'
async def fetch_all():
remained = True
page = 1
full_nodes = {}
# Determine form factor based on environment and platform
is_desktop = bool(os.environ.get('__COMFYUI_DESKTOP_VERSION__'))
system = platform.system().lower()
is_windows = system == 'windows'
is_mac = system == 'darwin'
is_linux = system == 'linux'
# Get ComfyUI version tag
if is_desktop:
# extract version from pyproject.toml instead of git tag
comfyui_ver = context.get_current_comfyui_ver() or 'unknown'
else:
comfyui_ver = context.get_comfyui_tag() or 'unknown'
if is_desktop:
if is_windows:
form_factor = 'desktop-win'
elif is_mac:
form_factor = 'desktop-mac'
else:
form_factor = 'other'
else:
if is_windows:
form_factor = 'git-windows'
elif is_mac:
form_factor = 'git-mac'
elif is_linux:
form_factor = 'git-linux'
else:
form_factor = 'other'
while remained:
# Add comfyui_version and form_factor to the API request
sub_uri = f'{base_url}/nodes?page={page}&limit=30&comfyui_version={comfyui_ver}&form_factor={form_factor}'
sub_json_obj = await asyncio.wait_for(manager_util.get_data_with_cache(sub_uri, cache_mode=False, silent=True, dont_cache=True), timeout=30)
remained = page < sub_json_obj['totalPages']
for x in sub_json_obj['nodes']:
full_nodes[x['id']] = x
if page % 5 == 0:
print(f"FETCH ComfyRegistry Data: {page}/{sub_json_obj['totalPages']}")
page += 1
time.sleep(0.5)
print("FETCH ComfyRegistry Data [DONE]")
for v in full_nodes.values():
if 'latest_version' not in v:
v['latest_version'] = dict(version='nightly')
return {'nodes': list(full_nodes.values())}
if cache_mode:
is_cache_loading = True
cache_state = manager_util.get_cache_state(uri)
if dont_wait:
if cache_state == 'not-cached':
return {}
else:
print("[ComfyUI-Manager] The ComfyRegistry cache update is still in progress, so an outdated cache is being used.")
with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
return json.load(json_file)['nodes']
if cache_state == 'cached':
with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
return json.load(json_file)['nodes']
try:
json_obj = await fetch_all()
manager_util.save_to_cache(uri, json_obj)
return json_obj['nodes']
except Exception:
res = {}
print("Cannot connect to comfyregistry.")
finally:
if cache_mode:
is_cache_loading = False
return res
@dataclass
class NodeVersion:
changelog: str
dependencies: List[str]
deprecated: bool
id: str
version: str
download_url: str
def map_node_version(api_node_version):
"""
Maps node version data from API response to NodeVersion dataclass.
Args:
api_data (dict): The 'node_version' part of the API response.
Returns:
NodeVersion: An instance of NodeVersion dataclass populated with data from the API.
"""
return NodeVersion(
changelog=api_node_version.get(
"changelog", ""
), # Provide a default value if 'changelog' is missing
dependencies=api_node_version.get(
"dependencies", []
), # Provide a default empty list if 'dependencies' is missing
deprecated=api_node_version.get(
"deprecated", False
), # Assume False if 'deprecated' is not specified
id=api_node_version[
"id"
], # 'id' should be mandatory; raise KeyError if missing
version=api_node_version[
"version"
], # 'version' should be mandatory; raise KeyError if missing
download_url=api_node_version.get(
"downloadUrl", ""
), # Provide a default value if 'downloadUrl' is missing
)
def install_node(node_id, version=None):
"""
Retrieves the node version for installation.
Args:
node_id (str): The unique identifier of the node.
version (str, optional): Specific version of the node to retrieve. If omitted, the latest version is returned.
Returns:
NodeVersion: Node version data or error message.
"""
if version is None:
url = f"{base_url}/nodes/{node_id}/install"
else:
url = f"{base_url}/nodes/{node_id}/install?version={version}"
response = requests.get(url, verify=not manager_util.bypass_ssl)
if response.status_code == 200:
# Convert the API response to a NodeVersion object
return map_node_version(response.json())
else:
return None
def all_versions_of_node(node_id):
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
response = requests.get(url, verify=not manager_util.bypass_ssl)
if response.status_code == 200:
return response.json()
else:
return None
def read_cnr_info(fullpath):
try:
toml_path = os.path.join(fullpath, 'pyproject.toml')
tracking_path = os.path.join(fullpath, '.tracking')
if not os.path.exists(toml_path) or not os.path.exists(tracking_path):
return None # not valid CNR node pack
with open(toml_path, "r", encoding="utf-8") as f:
data = toml.load(f)
project = data.get('project', {})
name = project.get('name').strip().lower()
original_name = project.get('name')
# normalize version
# for example: 2.5 -> 2.5.0
version = str(manager_util.StrictVersion(project.get('version')))
urls = project.get('urls', {})
repository = urls.get('Repository')
if name and version: # repository is optional
return {
"id": name,
"original_name": original_name,
"version": version,
"url": repository
}
return None
except Exception:
return None # not valid CNR node pack
def generate_cnr_id(fullpath, cnr_id):
cnr_id_path = os.path.join(fullpath, '.git', '.cnr-id')
try:
if not os.path.exists(cnr_id_path):
with open(cnr_id_path, "w") as f:
return f.write(cnr_id)
except Exception:
print(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
def read_cnr_id(fullpath):
cnr_id_path = os.path.join(fullpath, '.git', '.cnr-id')
try:
if os.path.exists(cnr_id_path):
with open(cnr_id_path) as f:
return f.read().strip()
except Exception:
pass
return None


@@ -1,108 +0,0 @@
import sys
import os
import logging
from . import manager_util
import toml
import git
# read env vars
comfy_path: str = os.environ.get('COMFYUI_PATH')
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
if comfy_path is None:
try:
comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
os.environ['COMFYUI_PATH'] = comfy_path
except Exception:
logging.error("[ComfyUI-Manager] environment variable 'COMFYUI_PATH' is not specified.")
exit(-1)
if comfy_base_path is None:
comfy_base_path = comfy_path
channel_list_template_path = os.path.join(manager_util.comfyui_manager_path, 'channels.list.template')
git_script_path = os.path.join(manager_util.comfyui_manager_path, "git_helper.py")
manager_files_path = None
manager_config_path = None
manager_channel_list_path = None
manager_startup_script_path:str = None
manager_snapshot_path = None
manager_pip_overrides_path = None
manager_pip_blacklist_path = None
manager_components_path = None
manager_batch_history_path = None
def update_user_directory(user_dir):
global manager_files_path
global manager_config_path
global manager_channel_list_path
global manager_startup_script_path
global manager_snapshot_path
global manager_pip_overrides_path
global manager_pip_blacklist_path
global manager_components_path
global manager_batch_history_path
manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
if not os.path.exists(manager_files_path):
os.makedirs(manager_files_path)
manager_snapshot_path = os.path.join(manager_files_path, "snapshots")
if not os.path.exists(manager_snapshot_path):
os.makedirs(manager_snapshot_path)
manager_startup_script_path = os.path.join(manager_files_path, "startup-scripts")
if not os.path.exists(manager_startup_script_path):
os.makedirs(manager_startup_script_path)
manager_config_path = os.path.join(manager_files_path, 'config.ini')
manager_channel_list_path = os.path.join(manager_files_path, 'channels.list')
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
manager_components_path = os.path.join(manager_files_path, "components")
manager_util.cache_dir = os.path.join(manager_files_path, "cache")
manager_batch_history_path = os.path.join(manager_files_path, "batch_history")
if not os.path.exists(manager_util.cache_dir):
os.makedirs(manager_util.cache_dir)
if not os.path.exists(manager_batch_history_path):
os.makedirs(manager_batch_history_path)
try:
import folder_paths
update_user_directory(folder_paths.get_user_directory())
except Exception:
# fallback:
# This case is only possible when running with cm-cli, and in practice, this case is not actually used.
update_user_directory(os.path.abspath(manager_util.comfyui_manager_path))
def get_current_comfyui_ver():
"""
Extract version from pyproject.toml
"""
toml_path = os.path.join(comfy_path, 'pyproject.toml')
if not os.path.exists(toml_path):
return None
else:
try:
with open(toml_path, "r", encoding="utf-8") as f:
data = toml.load(f)
project = data.get('project', {})
return project.get('version')
except Exception:
return None
def get_comfyui_tag():
try:
with git.Repo(comfy_path) as repo:
return repo.git.describe('--tags')
except Exception:
return None


@@ -1,18 +0,0 @@
import enum


class NetworkMode(enum.Enum):
    PUBLIC = "public"
    PRIVATE = "private"
    OFFLINE = "offline"
    PERSONAL_CLOUD = "personal_cloud"


class SecurityLevel(enum.Enum):
    STRONG = "strong"
    NORMAL = "normal"
    NORMAL_MINUS = "normal-minus"
    WEAK = "weak"


class DBMode(enum.Enum):
    LOCAL = "local"
    CACHE = "cache"
    REMOTE = "remote"


@@ -1,526 +0,0 @@
import subprocess
import sys
import os
import traceback
import git
import json
import yaml
import requests
from tqdm.auto import tqdm
from git.remote import RemoteProgress
comfy_path = os.environ.get('COMFYUI_PATH')
git_exe_path = os.environ.get('GIT_EXE_PATH')
if comfy_path is None:
print("git_helper: environment variable 'COMFYUI_PATH' is not specified.")
exit(-1)
if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):
print("git_helper: '{comfy_path}' is not a valid 'COMFYUI_PATH' location.")
exit(-1)
def download_url(url, dest_folder, filename=None):
# Ensure the destination folder exists
if not os.path.exists(dest_folder):
os.makedirs(dest_folder)
# Extract filename from URL if not provided
if filename is None:
filename = os.path.basename(url)
# Full path to save the file
dest_path = os.path.join(dest_folder, filename)
# Download the file
response = requests.get(url, stream=True)
if response.status_code == 200:
with open(dest_path, 'wb') as file:
for chunk in response.iter_content(chunk_size=1024):
if chunk:
file.write(chunk)
else:
print(f"Failed to download file from {url}")
nodelist_path = os.path.join(os.path.dirname(__file__), "custom-node-list.json")
working_directory = os.getcwd()
if os.path.basename(working_directory) != 'custom_nodes':
print("WARN: This script should be executed in custom_nodes dir")
print(f"DBG: INFO {working_directory}")
print(f"DBG: INFO {sys.argv}")
# exit(-1)
class GitProgress(RemoteProgress):
def __init__(self):
super().__init__()
self.pbar = tqdm(ascii=True)
def update(self, op_code, cur_count, max_count=None, message=''):
self.pbar.total = max_count
self.pbar.n = cur_count
self.pbar.pos = 0
self.pbar.refresh()
def gitclone(custom_nodes_path, url, target_hash=None, repo_path=None):
repo_name = os.path.splitext(os.path.basename(url))[0]
if repo_path is None:
repo_path = os.path.join(custom_nodes_path, repo_name)
# Clone the repository from the remote URL
repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress())
if target_hash is not None:
print(f"CHECKOUT: {repo_name} [{target_hash}]")
repo.git.checkout(target_hash)
repo.git.clear_cache()
repo.close()
def gitcheck(path, do_fetch=False):
try:
# Fetch the latest commits from the remote repository
repo = git.Repo(path)
if repo.head.is_detached:
print("CUSTOM NODE CHECK: True")
return
current_branch = repo.active_branch
branch_name = current_branch.name
remote_name = current_branch.tracking_branch().remote_name
remote = repo.remote(name=remote_name)
if do_fetch:
remote.fetch()
# Get the current commit hash and the commit hash of the remote branch
commit_hash = repo.head.commit.hexsha
if f'{remote_name}/{branch_name}' in repo.refs:
remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha
else:
print("CUSTOM NODE CHECK: True") # non default branch is treated as updatable
return
# Compare the commit hashes to determine if the local repository is behind the remote repository
if commit_hash != remote_commit_hash:
# Get the commit dates
commit_date = repo.head.commit.committed_datetime
remote_commit_date = repo.refs[f'{remote_name}/{branch_name}'].object.committed_datetime
# Compare the commit dates to determine if the local repository is behind the remote repository
if commit_date < remote_commit_date:
print("CUSTOM NODE CHECK: True")
else:
print("CUSTOM NODE CHECK: False")
except Exception as e:
print(e)
print("CUSTOM NODE CHECK: Error")
def get_remote_name(repo):
available_remotes = [remote.name for remote in repo.remotes]
if 'origin' in available_remotes:
return 'origin'
elif 'upstream' in available_remotes:
return 'upstream'
elif len(available_remotes) > 0:
return available_remotes[0]
if not available_remotes:
print(f"[ComfyUI-Manager] No remotes are configured for this repository: {repo.working_dir}")
else:
print(f"[ComfyUI-Manager] Available remotes in '{repo.working_dir}': ")
for remote in available_remotes:
print(f"- {remote}")
return None
def switch_to_default_branch(repo):
remote_name = get_remote_name(repo)
try:
if remote_name is None:
return False
default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
repo.git.checkout(default_branch)
return True
except Exception:
# try checkout master
# try checkout main if failed
try:
repo.git.checkout(repo.heads.master)
return True
except Exception:
try:
if remote_name is not None:
repo.git.checkout('-b', 'master', f'{remote_name}/master')
return True
except Exception:
try:
repo.git.checkout(repo.heads.main)
return True
except Exception:
try:
if remote_name is not None:
repo.git.checkout('-b', 'main', f'{remote_name}/main')
return True
except Exception:
pass
print("[ComfyUI Manager] Failed to switch to the default branch")
return False
def gitpull(path):
# Check if the path is a git repository
if not os.path.exists(os.path.join(path, '.git')):
raise ValueError('Not a git repository')
# Pull the latest changes from the remote repository
repo = git.Repo(path)
if repo.is_dirty():
print(f"STASH: '{path}' is dirty.")
repo.git.stash()
commit_hash = repo.head.commit.hexsha
try:
if repo.head.is_detached:
switch_to_default_branch(repo)
current_branch = repo.active_branch
branch_name = current_branch.name
remote_name = current_branch.tracking_branch().remote_name
remote = repo.remote(name=remote_name)
if f'{remote_name}/{branch_name}' not in repo.refs:
switch_to_default_branch(repo)
current_branch = repo.active_branch
branch_name = current_branch.name
remote.fetch()
if f'{remote_name}/{branch_name}' in repo.refs:
remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha
else:
print("CUSTOM NODE PULL: Fail") # update fail
return
if commit_hash == remote_commit_hash:
print("CUSTOM NODE PULL: None") # there is no update
repo.close()
return
remote.pull()
repo.git.submodule('update', '--init', '--recursive')
new_commit_hash = repo.head.commit.hexsha
if commit_hash != new_commit_hash:
print("CUSTOM NODE PULL: Success") # update success
else:
print("CUSTOM NODE PULL: Fail") # update fail
except Exception as e:
print(e)
print("CUSTOM NODE PULL: Fail") # unknown git error
repo.close()
def checkout_comfyui_hash(target_hash):
repo = git.Repo(comfy_path)
commit_hash = repo.head.commit.hexsha
if commit_hash != target_hash:
try:
print(f"CHECKOUT: ComfyUI [{target_hash}]")
repo.git.checkout(target_hash)
except git.GitCommandError as e:
print(f"Error checking out the ComfyUI: {str(e)}")
def checkout_custom_node_hash(git_custom_node_infos):
repo_name_to_url = {}
for url in git_custom_node_infos.keys():
repo_name = url.split('/')[-1]
if repo_name.endswith('.git'):
repo_name = repo_name[:-4]
repo_name_to_url[repo_name] = url
for path in os.listdir(working_directory):
if path.endswith("ComfyUI-Manager"):
continue
fullpath = os.path.join(working_directory, path)
if os.path.isdir(fullpath):
is_disabled = path.endswith(".disabled")
try:
git_dir = os.path.join(fullpath, '.git')
if not os.path.exists(git_dir):
continue
need_checkout = False
repo_name = os.path.basename(fullpath)
if repo_name.endswith('.disabled'):
repo_name = repo_name[:-9]
if repo_name not in repo_name_to_url:
if not is_disabled:
# should be disabled
print(f"DISABLE: {repo_name}")
new_path = fullpath + ".disabled"
os.rename(fullpath, new_path)
need_checkout = False
else:
item = git_custom_node_infos[repo_name_to_url[repo_name]]
if item['disabled'] and is_disabled:
pass
elif item['disabled'] and not is_disabled:
# disable
print(f"DISABLE: {repo_name}")
new_path = fullpath + ".disabled"
os.rename(fullpath, new_path)
elif not item['disabled'] and is_disabled:
# enable
print(f"ENABLE: {repo_name}")
new_path = fullpath[:-9]
os.rename(fullpath, new_path)
fullpath = new_path
need_checkout = True
else:
need_checkout = True
if need_checkout:
repo = git.Repo(fullpath)
commit_hash = repo.head.commit.hexsha
if commit_hash != item['hash']:
print(f"CHECKOUT: {repo_name} [{item['hash']}]")
repo.git.checkout(item['hash'])
except Exception:
print(f"Failed to restore snapshots for the custom node '{path}'")
# clone missing
for k, v in git_custom_node_infos.items():
if 'ComfyUI-Manager' in k:
continue
if not v['disabled']:
repo_name = k.split('/')[-1]
if repo_name.endswith('.git'):
repo_name = repo_name[:-4]
path = os.path.join(working_directory, repo_name)
if not os.path.exists(path):
print(f"CLONE: {path}")
gitclone(working_directory, k, target_hash=v['hash'])
def invalidate_custom_node_file(file_custom_node_infos):
global nodelist_path
enabled_set = set()
for item in file_custom_node_infos:
if not item['disabled']:
enabled_set.add(item['filename'])
for path in os.listdir(working_directory):
fullpath = os.path.join(working_directory, path)
if not os.path.isdir(fullpath) and fullpath.endswith('.py'):
if path not in enabled_set:
print(f"DISABLE: {path}")
new_path = fullpath+'.disabled'
os.rename(fullpath, new_path)
elif not os.path.isdir(fullpath) and fullpath.endswith('.py.disabled'):
path = path[:-9]
if path in enabled_set:
print(f"ENABLE: {path}")
new_path = fullpath[:-9]
os.rename(fullpath, new_path)
# download missing: just support for 'copy' style
py_to_url = {}
with open(nodelist_path, 'r', encoding="UTF-8") as json_file:
info = json.load(json_file)
for item in info['custom_nodes']:
if item['install_type'] == 'copy':
for url in item['files']:
if url.endswith('.py'):
py = url.split('/')[-1]
py_to_url[py] = url
for item in file_custom_node_infos:
filename = item['filename']
if not item['disabled']:
target_path = os.path.join(working_directory, filename)
if not os.path.exists(target_path) and filename in py_to_url:
url = py_to_url[filename]
print(f"DOWNLOAD: {filename}")
download_url(url, working_directory)
def apply_snapshot(path):
try:
if os.path.exists(path):
if not path.endswith('.json') and not path.endswith('.yaml'):
print(f"Snapshot file not found: `{path}`")
print("APPLY SNAPSHOT: False")
return None
with open(path, 'r', encoding="UTF-8") as snapshot_file:
if path.endswith('.json'):
info = json.load(snapshot_file)
elif path.endswith('.yaml'):
info = yaml.load(snapshot_file, Loader=yaml.SafeLoader)
info = info['custom_nodes']
else:
# impossible case
print("APPLY SNAPSHOT: False")
return None
comfyui_hash = info['comfyui']
git_custom_node_infos = info['git_custom_nodes']
file_custom_node_infos = info['file_custom_nodes']
if comfyui_hash:
checkout_comfyui_hash(comfyui_hash)
checkout_custom_node_hash(git_custom_node_infos)
invalidate_custom_node_file(file_custom_node_infos)
print("APPLY SNAPSHOT: True")
if 'pips' in info and info['pips']:
return info['pips']
else:
return None
print(f"Snapshot file not found: `{path}`")
print("APPLY SNAPSHOT: False")
return None
except Exception as e:
print(e)
traceback.print_exc()
print("APPLY SNAPSHOT: False")
return None
def restore_pip_snapshot(pips, options):
non_url = []
local_url = []
non_local_url = []
for k, v in pips.items():
if v == "":
non_url.append(k)
else:
if v.startswith('file:'):
local_url.append(v)
else:
non_local_url.append(v)
failed = []
if '--pip-non-url' in options:
# try all at once
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install'] + non_url)
except Exception:
pass
# fallback
if res != 0:
for x in non_url:
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
except Exception:
pass
if res != 0:
failed.append(x)
if '--pip-non-local-url' in options:
for x in non_local_url:
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
except Exception:
pass
if res != 0:
failed.append(x)
if '--pip-local-url' in options:
for x in local_url:
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
except Exception:
pass
if res != 0:
failed.append(x)
print(f"Installation failed for pip packages: {failed}")
def setup_environment():
if git_exe_path is not None:
git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=git_exe_path)
setup_environment()
try:
if sys.argv[1] == "--clone":
repo_path = None
if len(sys.argv) > 4:
repo_path = sys.argv[4]
gitclone(sys.argv[2], sys.argv[3], repo_path=repo_path)
elif sys.argv[1] == "--check":
gitcheck(sys.argv[2], False)
elif sys.argv[1] == "--fetch":
gitcheck(sys.argv[2], True)
elif sys.argv[1] == "--pull":
gitpull(sys.argv[2])
elif sys.argv[1] == "--apply-snapshot":
options = set()
for x in sys.argv:
if x in ['--pip-non-url', '--pip-local-url', '--pip-non-local-url']:
options.add(x)
pips = apply_snapshot(sys.argv[2])
if pips and len(options) > 0:
restore_pip_snapshot(pips, options)
sys.exit(0)
except Exception as e:
print(e)
sys.exit(-1)

View File

@@ -1,87 +0,0 @@
import os
import configparser
GITHUB_ENDPOINT = os.getenv('GITHUB_ENDPOINT')
def is_git_repo(path: str) -> bool:
""" Check if the path is a git repository. """
# NOTE: Checking it through `git.Repo` must be avoided.
# It locks the file, causing issues on Windows.
return os.path.exists(os.path.join(path, '.git'))
def get_commit_hash(fullpath):
git_head = os.path.join(fullpath, '.git', 'HEAD')
if os.path.exists(git_head):
with open(git_head) as f:
line = f.readline()
if line.startswith("ref: "):
ref = os.path.join(fullpath, '.git', line[5:].strip())
if os.path.exists(ref):
with open(ref) as f2:
return f2.readline().strip()
else:
return "unknown"
else:
return line
return "unknown"
def git_url(fullpath):
"""
resolve version of unclassified custom node based on remote url in .git/config
"""
git_config_path = os.path.join(fullpath, '.git', 'config')
if not os.path.exists(git_config_path):
return None
# Set `strict=False` to allow duplicate `vscode-merge-base` sections, addressing <https://github.com/ltdrdata/ComfyUI-Manager/issues/1529>
config = configparser.ConfigParser(strict=False)
config.read(git_config_path)
for k, v in config.items():
if k.startswith('remote ') and 'url' in v:
if 'Comfy-Org/ComfyUI-Manager' in v['url']:
return "https://github.com/ltdrdata/ComfyUI-Manager"
return v['url']
return None
def normalize_url(url) -> str:
github_id = normalize_to_github_id(url)
if github_id is not None:
url = f"https://github.com/{github_id}"
return url
def normalize_to_github_id(url) -> str:
if 'github' in url or (GITHUB_ENDPOINT is not None and GITHUB_ENDPOINT in url):
author = os.path.basename(os.path.dirname(url))
if author.startswith('git@github.com:'):
author = author.split(':')[1]
repo_name = os.path.basename(url)
if repo_name.endswith('.git'):
repo_name = repo_name[:-4]
return f"{author}/{repo_name}"
return None
def get_url_for_clone(url):
url = normalize_url(url)
if GITHUB_ENDPOINT is not None and url.startswith('https://github.com/'):
url = GITHUB_ENDPOINT + url[18:] # url[18:] -> remove `https://github.com`
return url

View File

@@ -1,163 +0,0 @@
import os
from urllib.parse import urlparse
import urllib
import sys
import logging
import requests
from huggingface_hub import HfApi
from tqdm.auto import tqdm
aria2 = os.getenv('COMFYUI_MANAGER_ARIA2_SERVER')
HF_ENDPOINT = os.getenv('HF_ENDPOINT')
if aria2 is not None:
secret = os.getenv('COMFYUI_MANAGER_ARIA2_SECRET')
url = urlparse(aria2)
port = url.port
host = url.scheme + '://' + url.hostname
import aria2p
aria2 = aria2p.API(aria2p.Client(host=host, port=port, secret=secret))
def basic_download_url(url, dest_folder: str, filename: str):
'''
Download file from url to dest_folder with filename
using requests library.
'''
import requests
# Ensure the destination folder exists
if not os.path.exists(dest_folder):
os.makedirs(dest_folder)
# Full path to save the file
dest_path = os.path.join(dest_folder, filename)
# Download the file
response = requests.get(url, stream=True)
if response.status_code == 200:
with open(dest_path, 'wb') as file:
for chunk in response.iter_content(chunk_size=1024):
if chunk:
file.write(chunk)
else:
raise Exception(f"Failed to download file from {url}")
def download_url(model_url: str, model_dir: str, filename: str):
if HF_ENDPOINT:
model_url = model_url.replace('https://huggingface.co', HF_ENDPOINT)
logging.info(f"model_url replaced by HF_ENDPOINT, new = {model_url}")
if aria2:
return aria2_download_url(model_url, model_dir, filename)
else:
from torchvision.datasets.utils import download_url as torchvision_download_url
try:
return torchvision_download_url(model_url, model_dir, filename)
except Exception as e:
logging.error(f"[ComfyUI-Manager] Failed to download: {model_url} / {repr(e)}")
raise
def aria2_find_task(dir: str, filename: str):
target = os.path.join(dir, filename)
downloads = aria2.get_downloads()
for download in downloads:
for file in download.files:
if file.is_metadata:
continue
if str(file.path) == target:
return download
def aria2_download_url(model_url: str, model_dir: str, filename: str):
import manager_core as core
import tqdm
import time
if model_dir.startswith(core.comfy_path):
model_dir = model_dir[len(core.comfy_path) :]
download_dir = model_dir if model_dir.startswith('/') else os.path.join('/models', model_dir)
download = aria2_find_task(download_dir, filename)
if download is None:
options = {'dir': download_dir, 'out': filename}
download = aria2.add(model_url, options)[0]
if download.is_active:
with tqdm.tqdm(
total=download.total_length,
bar_format='{l_bar}{bar}{r_bar}',
desc=filename,
unit='B',
unit_scale=True,
) as progress_bar:
while download.is_active:
if progress_bar.total == 0 and download.total_length != 0:
progress_bar.reset(download.total_length)
progress_bar.update(download.completed_length - progress_bar.n)
time.sleep(1)
download.update()
def download_url_with_agent(url, save_path):
try:
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}
req = urllib.request.Request(url, headers=headers)
response = urllib.request.urlopen(req)
data = response.read()
if not os.path.exists(os.path.dirname(save_path)):
os.makedirs(os.path.dirname(save_path))
with open(save_path, 'wb') as f:
f.write(data)
except Exception as e:
print(f"Download error: {url} / {e}", file=sys.stderr)
return False
print("Installation was successful.")
return True
# NOTE: snapshot_download doesn't provide file size tqdm.
def download_repo_in_bytes(repo_id, local_dir):
api = HfApi()
repo_info = api.repo_info(repo_id=repo_id, files_metadata=True)
os.makedirs(local_dir, exist_ok=True)
total_size = 0
for file_info in repo_info.siblings:
if file_info.size is not None:
total_size += file_info.size
pbar = tqdm(total=total_size, unit="B", unit_scale=True, desc="Downloading")
for file_info in repo_info.siblings:
out_path = os.path.join(local_dir, file_info.rfilename)
os.makedirs(os.path.dirname(out_path), exist_ok=True)
if file_info.size is None:
continue
download_url = f"https://huggingface.co/{repo_id}/resolve/main/{file_info.rfilename}"
with requests.get(download_url, stream=True) as r, open(out_path, "wb") as f:
r.raise_for_status()
for chunk in r.iter_content(chunk_size=65536):
if chunk:
f.write(chunk)
pbar.update(len(chunk))
pbar.close()

View File

@@ -1,36 +0,0 @@
from enum import Enum
is_personal_cloud_mode = False
handler_policy = {}
class HANDLER_POLICY(Enum):
MULTIPLE_REMOTE_BAN_NON_LOCAL = 1
MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD = 2
BANNED = 3
def is_loopback(address):
import ipaddress
try:
return ipaddress.ip_address(address).is_loopback
except ValueError:
return False
def do_nothing():
pass
def get_handler_policy(x):
return handler_policy.get(x) or set()
def add_handler_policy(x, policy):
s = handler_policy.get(x)
if s is None:
s = set()
handler_policy[x] = s
s.add(policy)
multiple_remote_alert = do_nothing

View File

@@ -1,590 +0,0 @@
"""
description:
`manager_util` is the lightest module shared across the prestartup_script, main code, and cm-cli of ComfyUI-Manager.
"""
import traceback
import aiohttp
import json
import threading
import os
from datetime import datetime
import subprocess
import sys
import re
import logging
import platform
import shlex
cache_lock = threading.Lock()
session_lock = threading.Lock()
comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.
use_uv = False
bypass_ssl = False
def is_manager_pip_package():
return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))
def add_python_path_to_env():
if platform.system() != "Windows":
sep = ':'
else:
sep = ';'
os.environ['PATH'] = os.path.dirname(sys.executable)+sep+os.environ['PATH']
def make_pip_cmd(cmd):
if 'python_embeded' in sys.executable:
if use_uv:
return [sys.executable, '-s', '-m', 'uv', 'pip'] + cmd
else:
return [sys.executable, '-s', '-m', 'pip'] + cmd
else:
# FIXED: https://github.com/ltdrdata/ComfyUI-Manager/issues/1667
if use_uv:
return [sys.executable, '-m', 'uv', 'pip'] + cmd
else:
return [sys.executable, '-m', 'pip'] + cmd
# DON'T USE StrictVersion - cannot handle pre_release version
# try:
# from distutils.version import StrictVersion
# except Exception:
# print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
class StrictVersion:
def __init__(self, version_string):
self.version_string = version_string
self.major = 0
self.minor = 0
self.patch = 0
self.pre_release = None
self.parse_version_string()
def parse_version_string(self):
parts = self.version_string.split('.')
if not parts:
raise ValueError("Version string must not be empty")
self.major = int(parts[0])
self.minor = int(parts[1]) if len(parts) > 1 else 0
self.patch = int(parts[2]) if len(parts) > 2 else 0
# Handling pre-release versions if present
if len(parts) > 3:
self.pre_release = parts[3]
def __str__(self):
version = f"{self.major}.{self.minor}.{self.patch}"
if self.pre_release:
version += f"-{self.pre_release}"
return version
def __eq__(self, other):
return (self.major, self.minor, self.patch, self.pre_release) == \
(other.major, other.minor, other.patch, other.pre_release)
def __lt__(self, other):
if (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch):
return self.pre_release_compare(self.pre_release, other.pre_release) < 0
return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
@staticmethod
def pre_release_compare(pre1, pre2):
if pre1 == pre2:
return 0
if pre1 is None:
return 1
if pre2 is None:
return -1
return -1 if pre1 < pre2 else 1
def __le__(self, other):
return self == other or self < other
def __gt__(self, other):
return not self <= other
def __ge__(self, other):
return not self < other
def __ne__(self, other):
return not self == other
def simple_hash(input_string):
hash_value = 0
for char in input_string:
hash_value = (hash_value * 31 + ord(char)) % (2**32)
return hash_value
def is_file_created_within_one_day(file_path):
if not os.path.exists(file_path):
return False
file_creation_time = os.path.getctime(file_path)
current_time = datetime.now().timestamp()
time_difference = current_time - file_creation_time
return time_difference <= 86400
async def get_data(uri, silent=False):
if not silent:
print(f"FETCH DATA from: {uri}", end="")
if uri.startswith("http"):
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=not bypass_ssl)) as session:
headers = {
'Cache-Control': 'no-cache',
'Pragma': 'no-cache',
'Expires': '0'
}
async with session.get(uri, headers=headers) as resp:
json_text = await resp.text()
else:
with cache_lock:
with open(uri, "r", encoding="utf-8") as f:
json_text = f.read()
try:
json_obj = json.loads(json_text)
except Exception as e:
logging.error(f"[ComfyUI-Manager] An error occurred while fetching '{uri}': {e}")
return {}
if not silent:
print(" [DONE]")
return json_obj
def get_cache_path(uri):
cache_uri = str(simple_hash(uri)) + '_' + os.path.basename(uri).replace('&', "_").replace('?', "_").replace('=', "_")
return os.path.join(cache_dir, cache_uri+'.json')
def get_cache_state(uri):
cache_uri = get_cache_path(uri)
if not os.path.exists(cache_uri):
return "not-cached"
elif is_file_created_within_one_day(cache_uri):
return "cached"
return "expired"
def save_to_cache(uri, json_obj, silent=False):
cache_uri = get_cache_path(uri)
with cache_lock:
with open(cache_uri, "w", encoding='utf-8') as file:
json.dump(json_obj, file, indent=4, sort_keys=True)
if not silent:
logging.info(f"[ComfyUI-Manager] default cache updated: {uri}")
async def get_data_with_cache(uri, silent=False, cache_mode=True, dont_wait=False, dont_cache=False):
cache_uri = get_cache_path(uri)
if cache_mode and dont_wait:
# NOTE: return the cache if possible, even if it is expired; in this mode the cache is not refreshed
if not os.path.exists(cache_uri):
logging.error(f"[ComfyUI-Manager] The network connection is unstable, so it is operating in fallback mode: {uri}")
return {}
else:
if not is_file_created_within_one_day(cache_uri):
logging.error(f"[ComfyUI-Manager] The network connection is unstable, so it is operating in outdated cache mode: {uri}")
return await get_data(cache_uri, silent=silent)
if cache_mode and is_file_created_within_one_day(cache_uri):
json_obj = await get_data(cache_uri, silent=silent)
else:
json_obj = await get_data(uri, silent=silent)
if not dont_cache:
with cache_lock:
with open(cache_uri, "w", encoding='utf-8') as file:
json.dump(json_obj, file, indent=4, sort_keys=True)
if not silent:
logging.info(f"[ComfyUI-Manager] default cache updated: {uri}")
return json_obj
def sanitize_tag(x):
return x.replace('<', '&lt;').replace('>', '&gt;')
def extract_package_as_zip(file_path, extract_path):
import zipfile
try:
with zipfile.ZipFile(file_path, "r") as zip_ref:
zip_ref.extractall(extract_path)
extracted_files = zip_ref.namelist()
logging.info(f"Extracted zip file to {extract_path}")
return extracted_files
except zipfile.BadZipFile:
logging.error(f"File '{file_path}' is not a zip or is corrupted.")
return None
pip_map = None
def get_installed_packages(renew=False):
global pip_map
if renew or pip_map is None:
try:
result = subprocess.check_output(make_pip_cmd(['list']), universal_newlines=True)
pip_map = {}
for line in result.split('\n'):
x = line.strip()
if x:
y = line.split()
if y[0] == 'Package' or y[0].startswith('-'):
continue
normalized_name = y[0].lower().replace('-', '_')
pip_map[normalized_name] = y[1]
except subprocess.CalledProcessError:
logging.error("[ComfyUI-Manager] Failed to retrieve the information of installed pip packages.")
return {}
return pip_map
def clear_pip_cache():
global pip_map
pip_map = None
def parse_requirement_line(line):
tokens = shlex.split(line)
if not tokens:
return None
package_spec = tokens[0]
pattern = re.compile(
r'^(?P<package>[A-Za-z0-9_.+-]+)'
r'(?P<operator>==|>=|<=|!=|~=|>|<)?'
r'(?P<version>[A-Za-z0-9_.+-]*)$'
)
m = pattern.match(package_spec)
if not m:
return None
package = m.group('package')
operator = m.group('operator') or None
version = m.group('version') or None
index_url = None
if '--index-url' in tokens:
idx = tokens.index('--index-url')
if idx + 1 < len(tokens):
index_url = tokens[idx + 1]
res = {'package': package}
if operator is not None:
res['operator'] = operator
if version is not None:
res['version'] = StrictVersion(version)
if index_url is not None:
res['index_url'] = index_url
return res
torch_torchvision_torchaudio_version_map = {
'2.7.0': ('0.22.0', '2.7.0'),
'2.6.0': ('0.21.0', '2.6.0'),
'2.5.1': ('0.20.0', '2.5.0'),
'2.5.0': ('0.20.0', '2.5.0'),
'2.4.1': ('0.19.1', '2.4.1'),
'2.4.0': ('0.19.0', '2.4.0'),
'2.3.1': ('0.18.1', '2.3.1'),
'2.3.0': ('0.18.0', '2.3.0'),
'2.2.2': ('0.17.2', '2.2.2'),
'2.2.1': ('0.17.1', '2.2.1'),
'2.2.0': ('0.17.0', '2.2.0'),
'2.1.2': ('0.16.2', '2.1.2'),
'2.1.1': ('0.16.1', '2.1.1'),
'2.1.0': ('0.16.0', '2.1.0'),
'2.0.1': ('0.15.2', '2.0.1'),
'2.0.0': ('0.15.1', '2.0.0'),
}
def torch_rollback(prev):
spec = prev.split('+')
if len(spec) > 1:
platform = spec[1]
else:
cmd = make_pip_cmd(['install', '--force', 'torch', 'torchvision', 'torchaudio'])
subprocess.check_output(cmd, universal_newlines=True)
logging.error(cmd)
return
torch_ver = StrictVersion(spec[0])
torch_ver = f"{torch_ver.major}.{torch_ver.minor}.{torch_ver.patch}"
torch_torchvision_torchaudio_ver = torch_torchvision_torchaudio_version_map.get(torch_ver)
if torch_torchvision_torchaudio_ver is None:
cmd = make_pip_cmd(['install', '--pre', 'torch', 'torchvision', 'torchaudio',
'--index-url', f"https://download.pytorch.org/whl/nightly/{platform}"])
logging.info("[ComfyUI-Manager] restore PyTorch to nightly version")
else:
torchvision_ver, torchaudio_ver = torch_torchvision_torchaudio_ver
cmd = make_pip_cmd(['install', f'torch=={torch_ver}', f'torchvision=={torchvision_ver}', f"torchaudio=={torchaudio_ver}",
'--index-url', f"https://download.pytorch.org/whl/{platform}"])
logging.info(f"[ComfyUI-Manager] restore PyTorch to {torch_ver}+{platform}")
subprocess.check_output(cmd, universal_newlines=True)
class PIPFixer:
def __init__(self, prev_pip_versions, comfyui_path, manager_files_path):
self.prev_pip_versions = { **prev_pip_versions }
self.comfyui_path = comfyui_path
self.manager_files_path = manager_files_path
def fix_broken(self):
new_pip_versions = get_installed_packages(True)
# remove `comfy` python package
try:
if 'comfy' in new_pip_versions:
cmd = make_pip_cmd(['uninstall', 'comfy'])
subprocess.check_output(cmd, universal_newlines=True)
logging.warning("[ComfyUI-Manager] 'comfy' python package is uninstalled.\nWARN: The 'comfy' package is completely unrelated to ComfyUI and should never be installed as it causes conflicts with ComfyUI.")
except Exception as e:
logging.error("[ComfyUI-Manager] Failed to uninstall `comfy` python package")
logging.error(e)
# fix torch - reinstall torch packages if version is changed
try:
if 'torch' not in self.prev_pip_versions or 'torchvision' not in self.prev_pip_versions or 'torchaudio' not in self.prev_pip_versions:
logging.error("[ComfyUI-Manager] PyTorch is not installed")
elif self.prev_pip_versions['torch'] != new_pip_versions['torch'] \
or self.prev_pip_versions['torchvision'] != new_pip_versions['torchvision'] \
or self.prev_pip_versions['torchaudio'] != new_pip_versions['torchaudio']:
torch_rollback(self.prev_pip_versions['torch'])
except Exception as e:
logging.error("[ComfyUI-Manager] Failed to restore PyTorch")
logging.error(e)
# fix opencv
try:
ocp = new_pip_versions.get('opencv-contrib-python')
ocph = new_pip_versions.get('opencv-contrib-python-headless')
op = new_pip_versions.get('opencv-python')
oph = new_pip_versions.get('opencv-python-headless')
versions = [ocp, ocph, op, oph]
versions = [StrictVersion(x) for x in versions if x is not None]
versions.sort(reverse=True)
if len(versions) > 0:
# upgrade to maximum version
targets = []
cur = versions[0]
if ocp is not None and StrictVersion(ocp) != cur:
targets.append('opencv-contrib-python')
if ocph is not None and StrictVersion(ocph) != cur:
targets.append('opencv-contrib-python-headless')
if op is not None and StrictVersion(op) != cur:
targets.append('opencv-python')
if oph is not None and StrictVersion(oph) != cur:
targets.append('opencv-python-headless')
if len(targets) > 0:
for x in targets:
cmd = make_pip_cmd(['install', f"{x}=={versions[0].version_string}"])
subprocess.check_output(cmd, universal_newlines=True)
logging.info(f"[ComfyUI-Manager] 'opencv' dependencies were fixed: {targets}")
except Exception as e:
logging.error("[ComfyUI-Manager] Failed to restore opencv")
logging.error(e)
# fix missing frontend
try:
# NOTE: package name in requirements is 'comfyui-frontend-package'
# but, package name from `pip freeze` is 'comfyui_frontend_package'
# but, package name from `uv pip freeze` is 'comfyui-frontend-package'
#
# get_installed_packages returns normalized name (i.e. comfyui_frontend_package)
if 'comfyui_frontend_package' not in new_pip_versions:
requirements_path = os.path.join(self.comfyui_path, 'requirements.txt')
with open(requirements_path, 'r') as file:
lines = file.readlines()
front_line = next((line.strip() for line in lines if line.startswith('comfyui-frontend-package')), None)
if front_line is None:
logging.info("[ComfyUI-Manager] Skipped fixing the 'comfyui-frontend-package' dependency because the ComfyUI is outdated.")
else:
cmd = make_pip_cmd(['install', front_line])
subprocess.check_output(cmd , universal_newlines=True)
logging.info("[ComfyUI-Manager] 'comfyui-frontend-package' dependency were fixed")
except Exception as e:
logging.error("[ComfyUI-Manager] Failed to restore comfyui-frontend-package")
logging.error(e)
# restore based on custom list
pip_auto_fix_path = os.path.join(self.manager_files_path, "pip_auto_fix.list")
if os.path.exists(pip_auto_fix_path):
with open(pip_auto_fix_path, 'r', encoding="UTF-8", errors="ignore") as f:
fixed_list = []
for x in f.readlines():
try:
parsed = parse_requirement_line(x)
need_to_reinstall = True
normalized_name = parsed['package'].lower().replace('-', '_')
if normalized_name in new_pip_versions:
if 'version' in parsed and 'operator' in parsed:
cur = StrictVersion(new_pip_versions[normalized_name])
dest = parsed['version']
op = parsed['operator']
if cur == dest:
if op in ['==', '>=', '<=']:
need_to_reinstall = False
elif cur < dest:
if op in ['<=', '<', '~=', '!=']:
need_to_reinstall = False
elif cur > dest:
if op in ['>=', '>', '~=', '!=']:
need_to_reinstall = False
if need_to_reinstall:
cmd_args = ['install']
if 'version' in parsed and 'operator' in parsed:
cmd_args.append(parsed['package']+parsed['operator']+parsed['version'].version_string)
if 'index_url' in parsed:
cmd_args.append('--index-url')
cmd_args.append(parsed['index_url'])
cmd = make_pip_cmd(cmd_args)
subprocess.check_output(cmd, universal_newlines=True)
fixed_list.append(parsed['package'])
except Exception as e:
traceback.print_exc()
logging.error(f"[ComfyUI-Manager] Failed to restore '{x}'")
logging.error(e)
if len(fixed_list) > 0:
logging.info(f"[ComfyUI-Manager] dependencies in pip_auto_fix.json were fixed: {fixed_list}")
def sanitize(data):
return data.replace("<", "&lt;").replace(">", "&gt;")
def sanitize_filename(input_string):
result_string = re.sub(r'[^a-zA-Z0-9_]', '_', input_string)
return result_string
def robust_readlines(fullpath):
import chardet
try:
with open(fullpath, "r") as f:
return f.readlines()
except Exception:
encoding = None
with open(fullpath, "rb") as f:
raw_data = f.read()
result = chardet.detect(raw_data)
encoding = result['encoding']
if encoding is not None:
with open(fullpath, "r", encoding=encoding) as f:
return f.readlines()
print(f"[ComfyUI-Manager] Failed to recognize encoding for: {fullpath}")
return []
def restore_pip_snapshot(pips, options):
non_url = []
local_url = []
non_local_url = []
for k, v in pips.items():
# NOTE: skip torch related packages
if k.startswith("torch==") or k.startswith("torchvision==") or k.startswith("torchaudio==") or k.startswith("nvidia-"):
continue
if v == "":
non_url.append(k)
else:
if v.startswith('file:'):
local_url.append(v)
else:
non_local_url.append(v)
# restore other pips
failed = []
if '--pip-non-url' in options:
# try all at once
res = 1
try:
res = subprocess.check_call(make_pip_cmd(['install'] + non_url))
except Exception:
pass
# fallback
if res != 0:
for x in non_url:
res = 1
try:
res = subprocess.check_call(make_pip_cmd(['install', '--no-deps', x]))
except Exception:
pass
if res != 0:
failed.append(x)
if '--pip-non-local-url' in options:
for x in non_local_url:
res = 1
try:
res = subprocess.check_call(make_pip_cmd(['install', '--no-deps', x]))
except Exception:
pass
if res != 0:
failed.append(x)
if '--pip-local-url' in options:
for x in local_url:
res = 1
try:
res = subprocess.check_call(make_pip_cmd(['install', '--no-deps', x]))
except Exception:
pass
if res != 0:
failed.append(x)
print(f"Installation failed for pip packages: {failed}")

View File

@@ -1,72 +0,0 @@
from __future__ import annotations
from dataclasses import dataclass
import os
from .git_utils import get_commit_hash
@dataclass
class InstalledNodePackage:
"""Information about an installed node package."""
id: str
fullpath: str
disabled: bool
version: str
@property
def is_unknown(self) -> bool:
return self.version == "unknown"
@property
def is_nightly(self) -> bool:
return self.version == "nightly"
@property
def is_from_cnr(self) -> bool:
return not self.is_unknown and not self.is_nightly
@property
def is_enabled(self) -> bool:
return not self.disabled
@property
def is_disabled(self) -> bool:
return self.disabled
def get_commit_hash(self) -> str:
return get_commit_hash(self.fullpath)
def isValid(self) -> bool:
if self.is_from_cnr:
return os.path.exists(os.path.join(self.fullpath, '.tracking'))
return True
@staticmethod
def from_fullpath(fullpath: str, resolve_from_path) -> InstalledNodePackage:
parent_folder_name = os.path.basename(os.path.dirname(fullpath))
module_name = os.path.basename(fullpath)
if module_name.endswith(".disabled"):
node_id = module_name[:-9]
disabled = True
elif parent_folder_name == ".disabled":
# Nodes under custom_nodes/.disabled/* are disabled
node_id = module_name
disabled = True
else:
node_id = module_name
disabled = False
info = resolve_from_path(fullpath)
if info is None:
version = 'unknown'
else:
node_id = info['id'] # robust module guessing
version = info['ver']
return InstalledNodePackage(
id=node_id, fullpath=fullpath, disabled=disabled, version=version
)

View File

@@ -1,713 +0,0 @@
# Design Document for pip_util.py Implementation
pip_util.py is designed to minimize breakage of already-installed dependencies.
## List of Functions to Implement
## Global Policy Management
### Global Variables
```python
_pip_policy_cache = None # Policy cache (program-wide, loaded once)
```
### Global Functions
* get_pip_policy(): Returns policy for resolving pip dependency conflicts (lazy loading)
- **Call timing**: Called whenever needed (automatically loads only once on first call)
- **Purpose**: Returns policy cache, automatically loads if cache is empty
- **Execution flow**:
1. Declare global _pip_policy_cache
2. If _pip_policy_cache is already loaded, return immediately (prevent duplicate loading)
3. Read base policy file:
- Path: {manager_util.comfyui_manager_path}/pip-policy.json
- Use empty dictionary if file doesn't exist
- Log error and use empty dictionary if JSON parsing fails
4. Read user policy file:
- Path: {context.manager_files_path}/pip-policy.user.json
- Create an empty JSON file if it doesn't exist ({"_comment": "User-specific pip policy overrides"})
- Log warning and use empty dictionary if JSON parsing fails
5. Apply merge rules (merge by package name):
- Start with base policy as base
- For each package in user policy:
* Package only in user policy: add to base
* Package only in base policy: keep in base
* Package in both: completely replace with user policy (entire package replacement, not section-level)
6. Store merged policy in _pip_policy_cache
7. Log policy load success (include number of loaded package policies)
8. Return _pip_policy_cache
- **Return value**: Dict (merged policy dictionary)
- **Exception handling**:
- File read failure: Log warning and treat file as empty dictionary
- JSON parsing failure: Log error and treat file as empty dictionary
- **Notes**:
- Lazy loading pattern automatically loads on first call
- Not thread-safe, caution needed in multi-threaded environments
- Policy file structure should support the following scenarios:
- Dictionary structure of {dependency name -> policy object}
- Policy object has four policy sections:
- **uninstall**: Package removal policy (pre-processing, condition optional)
- **apply_first_match**: Evaluate top-to-bottom and execute only the first policy that satisfies condition (exclusive)
- **apply_all_matches**: Execute all policies that satisfy conditions (cumulative)
- **restore**: Package restoration policy (post-processing, condition optional)
- Condition types:
- installed: Check version condition of already installed dependencies
- spec is optional
- package field: Specify package to check (optional, defaults to self)
- Explicit: Reference another package (e.g., numba checks numpy version)
- Omitted: Check own version (e.g., critical-package checks its own version)
- platform: Platform conditions (os, has_gpu, comfyui_version, etc.)
- If condition is absent, always considered satisfied
- uninstall policy (pre-removal policy):
- Removal policy list (condition is optional, evaluate top-to-bottom and execute only first match)
- When condition satisfied (or always if no condition): remove target package and abort installation
- If this policy is applied, all subsequent steps are ignored
- target field specifies package to remove
- Example: Unconditionally remove if specific package is installed
- Actions available in apply_first_match (determine installation method, exclusive):
- skip: Block installation of specific dependency
- force_version: Force change to specific version during installation
- extra_index_url field can specify custom package repository (optional)
- replace: Replace with different dependency
- extra_index_url field can specify custom package repository (optional)
- Actions available in apply_all_matches (installation options, cumulative):
- pin_dependencies: Pin currently installed versions of other dependencies
- pinned_packages field specifies package list
- Example: `pip install requests urllib3==1.26.15 certifi==2023.7.22 charset-normalizer==3.2.0`
- Real use case: Prevent urllib3 from upgrading to 2.x when installing requests
- on_failure: "fail" or "retry_without_pin"
- install_with: Specify additional dependencies to install together
- warn: Record warning message in log
- restore policy (post-restoration policy):
- Restoration policy list (condition is optional, evaluate top-to-bottom and execute only first match)
- Executed after package installation completes (post-processing)
- When condition satisfied (or always if no condition): force install target package to specific version
- target field specifies package to restore (can be different package)
- version field specifies version to install
- extra_index_url field can specify custom package repository (optional)
- Example: Reinstall/change version if specific package is deleted or wrong version
- Execution order:
1. uninstall evaluation: If condition satisfied, remove package and **terminate** (ignore subsequent steps)
2. apply_first_match evaluation:
- Execute first policy that satisfies condition among skip/force_version/replace
- If no matching policy, proceed with default installation of originally requested package
3. apply_all_matches evaluation: Apply all pin_dependencies, install_with, warn that satisfy conditions
4. Execute actual package installation (pip install or uv pip install)
5. restore evaluation: If condition satisfied, restore target package (post-processing)
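A minimal sketch of the lazy load-and-merge behavior described above, assuming the file names from this document. The explicit `base_dir`/`user_dir` parameters and the `_load_policy_file` helper are illustrative simplifications; the spec instead reads these paths from `manager_util.comfyui_manager_path` and `context.manager_files_path`.
```python
import json
import logging
import os

logger = logging.getLogger(__name__)

_pip_policy_cache = None  # merged policy, loaded once (lazy loading)


def _load_policy_file(path, create_if_missing=False):
    """Read a policy JSON file; fall back to an empty dict on any problem."""
    try:
        if not os.path.exists(path):
            if create_if_missing:
                with open(path, "w", encoding="utf-8") as f:
                    json.dump({"_comment": "User-specific pip policy overrides"}, f, indent=4)
            return {}
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        logger.warning(f"[ComfyUI-Manager] invalid or unreadable pip policy file '{path}': {e}")
        return {}


def get_pip_policy(base_dir, user_dir):
    """Lazy-load base + user policies; a package present in both is replaced wholesale by the user entry."""
    global _pip_policy_cache
    if _pip_policy_cache is not None:
        return _pip_policy_cache

    base = _load_policy_file(os.path.join(base_dir, "pip-policy.json"))
    user = _load_policy_file(os.path.join(user_dir, "pip-policy.user.json"), create_if_missing=True)

    merged = dict(base)
    for pkg, policy in user.items():
        merged[pkg] = policy  # whole-package replacement, not section-level merging

    _pip_policy_cache = merged
    logger.info(f"[ComfyUI-Manager] pip policy loaded: {len(merged)} package policies")
    return _pip_policy_cache
```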
## Batch Unit Class (PipBatch)
### Class Structure
```python
class PipBatch:
"""
pip package installation batch unit manager
Maintains pip freeze cache during batch operations for performance optimization
Usage pattern:
# Batch operations (policy auto-loaded)
with PipBatch() as batch:
batch.ensure_not_installed()
batch.install("numpy>=1.20")
batch.install("pandas>=2.0")
batch.install("scipy>=1.7")
batch.ensure_installed()
"""
def __init__(self):
self._installed_cache = None # Installed packages cache (batch-level)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._installed_cache = None
```
### Private Methods
* PipBatch._refresh_installed_cache():
- **Purpose**: Read currently installed package information and refresh cache
- **Execution flow**:
1. Generate command using manager_util.make_pip_cmd(["freeze"])
2. Execute pip freeze via subprocess
3. Parse output:
- Each line is in "package_name==version" format
- Parse "package_name==version" to create dictionary
- Ignore editable packages (starting with -e)
- Ignore comments (starting with #)
4. Store parsed dictionary in self._installed_cache
- **Return value**: None
- **Exception handling**:
- pip freeze failure: Set cache to empty dictionary and log warning
- Parse failure: Ignore line and continue
* PipBatch._get_installed_packages():
- **Purpose**: Return cached installed package information (refresh if cache is None)
- **Execution flow**:
1. If self._installed_cache is None, call _refresh_installed_cache()
2. Return self._installed_cache
- **Return value**: {package_name: version} dictionary
* PipBatch._invalidate_cache():
- **Purpose**: Invalidate cache after package install/uninstall
- **Execution flow**:
1. Set self._installed_cache = None
- **Return value**: None
- **Call timing**: After install(), ensure_not_installed(), ensure_installed()
* PipBatch._parse_package_spec(package_info):
- **Purpose**: Split package spec string into package name and version spec
- **Parameters**:
- package_info: "numpy", "numpy==1.26.0", "numpy>=1.20.0", "numpy~=1.20", etc.
- **Execution flow**:
1. Use regex to split package name and version spec
2. Pattern: `^([a-zA-Z0-9_-]+)([><=!~]+.*)?$`
- **Return value**: (package_name, version_spec) tuple
- Examples: ("numpy", "==1.26.0"), ("pandas", ">=2.0.0"), ("scipy", None)
- **Exception handling**:
- Parse failure: Raise ValueError
* PipBatch._evaluate_condition(condition, package_name, installed_packages):
- **Purpose**: Evaluate policy condition and return whether satisfied
- **Parameters**:
- condition: Policy condition object (dictionary)
- package_name: Name of package currently being processed
- installed_packages: {package_name: version} dictionary
- **Execution flow**:
1. If condition is None, return True (always satisfied)
2. Branch based on condition["type"]:
a. "installed" type:
- target_package = condition.get("package", package_name)
- Check current version with installed_packages.get(target_package)
- If not installed (None), return False
- If spec exists, compare version using packaging.specifiers.SpecifierSet
- If no spec, only check installation status (True)
b. "platform" type:
- If condition["os"] exists, compare with platform.system()
- If condition["has_gpu"] exists, check GPU presence (torch.cuda.is_available(), etc.)
- If condition["comfyui_version"] exists, compare ComfyUI version
- Return True if all conditions satisfied
3. Return True if all conditions satisfied, False if any unsatisfied
- **Return value**: bool
- **Exception handling**:
- Version comparison failure: Log warning and return False
- Unknown condition type: Log warning and return False
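To make the private-method contracts above concrete, here is a sketch of the freeze-cache and condition-evaluation pieces, written as free functions rather than `PipBatch` methods for brevity. `make_pip_cmd` is the helper shown earlier in this diff and `packaging` is the library the spec names; the platform condition branch is intentionally omitted, and the function names are placeholders rather than the final implementation.
```python
import logging
import subprocess

from packaging.specifiers import SpecifierSet
from packaging.version import Version


def parse_pip_freeze(output: str) -> dict:
    """Turn `pip freeze` output into {package_name: version}, skipping editable installs and comments."""
    installed = {}
    for line in output.splitlines():
        line = line.strip()
        if not line or line.startswith('-e') or line.startswith('#'):
            continue
        if '==' in line:
            name, _, version = line.partition('==')
            installed[name.strip()] = version.strip()
    return installed


def refresh_installed_cache(make_pip_cmd) -> dict:
    """Run `pip freeze` and return the parsed result, or an empty dict on failure (as the spec requires)."""
    try:
        out = subprocess.check_output(make_pip_cmd(["freeze"]), universal_newlines=True)
        return parse_pip_freeze(out)
    except subprocess.CalledProcessError:
        logging.warning("[ComfyUI-Manager] pip freeze failed; assuming an empty environment")
        return {}


def evaluate_condition(condition, package_name, installed_packages) -> bool:
    """Return True when a policy condition is satisfied; a missing condition always matches."""
    if condition is None:
        return True
    try:
        if condition.get("type") == "installed":
            target = condition.get("package", package_name)   # defaults to the package itself
            current = installed_packages.get(target)
            if current is None:
                return False                                   # not installed -> not satisfied
            spec = condition.get("spec")
            if spec is None:
                return True                                    # only the installation status matters
            return Version(current) in SpecifierSet(spec)
        # "platform" conditions (os / has_gpu / comfyui_version) are omitted in this sketch
        logging.warning(f"[ComfyUI-Manager] unhandled condition type: {condition.get('type')}")
        return False
    except Exception as e:
        logging.warning(f"[ComfyUI-Manager] condition evaluation failed for '{package_name}': {e}")
        return False
```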
### Public Methods
* PipBatch.install(package_info, extra_index_url=None, override_policy=False):
- **Purpose**: Perform policy-based pip package installation (individual package basis)
- **Parameters**:
- package_info: Package name and version spec (e.g., "numpy", "numpy==1.26.0", "numpy>=1.20.0")
- extra_index_url: Additional package repository URL (optional)
- override_policy: If True, skip policy application and install directly (default: False)
- **Execution flow**:
1. Call get_pip_policy() to get policy (lazy loading)
2. Use self._parse_package_spec() to split package_info into package name and version spec
3. Call self._get_installed_packages() to get cached installed package information
4. If override_policy=True → Jump directly to step 10 (skip policy)
5. Get policy for package name from policy dictionary
6. If no policy → Jump to step 10 (default installation)
7. **apply_first_match policy evaluation** (exclusive - only first match):
- Iterate through policy list top-to-bottom
- Evaluate each policy's condition with self._evaluate_condition()
- When first condition-satisfying policy found:
* type="skip": Log reason and return False (don't install)
* type="force_version": Change package_info version to policy's version
* type="replace": Completely replace package_info with policy's replacement package
- If no matching policy, keep original package_info
8. **apply_all_matches policy evaluation** (cumulative - all matches):
- Iterate through policy list top-to-bottom
- Evaluate each policy's condition with self._evaluate_condition()
- For all condition-satisfying policies:
* type="pin_dependencies":
- For each package in pinned_packages, query current version with self._installed_cache.get(pkg)
- Pin to installed version in "package==version" format
- Add to installation package list
* type="install_with":
- Add additional_packages to installation package list
* type="warn":
- Output message as warning log
- If allow_continue=false, wait for user confirmation (optional)
9. Compose final installation package list:
- Main package (modified/replaced package_info)
- Packages pinned by pin_dependencies
- Packages added by install_with
10. Handle extra_index_url:
- Parameter-passed extra_index_url takes priority
- Otherwise use extra_index_url defined in policy
11. Generate pip/uv command using manager_util.make_pip_cmd():
- Basic format: ["pip", "install"] + package list
- If extra_index_url exists: add ["--extra-index-url", url]
12. Execute command via subprocess
13. Handle installation failure:
- If pin_dependencies's on_failure="retry_without_pin":
* Retry with only main package excluding pinned packages
- If on_failure="fail":
* Raise exception and abort installation
- Otherwise: Log warning and continue
14. On successful installation:
- Call self._invalidate_cache() (invalidate cache)
- Log info if reason exists
- Return True
- **Return value**: Installation success status (bool)
- **Exception handling**:
- Policy parsing failure: Log warning and proceed with default installation
- Installation failure: Log error and raise exception (depends on on_failure setting)
- **Notes**:
- restore policy not handled in this method (batch-processed in ensure_installed())
- uninstall policy not handled in this method (batch-processed in ensure_not_installed())
* PipBatch.ensure_not_installed():
- **Purpose**: Iterate through all policies and remove all packages satisfying uninstall conditions (batch processing)
- **Parameters**: None
- **Execution flow**:
1. Call get_pip_policy() to get policy (lazy loading)
2. Call self._get_installed_packages() to get cached installed package information
3. Iterate through all package policies in policy dictionary:
a. Check if each package has uninstall policy
b. If uninstall policy exists:
- Iterate through uninstall policy list top-to-bottom
- Evaluate each policy's condition with self._evaluate_condition()
- When first condition-satisfying policy found:
* Check if target package exists in self._installed_cache
* If installed:
- Generate command with manager_util.make_pip_cmd(["uninstall", "-y", target])
- Execute pip uninstall via subprocess
- Log reason in info log
- Add to removed package list
- Remove package from self._installed_cache
* Move to next package (only first match per package)
4. Complete iteration through all package policies
- **Return value**: List of removed package names (list of str)
- **Exception handling**:
- Individual package removal failure: Log warning only and continue to next package
- **Call timing**:
- Called at batch operation start to pre-remove conflicting packages
- Called before multiple package installations to clean installation environment
* PipBatch.ensure_installed():
- **Purpose**: Iterate through all policies and restore all packages satisfying restore conditions (batch processing)
- **Parameters**: None
- **Execution flow**:
1. Call get_pip_policy() to get policy (lazy loading)
2. Call self._get_installed_packages() to get cached installed package information
3. Iterate through all package policies in policy dictionary:
a. Check if each package has restore policy
b. If restore policy exists:
- Iterate through restore policy list top-to-bottom
- Evaluate each policy's condition with self._evaluate_condition()
- When first condition-satisfying policy found:
* Get target package name (policy's "target" field)
* Get version specified in version field
* Check current version with self._installed_cache.get(target)
* If current version is None or different from specified version:
- Compose as package_spec = f"{target}=={version}" format
- Generate command with manager_util.make_pip_cmd(["install", package_spec])
- If extra_index_url exists, add ["--extra-index-url", url]
- Execute pip install via subprocess
- Log reason in info log
- Add to restored package list
- Update cache: self._installed_cache[target] = version
* Move to next package (only first match per package)
4. Complete iteration through all package policies
- **Return value**: List of restored package names (list of str)
- **Exception handling**:
- Individual package installation failure: Log warning only and continue to next package
- **Call timing**:
- Called at batch operation end to restore essential package versions
- Called for environment verification after multiple package installations
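The public-method flows above reduce to roughly the following shape. `plan_install` covers steps 7-9 of install() (command construction, extra_index_url handling, and on_failure retries are left out), and `ensure_installed` covers the batch restore pass; ensure_not_installed() follows the same per-package, first-match-only loop but issues `pip uninstall -y` instead. Every name except the policy keys and `make_pip_cmd` is illustrative.
```python
import logging
import subprocess


def plan_install(package_info, policy, installed, parse_spec, evaluate):
    """Resolve one install request against its policy; returns (packages_to_install, skip_reason)."""
    name, _spec = parse_spec(package_info)
    rules = policy.get(name, {})

    # apply_first_match: the first satisfied rule decides how (or whether) to install (exclusive).
    for rule in rules.get("apply_first_match", []):
        if not evaluate(rule.get("condition"), name, installed):
            continue
        if rule["type"] == "skip":
            return [], rule.get("reason", "skipped by policy")
        if rule["type"] == "force_version":
            package_info = f"{name}=={rule['version']}"
        elif rule["type"] == "replace":
            package_info = rule["replacement"]
        break

    packages = [package_info]

    # apply_all_matches: every satisfied rule adds to the install list cumulatively.
    for rule in rules.get("apply_all_matches", []):
        if not evaluate(rule.get("condition"), name, installed):
            continue
        if rule["type"] == "pin_dependencies":
            packages += [f"{p}=={installed[p]}" for p in rule["pinned_packages"] if p in installed]
        elif rule["type"] == "install_with":
            packages += rule.get("additional_packages", [])
        # "warn" rules would only emit a log message here

    return packages, None


def ensure_installed(policy, installed, make_pip_cmd, evaluate):
    """Apply the first matching `restore` rule of every package policy; returns the restored package names."""
    restored = []
    for name, rules in policy.items():
        for rule in rules.get("restore", []):
            if not evaluate(rule.get("condition"), name, installed):
                continue
            target, version = rule["target"], rule["version"]
            if installed.get(target) != version:
                cmd = make_pip_cmd(["install", f"{target}=={version}"])
                if rule.get("extra_index_url"):
                    cmd += ["--extra-index-url", rule["extra_index_url"]]
                try:
                    subprocess.check_call(cmd)
                    installed[target] = version
                    restored.append(target)
                except subprocess.CalledProcessError as e:
                    logging.warning(f"[ComfyUI-Manager] restore failed for '{target}': {e}")
            break  # only the first matching rule per package is applied
    return restored
```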
## pip-policy.json Examples
### Base Policy File ({manager_util.comfyui_manager_path}/pip-policy.json)
```json
{
"torch": {
"apply_first_match": [
{
"type": "skip",
"reason": "PyTorch installation should be managed manually due to CUDA compatibility"
}
]
},
"opencv-python": {
"apply_first_match": [
{
"type": "replace",
"replacement": "opencv-contrib-python",
"version": ">=4.8.0",
"reason": "opencv-contrib-python includes all opencv-python features plus extras"
}
]
},
"PIL": {
"apply_first_match": [
{
"type": "replace",
"replacement": "Pillow",
"reason": "PIL is deprecated, use Pillow instead"
}
]
},
"click": {
"apply_first_match": [
{
"condition": {
"type": "installed",
"package": "colorama",
"spec": "<0.5.0"
},
"type": "force_version",
"version": "8.1.3",
"reason": "click 8.1.3 compatible with colorama <0.5"
}
],
"apply_all_matches": [
{
"type": "pin_dependencies",
"pinned_packages": ["colorama"],
"reason": "Prevent colorama upgrade that may break compatibility"
}
]
},
"requests": {
"apply_all_matches": [
{
"type": "pin_dependencies",
"pinned_packages": ["urllib3", "certifi", "charset-normalizer"],
"on_failure": "retry_without_pin",
"reason": "Prevent urllib3 from upgrading to 2.x which has breaking changes"
}
]
},
"six": {
"restore": [
{
"target": "six",
"version": "1.16.0",
"reason": "six must be maintained at 1.16.0 for compatibility"
}
]
},
"urllib3": {
"restore": [
{
"condition": {
"type": "installed",
"spec": "!=1.26.15"
},
"target": "urllib3",
"version": "1.26.15",
"reason": "urllib3 must be 1.26.15 for compatibility with legacy code"
}
]
},
"onnxruntime": {
"apply_first_match": [
{
"condition": {
"type": "platform",
"os": "linux",
"has_gpu": true
},
"type": "replace",
"replacement": "onnxruntime-gpu",
"reason": "Use GPU version on Linux with CUDA"
}
]
},
"legacy-custom-node-package": {
"apply_first_match": [
{
"condition": {
"type": "platform",
"comfyui_version": "<1.0.0"
},
"type": "force_version",
"version": "0.9.0",
"reason": "legacy-custom-node-package 0.9.0 is compatible with ComfyUI <1.0.0"
},
{
"condition": {
"type": "platform",
"comfyui_version": ">=1.0.0"
},
"type": "force_version",
"version": "1.5.0",
"reason": "legacy-custom-node-package 1.5.0 is required for ComfyUI >=1.0.0"
}
]
},
"tensorflow": {
"apply_all_matches": [
{
"condition": {
"type": "installed",
"package": "torch"
},
"type": "warn",
"message": "Installing TensorFlow alongside PyTorch may cause CUDA conflicts",
"allow_continue": true
}
]
},
"some-package": {
"uninstall": [
{
"condition": {
"type": "installed",
"package": "conflicting-package",
"spec": ">=2.0.0"
},
"target": "conflicting-package",
"reason": "conflicting-package >=2.0.0 conflicts with some-package"
}
]
},
"banned-malicious-package": {
"uninstall": [
{
"target": "banned-malicious-package",
"reason": "Security vulnerability CVE-2024-XXXXX, always remove if attempting to install"
}
]
},
"critical-package": {
"restore": [
{
"condition": {
"type": "installed",
"package": "critical-package",
"spec": "!=1.2.3"
},
"target": "critical-package",
"version": "1.2.3",
"extra_index_url": "https://custom-repo.example.com/simple",
"reason": "critical-package must be version 1.2.3, restore if different or missing"
}
]
},
"stable-package": {
"apply_first_match": [
{
"condition": {
"type": "installed",
"package": "critical-dependency",
"spec": ">=2.0.0"
},
"type": "force_version",
"version": "1.5.0",
"extra_index_url": "https://custom-repo.example.com/simple",
"reason": "stable-package 1.5.0 is required when critical-dependency >=2.0.0 is installed"
}
]
},
"new-experimental-package": {
"apply_all_matches": [
{
"type": "pin_dependencies",
"pinned_packages": ["numpy", "pandas", "scipy"],
"on_failure": "retry_without_pin",
"reason": "new-experimental-package may upgrade numpy/pandas/scipy, pin them to prevent breakage"
}
]
},
"pytorch-addon": {
"apply_all_matches": [
{
"condition": {
"type": "installed",
"package": "torch",
"spec": ">=2.0.0"
},
"type": "pin_dependencies",
"pinned_packages": ["torch", "torchvision", "torchaudio"],
"on_failure": "fail",
"reason": "pytorch-addon must not change PyTorch ecosystem versions"
}
]
}
}
```
### Policy Structure Schema
```json
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"patternProperties": {
"^.*$": {
"type": "object",
"properties": {
"uninstall": {
"type": "array",
"description": "When condition satisfied (or always if no condition), remove package and terminate",
"items": {
"type": "object",
"required": ["target"],
"properties": {
"condition": {
"type": "object",
"description": "Optional: always remove if absent",
"required": ["type"],
"properties": {
"type": {"enum": ["installed", "platform"]},
"package": {"type": "string", "description": "Optional: defaults to self"},
"spec": {"type": "string", "description": "Optional: version condition"},
"os": {"type": "string"},
"has_gpu": {"type": "boolean"},
"comfyui_version": {"type": "string"}
}
},
"target": {
"type": "string",
"description": "Package name to remove"
},
"reason": {"type": "string"}
}
}
},
"restore": {
"type": "array",
"description": "When condition satisfied (or always if no condition), restore package and terminate",
"items": {
"type": "object",
"required": ["target", "version"],
"properties": {
"condition": {
"type": "object",
"description": "Optional: always restore if absent",
"required": ["type"],
"properties": {
"type": {"enum": ["installed", "platform"]},
"package": {"type": "string", "description": "Optional: defaults to self"},
"spec": {"type": "string", "description": "Optional: version condition"},
"os": {"type": "string"},
"has_gpu": {"type": "boolean"},
"comfyui_version": {"type": "string"}
}
},
"target": {
"type": "string",
"description": "Package name to restore"
},
"version": {
"type": "string",
"description": "Version to restore"
},
"extra_index_url": {"type": "string"},
"reason": {"type": "string"}
}
}
},
"apply_first_match": {
"type": "array",
"description": "Execute only first condition-satisfying policy (exclusive)",
"items": {
"type": "object",
"required": ["type"],
"properties": {
"condition": {
"type": "object",
"description": "Optional: always apply if absent",
"required": ["type"],
"properties": {
"type": {"enum": ["installed", "platform"]},
"package": {"type": "string", "description": "Optional: defaults to self"},
"spec": {"type": "string", "description": "Optional: version condition"},
"os": {"type": "string"},
"has_gpu": {"type": "boolean"},
"comfyui_version": {"type": "string"}
}
},
"type": {
"enum": ["skip", "force_version", "replace"],
"description": "Exclusive action: determines installation method"
},
"version": {"type": "string"},
"replacement": {"type": "string"},
"extra_index_url": {"type": "string"},
"reason": {"type": "string"}
}
}
},
"apply_all_matches": {
"type": "array",
"description": "Execute all condition-satisfying policies (cumulative)",
"items": {
"type": "object",
"required": ["type"],
"properties": {
"condition": {
"type": "object",
"description": "Optional: always apply if absent",
"required": ["type"],
"properties": {
"type": {"enum": ["installed", "platform"]},
"package": {"type": "string", "description": "Optional: defaults to self"},
"spec": {"type": "string", "description": "Optional: version condition"},
"os": {"type": "string"},
"has_gpu": {"type": "boolean"},
"comfyui_version": {"type": "string"}
}
},
"type": {
"enum": ["pin_dependencies", "install_with", "warn"],
"description": "Cumulative action: adds installation options"
},
"pinned_packages": {
"type": "array",
"items": {"type": "string"}
},
"on_failure": {"enum": ["fail", "retry_without_pin"]},
"additional_packages": {"type": "array"},
"message": {"type": "string"},
"allow_continue": {"type": "boolean"},
"reason": {"type": "string"}
}
}
}
}
}
}
}
```
## Error Handling
* Default behavior when errors occur during policy execution:
- Log error and continue
- Treat as an installation failure only when pin_dependencies is configured with on_failure="fail"
- Otherwise, log a warning and attempt the originally requested installation
* pip_install: Performs pip package installation
- Use manager_util.make_pip_cmd to generate commands so that either uv or pip can be applied selectively
- Provide functionality to skip policy application through override_policy flag
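For illustration, a hypothetical batch that exercises the override_policy flag from the bullet above, using the PipBatch API specified earlier in this document:
```python
with PipBatch() as batch:
    batch.ensure_not_installed()                           # policy-driven pre-clean (uninstall rules)
    batch.install("numpy>=1.20")                           # normal, policy-aware install
    batch.install("torch==2.4.0", override_policy=True)    # bypass the 'skip' rule for torch
    batch.ensure_installed()                               # policy-driven post-restore (restore rules)
```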

View File

@@ -1,614 +0,0 @@
# pip_util.py Implementation Plan Document
## 1. Project Overview
### Purpose
Implement a policy-based pip package management system that minimizes breaking existing installed dependencies
### Core Features
- JSON-based policy file loading and merging (lazy loading)
- Per-package installation policy evaluation and application
- Performance optimization through batch-level pip freeze caching
- Automated conditional package removal/restoration
### Technology Stack
- Python 3.x
- packaging library (version comparison)
- subprocess (pip command execution)
- json (policy file parsing)
---
## 2. Architecture Design
### 2.1 Global Policy Management (Lazy Loading Pattern)
```
┌─────────────────────────────────────┐
│ get_pip_policy() │
│ - Auto-loads policy files on │
│ first call via lazy loading │
│ - Returns cache on subsequent calls│
└─────────────────────────────────────┘
┌─────────────────────────────────────┐
│ _pip_policy_cache (global) │
│ - Merged policy dictionary │
│ - {package_name: policy_object} │
└─────────────────────────────────────┘
```
### 2.2 Batch Operation Class (PipBatch)
```
┌─────────────────────────────────────┐
│ PipBatch (Context Manager) │
│ ┌───────────────────────────────┐ │
│ │ _installed_cache │ │
│ │ - Caches pip freeze results │ │
│ │ - {package: version} │ │
│ └───────────────────────────────┘ │
│ │
│ Public Methods: │
│ ├─ install() │
│ ├─ ensure_not_installed() │
│ └─ ensure_installed() │
│ │
│ Private Methods: │
│ ├─ _get_installed_packages() │
│ ├─ _refresh_installed_cache() │
│ ├─ _invalidate_cache() │
│ ├─ _parse_package_spec() │
│ └─ _evaluate_condition() │
└─────────────────────────────────────┘
```
### 2.3 Policy Evaluation Flow
```
install("numpy>=1.20") called
get_pip_policy() → Load policy (lazy)
Parse package name: "numpy"
Look up "numpy" policy in policy dictionary
├─ Evaluate apply_first_match (exclusive)
│ ├─ skip → Return False (don't install)
│ ├─ force_version → Change version
│ └─ replace → Replace package
├─ Evaluate apply_all_matches (cumulative)
│ ├─ pin_dependencies → Pin dependencies
│ ├─ install_with → Additional packages
│ └─ warn → Warning log
Execute pip install
Invalidate cache (_invalidate_cache)
```
---
## 3. Phase-by-Phase Implementation Plan
### Phase 1: Core Infrastructure Setup (2-3 hours)
#### Task 1.1: Project Structure and Dependency Setup (30 min)
**Implementation**:
- Create `pip_util.py` file
- Add necessary import statements
```python
import json
import logging
import platform
import re
import subprocess
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from packaging.specifiers import SpecifierSet
from packaging.version import Version
from . import manager_util, context
```
- Set up logging
```python
logger = logging.getLogger(__name__)
```
**Validation**:
- Module loads without import errors
- Logger works correctly
#### Task 1.2: Global Variable and get_pip_policy() Implementation (1 hour)
**Implementation**:
- Declare global variable
```python
_pip_policy_cache: Optional[Dict] = None
```
- Implement `get_pip_policy()` function
- Check cache and early return
- Read base policy file (`{manager_util.comfyui_manager_path}/pip-policy.json`)
- Read user policy file (`{context.manager_files_path}/pip-policy.user.json`)
- Create file if doesn't exist (for user policy)
- Merge policies (complete package-level replacement)
- Save to cache and return
**Exception Handling**:
- `FileNotFoundError`: File not found → Use empty dictionary
- `json.JSONDecodeError`: JSON parse failure → Warning log + empty dictionary
- General exception: Warning log + empty dictionary
**Validation**:
- Returns empty dictionary when policy files don't exist
- Returns correct merged result when policy files exist
- Confirms cache usage on second call (load log appears only once)
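A minimal sketch of the package-level merge described above, mirroring the merge loop in the module further below (a user entry replaces the entire base entry for that package, with no deep merge; keys starting with `_` are treated as metadata):
```python
# Sketch of the package-level merge: a user entry replaces the whole base entry.
base_policy = {
    "numpy": {"apply_first_match": [{"type": "skip", "reason": "example"}]},
    "scipy": {"apply_all_matches": [{"type": "warn", "message": "example"}]},
}
user_policy = {
    "_comment": "metadata keys starting with '_' are ignored",
    "numpy": {"apply_all_matches": [{"type": "warn", "message": "user override"}]},
}

merged = base_policy.copy()
for name, entry in user_policy.items():
    if name.startswith("_"):   # skip metadata fields such as _comment
        continue
    merged[name] = entry       # whole-package replacement, not a deep merge

assert "apply_first_match" not in merged["numpy"]  # base rules for numpy were replaced
assert "scipy" in merged                           # untouched packages are preserved
```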
#### Task 1.3: PipBatch Class Basic Structure (30 min)
**Implementation**:
- Class definition and `__init__`
```python
class PipBatch:
def __init__(self):
self._installed_cache: Optional[Dict[str, str]] = None
```
- Context manager methods (`__enter__`, `__exit__`)
```python
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._installed_cache = None
return False
```
**Validation**:
- `with PipBatch() as batch:` syntax works correctly
- Cache cleared on `__exit__` call
---
### Phase 2: Caching and Utility Methods (2-3 hours)
#### Task 2.1: pip freeze Caching Methods (1 hour)
**Implementation**:
- Implement `_refresh_installed_cache()`
- Call `manager_util.make_pip_cmd(["freeze"])`
- Execute command via subprocess
- Parse output (package==version format)
- Exclude editable packages (-e) and comments (#)
- Convert to dictionary and store in `self._installed_cache`
- Implement `_get_installed_packages()`
- Call `_refresh_installed_cache()` if cache is None
- Return cache
- Implement `_invalidate_cache()`
- Set `self._installed_cache = None`
**Exception Handling**:
- `subprocess.CalledProcessError`: pip freeze failure → Empty dictionary
- Parse error: Ignore line + warning log
**Validation**:
- pip freeze results correctly parsed into dictionary
- New load occurs after cache invalidation and re-query
#### Task 2.2: Package Spec Parsing (30 min)
**Implementation**:
- Implement `_parse_package_spec(package_info)`
- Regex pattern: `^([a-zA-Z0-9_-]+)([><=!~]+.*)?$`
- Split package name and version spec
- Return tuple: `(package_name, version_spec)`
**Exception Handling**:
- Parse failure: Raise `ValueError`
**Validation**:
- "numpy" → ("numpy", None)
- "numpy==1.26.0" → ("numpy", "==1.26.0")
- "pandas>=2.0.0" → ("pandas", ">=2.0.0")
- Invalid format → ValueError
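A small sketch of the regex-based parser outlined above; note that the shipped `pip_util.py` further below switches to `packaging.requirements.Requirement` for full PEP 508 support, but the expected results are the same for these simple cases:
```python
import re
from typing import Optional, Tuple

_SPEC_RE = re.compile(r"^([a-zA-Z0-9_-]+)([><=!~]+.*)?$")

def parse_package_spec(package_info: str) -> Tuple[str, Optional[str]]:
    """Split 'name' / 'name<spec>' into (name, spec); raise ValueError otherwise."""
    match = _SPEC_RE.match(package_info.strip())
    if not match:
        raise ValueError(f"Invalid package spec: {package_info}")
    return match.group(1), match.group(2)

assert parse_package_spec("numpy") == ("numpy", None)
assert parse_package_spec("numpy==1.26.0") == ("numpy", "==1.26.0")
assert parse_package_spec("pandas>=2.0.0") == ("pandas", ">=2.0.0")
```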
#### Task 2.3: Condition Evaluation Method (1.5 hours)
**Implementation**:
- Implement `_evaluate_condition(condition, package_name, installed_packages)`
**Handling by Condition Type**:
1. **condition is None**: Always return True
2. **"installed" type**:
- `target_package = condition.get("package", package_name)`
- Check version with `installed_packages.get(target_package)`
- If spec exists, compare using `packaging.specifiers.SpecifierSet`
- If no spec, only check installation status
3. **"platform" type**:
- `os` condition: Compare with `platform.system()`
- `has_gpu` condition: Check `torch.cuda.is_available()` (False if torch unavailable)
- `comfyui_version` condition: TODO (currently warning)
**Exception Handling**:
- Version comparison failure: Warning log + return False
- Unknown condition type: Warning log + return False
**Validation**:
- Write test cases for each condition type
- Verify edge case handling (torch not installed, invalid version format, etc.)
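A sketch of the `installed` condition check described above (the `platform` branch follows the same pattern using `platform.system()` and `torch.cuda.is_available()`):
```python
from typing import Dict

from packaging.specifiers import SpecifierSet
from packaging.version import Version

def installed_condition_met(condition: Dict, package_name: str,
                            installed: Dict[str, str]) -> bool:
    """Evaluate an 'installed'-type condition against a {package: version} map."""
    target = condition.get("package", package_name)  # defaults to the current package
    version = installed.get(target)
    if version is None:                              # target not installed at all
        return False
    spec = condition.get("spec")
    if not spec:                                     # presence check only
        return True
    return Version(version) in SpecifierSet(spec)

# Example: numba's policy asks whether numpy >= 1.20 is present
cond = {"type": "installed", "package": "numpy", "spec": ">=1.20"}
assert installed_condition_met(cond, "numba", {"numpy": "1.26.0"}) is True
assert installed_condition_met(cond, "numba", {"numpy": "1.19.5"}) is False
```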
---
### Phase 3: Core Installation Logic Implementation (4-5 hours)
#### Task 3.1: install() Method - Basic Flow (2 hours)
**Implementation**:
1. Parse package spec (`_parse_package_spec`)
2. Query installed package cache (`_get_installed_packages`)
3. If `override_policy=True`, install directly and return
4. Call `get_pip_policy()` to load policy
5. Default installation if no policy exists
**Validation**:
- Verify policy ignored when override_policy=True
- Verify default installation for packages without policy
#### Task 3.2: install() Method - apply_first_match Policy (1 hour)
**Implementation**:
- Iterate through policy list top-to-bottom
- Evaluate each policy's condition (`_evaluate_condition`)
- When condition satisfied:
- **skip**: Log reason and return False
- **force_version**: Force version change
- **replace**: Replace package
- Apply only first match (break)
**Validation**:
- Verify installation blocked by skip policy
- Verify version changed by force_version
- Verify package replaced by replace
#### Task 3.3: install() Method - apply_all_matches Policy (1 hour)
**Implementation**:
- Iterate through policy list top-to-bottom
- Evaluate each policy's condition
- Apply all condition-satisfying policies:
- **pin_dependencies**: Pin to installed version
- **install_with**: Add to additional package list
- **warn**: Output warning log
**Validation**:
- Verify multiple policies applied simultaneously
- Verify version pinning by pin_dependencies
- Verify additional package installation by install_with
#### Task 3.4: install() Method - Installation Execution and Retry Logic (1 hour)
**Implementation**:
1. Compose final package list
2. Generate command using `manager_util.make_pip_cmd()`
3. Handle `extra_index_url`
4. Execute installation via subprocess
5. Handle failure based on on_failure setting:
- `retry_without_pin`: Retry without pins
- `fail`: Raise exception
- Other: Warning log
6. Invalidate cache on success
**Validation**:
- Verify normal installation
- Verify retry logic on pin failure
- Verify error handling
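For reference, this is how the finished `install()` is intended to be used. The import path `comfyui_manager.pip_util` is an assumption about the final package layout, and the package names and index URL below are placeholders:
```python
# Hypothetical usage of the Phase 3 install flow; import path and URLs are assumptions.
from comfyui_manager import pip_util

with pip_util.PipBatch() as batch:
    # Policy-mediated install: may be skipped, version-forced, replaced, or pinned.
    installed = batch.install("numpy>=1.20")

    # extra_index_url is forwarded to pip as --extra-index-url (example URL).
    batch.install("torch", extra_index_url="https://download.pytorch.org/whl/cu121")

    # Bypass policy evaluation entirely for a one-off package.
    batch.install("example-unmanaged-package", override_policy=True)

    print("numpy install executed:", installed)
```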
---
### Phase 4: Batch Operation Methods Implementation (2-3 hours)
#### Task 4.1: ensure_not_installed() Implementation (1.5 hours)
**Implementation**:
1. Call `get_pip_policy()`
2. Iterate through all package policies
3. Check each package's uninstall policy
4. When condition satisfied:
- Check if target package is installed
- If installed, execute `pip uninstall -y {target}`
- Remove from cache
- Add to removal list
5. Execute only first match (per package)
6. Return list of removed packages
**Exception Handling**:
- Individual package removal failure: Warning log + continue
**Validation**:
- Verify package removal by uninstall policy
- Verify batch removal of multiple packages
- Verify continued processing of other packages even on removal failure
#### Task 4.2: ensure_installed() Implementation (1.5 hours)
**Implementation**:
1. Call `get_pip_policy()`
2. Iterate through all package policies
3. Check each package's restore policy
4. When condition satisfied:
- Check target package's current version
- If absent or different version:
- Execute `pip install {target}=={version}`
- Add extra_index_url if present
- Update cache
- Add to restoration list
5. Execute only first match (per package)
6. Return list of restored packages
**Exception Handling**:
- Individual package installation failure: Warning log + continue
**Validation**:
- Verify package restoration by restore policy
- Verify reinstallation on version mismatch
- Verify continued processing of other packages even on restoration failure
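A hypothetical policy entry exercising both batch methods above; the keys mirror the fields read by `ensure_not_installed()` (`condition`, `target`, `reason`) and `ensure_installed()` (`condition`, `target`, `version`, `reason`, `extra_index_url`), while the package names and version are examples only:
```python
# Hypothetical uninstall/restore policy entry; package names and versions are examples.
example_entry = {
    "opencv-python": {
        "uninstall": [
            {
                # Remove the conflicting wheel whenever the headless build is present.
                "condition": {"type": "installed", "package": "opencv-python-headless"},
                "target": "opencv-python",
                "reason": "Conflicts with opencv-python-headless",
            }
        ],
        "restore": [
            {
                # No condition: always ensure a known-good headless build after the batch.
                "target": "opencv-python-headless",
                "version": "4.10.0.84",
                "reason": "Restore a working headless OpenCV after the batch",
            }
        ],
    }
}

print(example_entry["opencv-python"]["restore"][0]["target"])
```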
---
## 4. Testing Strategy
### 4.1 Unit Tests
#### Policy Loading Tests
```python
def test_get_pip_policy_empty():
"""Returns empty dictionary when policy files don't exist"""
def test_get_pip_policy_merge():
"""Correctly merges base and user policies"""
def test_get_pip_policy_cache():
"""Uses cache on second call"""
```
#### Package Parsing Tests
```python
def test_parse_package_spec_simple():
"""'numpy' → ('numpy', None)"""
def test_parse_package_spec_version():
"""'numpy==1.26.0' → ('numpy', '==1.26.0')"""
def test_parse_package_spec_range():
"""'pandas>=2.0.0' → ('pandas', '>=2.0.0')"""
def test_parse_package_spec_invalid():
"""Invalid format → ValueError"""
```
#### Condition Evaluation Tests
```python
def test_evaluate_condition_none():
"""None condition → True"""
def test_evaluate_condition_installed():
"""Evaluates installed package condition"""
def test_evaluate_condition_platform():
"""Evaluates platform condition"""
```
### 4.2 Integration Tests
#### Installation Policy Tests
```python
def test_install_with_skip_policy():
"""Blocks installation with skip policy"""
def test_install_with_force_version():
"""Changes version with force_version policy"""
def test_install_with_replace():
"""Replaces package with replace policy"""
def test_install_with_pin_dependencies():
"""Pins versions with pin_dependencies"""
```
#### Batch Operation Tests
```python
def test_ensure_not_installed():
"""Removes packages with uninstall policy"""
def test_ensure_installed():
"""Restores packages with restore policy"""
def test_batch_workflow():
"""Tests complete batch workflow"""
```
### 4.3 Edge Case Tests
```python
def test_install_without_policy():
"""Default installation for packages without policy"""
def test_install_override_policy():
"""Ignores policy with override_policy=True"""
def test_pip_freeze_failure():
"""Handles empty cache on pip freeze failure"""
def test_json_parse_error():
"""Handles malformed JSON files"""
def test_subprocess_failure():
"""Exception handling when pip command fails"""
```
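One way a few of these stubs might be fleshed out with pytest; the import path is an assumption about the final package layout, and `monkeypatch` keeps the policy-cache test free of real file I/O:
```python
import pytest

from comfyui_manager import pip_util  # assumed import path

def test_parse_package_spec_version():
    with pip_util.PipBatch() as batch:
        assert batch._parse_package_spec("numpy==1.26.0") == ("numpy", "==1.26.0")

def test_parse_package_spec_invalid():
    with pip_util.PipBatch() as batch:
        with pytest.raises(ValueError):
            batch._parse_package_spec("not a valid spec!!")

def test_get_pip_policy_cache(monkeypatch):
    # Pre-populate the module-level cache and confirm it is returned untouched.
    monkeypatch.setattr(pip_util, "_pip_policy_cache", {"numpy": {}})
    assert pip_util.get_pip_policy() == {"numpy": {}}
```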
---
## 5. Error Handling Strategy
### 5.1 Policy Loading Errors
- **File not found**: Warning log + empty dictionary
- **JSON parse failure**: Error log + empty dictionary
- **No read permission**: Warning log + empty dictionary
### 5.2 Package Installation Errors
- **pip command failure**: Depends on on_failure setting
- `retry_without_pin`: Retry
- `fail`: Raise exception
- Other: Warning log
- **Invalid package spec**: Raise ValueError
### 5.3 Batch Operation Errors
- **Individual package failure**: Warning log + continue to next package
- **pip freeze failure**: Empty dictionary + warning log
---
## 6. Performance Optimization
### 6.1 Caching Strategy
- **Policy cache**: Reused program-wide via global variable
- **pip freeze cache**: Reused per batch, invalidated after install/remove
- **Lazy loading**: Load only when needed
### 6.2 Parallel Processing Considerations
- Current implementation is not thread-safe
- Consider adding threading.Lock if needed
- Batch operations execute sequentially only
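A possible shape for the Lock-based improvement mentioned above (not part of the current implementation); `_load_policies()` here is a stand-in for the file-loading and merging logic already shown in `get_pip_policy()`:
```python
import threading
from typing import Dict, Optional

_pip_policy_cache: Optional[Dict] = None
_policy_lock = threading.Lock()

def _load_policies() -> Dict:
    # Stand-in for the real base/user policy file loading and merging.
    return {}

def get_pip_policy_threadsafe() -> Dict:
    """Double-checked locking around the existing lazy loader."""
    global _pip_policy_cache
    if _pip_policy_cache is not None:      # fast path: already loaded
        return _pip_policy_cache
    with _policy_lock:
        if _pip_policy_cache is None:      # re-check under the lock
            _pip_policy_cache = _load_policies()
        return _pip_policy_cache
```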
---
## 7. Documentation Requirements
### 7.1 Code Documentation
- Docstrings required for all public methods
- Specify parameters, return values, and exceptions
- Include usage examples
### 7.2 User Guide
- Explain `pip-policy.json` structure
- Policy writing examples
- Usage pattern examples
### 7.3 Developer Guide
- Architecture explanation
- Extension methods
- Test execution methods
---
## 8. Deployment Checklist
### 8.1 Code Quality
- [ ] All unit tests pass
- [ ] All integration tests pass
- [ ] Code coverage ≥80%
- [ ] No linting errors (flake8, pylint)
- [ ] Type hints complete (mypy passes)
### 8.2 Documentation
- [ ] README.md written
- [ ] API documentation generated
- [ ] Example policy files written
- [ ] Usage guide written
### 8.3 Performance Verification
- [ ] Policy loading performance measured (<100ms)
- [ ] pip freeze caching effectiveness verified (≥50% speed improvement)
- [ ] Memory usage confirmed (<10MB)
### 8.4 Security Verification
- [ ] Input validation complete
- [ ] Path traversal prevention
- [ ] Command injection prevention
- [ ] JSON parsing safety confirmed
---
## 9. Future Improvements
### 9.1 Short-term (1-2 weeks)
- Implement ComfyUI version check
- Implement user confirmation prompt (allow_continue=false)
- Thread-safe improvements (add Lock)
### 9.2 Mid-term (1-2 months)
- Add policy validation tools
- Policy migration tools
- More detailed logging and debugging options
### 9.3 Long-term (3-6 months)
- Web UI for policy management
- Provide policy templates
- Community policy sharing system
---
## 10. Risks and Mitigation Strategies
### Risk 1: Policy Conflicts
**Description**: Policies for different packages may conflict
**Mitigation**: Develop policy validation tools, conflict detection algorithm
### Risk 2: pip Version Compatibility
**Description**: Must work across various pip versions
**Mitigation**: Test on multiple pip versions, version-specific branching
### Risk 3: Performance Degradation
**Description**: Installation speed may decrease due to policy evaluation
**Mitigation**: Optimize caching, minimize condition evaluation
### Risk 4: Policy Misconfiguration
**Description**: Users may write incorrect policies
**Mitigation**: JSON schema validation, provide examples and guides
---
## 11. Timeline
### Week 1
- Phase 1: Core Infrastructure Setup (Day 1-2)
- Phase 2: Caching and Utility Methods (Day 3-4)
- Write unit tests (Day 5)
### Week 2
- Phase 3: Core Installation Logic Implementation (Day 1-3)
- Phase 4: Batch Operation Methods Implementation (Day 4-5)
### Week 3
- Integration and edge case testing (Day 1-2)
- Documentation (Day 3)
- Code review and refactoring (Day 4-5)
### Week 4
- Performance optimization (Day 1-2)
- Security verification (Day 3)
- Final testing and deployment preparation (Day 4-5)
---
## 12. Success Criteria
### Feature Completeness
- ✅ All policy types (uninstall, apply_first_match, apply_all_matches, restore) work correctly
- ✅ Policy merge logic works correctly
- ✅ Batch operations perform normally
### Quality Metrics
- ✅ Test coverage ≥80%
- ✅ All tests pass
- ✅ 0 linting errors
- ✅ 100% type hint completion
### Performance Metrics
- ✅ Policy loading <100ms
- ✅ ≥50% performance improvement with pip freeze caching
- ✅ Memory usage <10MB
### Usability
- ✅ Clear error messages
- ✅ Sufficient documentation
- ✅ Verified in real-world use cases

View File

@@ -1,629 +0,0 @@
"""
pip_util - Policy-based pip package management system
This module provides a policy-based approach to pip package installation
to minimize dependency conflicts and protect existing installed packages.
Usage:
# Batch operations (policy auto-loaded)
with PipBatch() as batch:
batch.ensure_not_installed()
batch.install("numpy>=1.20")
batch.install("pandas>=2.0")
batch.install("scipy>=1.7")
batch.ensure_installed()
"""
import json
import logging
import platform
import re
import subprocess
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import Version
from . import manager_util, context
logger = logging.getLogger(__name__)
# Global policy cache (lazy loaded on first access)
_pip_policy_cache: Optional[Dict] = None
def get_pip_policy() -> Dict:
"""
Get pip policy with lazy loading.
Returns the cached policy if available, otherwise loads it from files.
This function automatically loads the policy on first access.
Thread safety: This function is NOT thread-safe.
Ensure single-threaded access during initialization.
Returns:
Dictionary of merged pip policies
Example:
>>> policy = get_pip_policy()
>>> numpy_policy = policy.get("numpy", {})
"""
global _pip_policy_cache
# Return cached policy if already loaded
if _pip_policy_cache is not None:
logger.debug("Returning cached pip policy")
return _pip_policy_cache
logger.info("Loading pip policies...")
# Load base policy
base_policy = {}
base_policy_path = Path(manager_util.comfyui_manager_path) / "pip-policy.json"
try:
if base_policy_path.exists():
with open(base_policy_path, 'r', encoding='utf-8') as f:
base_policy = json.load(f)
logger.debug(f"Loaded base policy from {base_policy_path}")
else:
logger.warning(f"Base policy file not found: {base_policy_path}")
except json.JSONDecodeError as e:
logger.error(f"Failed to parse base policy JSON: {e}")
base_policy = {}
except Exception as e:
logger.warning(f"Failed to read base policy file: {e}")
base_policy = {}
# Load user policy
user_policy = {}
user_policy_path = Path(context.manager_files_path) / "pip-policy.user.json"
try:
if user_policy_path.exists():
with open(user_policy_path, 'r', encoding='utf-8') as f:
user_policy = json.load(f)
logger.debug(f"Loaded user policy from {user_policy_path}")
else:
# Create empty user policy file
user_policy_path.parent.mkdir(parents=True, exist_ok=True)
with open(user_policy_path, 'w', encoding='utf-8') as f:
json.dump({"_comment": "User-specific pip policy overrides"}, f, indent=2)
logger.info(f"Created empty user policy file: {user_policy_path}")
except json.JSONDecodeError as e:
logger.warning(f"Failed to parse user policy JSON: {e}")
user_policy = {}
except Exception as e:
logger.warning(f"Failed to read user policy file: {e}")
user_policy = {}
# Merge policies (package-level override: user completely replaces base per package)
merged_policy = base_policy.copy()
for package_name, package_policy in user_policy.items():
if package_name.startswith("_"): # Skip metadata fields like _comment
continue
merged_policy[package_name] = package_policy # Complete package replacement
# Store in global cache
_pip_policy_cache = merged_policy
logger.info(f"Policy loaded successfully: {len(_pip_policy_cache)} package policies")
return _pip_policy_cache
class PipBatch:
"""
Pip package installation batch manager.
Maintains pip freeze cache during a batch of operations for performance optimization.
Usage pattern:
# Batch operations (policy auto-loaded)
with PipBatch() as batch:
batch.ensure_not_installed()
batch.install("numpy>=1.20")
batch.install("pandas>=2.0")
batch.install("scipy>=1.7")
batch.ensure_installed()
Attributes:
_installed_cache: Cache of installed packages from pip freeze
"""
def __init__(self):
"""Initialize PipBatch with empty cache."""
self._installed_cache: Optional[Dict[str, str]] = None
def __enter__(self):
"""Enter context manager."""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exit context manager and clear cache."""
self._installed_cache = None
return False
def _refresh_installed_cache(self) -> None:
"""
Refresh the installed packages cache by executing pip freeze.
Parses pip freeze output into a dictionary of {package_name: version}.
Ignores editable packages and comments.
Raises:
No exceptions raised - failures result in empty cache with warning log
"""
try:
cmd = manager_util.make_pip_cmd(["freeze"])
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
packages = {}
for line in result.stdout.strip().split('\n'):
line = line.strip()
# Skip empty lines
if not line:
continue
# Skip editable packages (-e /path/to/package or -e git+https://...)
# Editable packages don't have version info and are typically development-only
if line.startswith('-e '):
continue
# Skip comments (defensive: pip freeze typically doesn't output comments,
# but this handles manually edited requirements.txt or future pip changes)
if line.startswith('#'):
continue
# Parse package==version
if '==' in line:
try:
package_name, version = line.split('==', 1)
packages[package_name.strip()] = version.strip()
except ValueError:
logger.warning(f"Failed to parse pip freeze line: {line}")
continue
self._installed_cache = packages
logger.debug(f"Refreshed installed packages cache: {len(packages)} packages")
except subprocess.CalledProcessError as e:
logger.warning(f"pip freeze failed: {e}")
self._installed_cache = {}
except Exception as e:
logger.warning(f"Failed to refresh installed packages cache: {e}")
self._installed_cache = {}
def _get_installed_packages(self) -> Dict[str, str]:
"""
Get cached installed packages, refresh if cache is None.
Returns:
Dictionary of {package_name: version}
"""
if self._installed_cache is None:
self._refresh_installed_cache()
return self._installed_cache
def _invalidate_cache(self) -> None:
"""
Invalidate the installed packages cache.
Should be called after install/uninstall operations.
"""
self._installed_cache = None
def _parse_package_spec(self, package_info: str) -> Tuple[str, Optional[str]]:
"""
Parse package spec string into package name and version spec using PEP 508.
Uses the packaging library to properly parse package specifications according to
PEP 508 standard, which handles complex cases like extras and multiple version
constraints that simple regex cannot handle correctly.
Args:
package_info: Package specification like "numpy", "numpy==1.26.0", "numpy>=1.20.0",
or complex specs like "package[extra]>=1.0,<2.0"
Returns:
Tuple of (package_name, version_spec)
Examples: ("numpy", "==1.26.0"), ("pandas", ">=2.0.0"), ("scipy", None)
Package names are normalized (e.g., "NumPy" -> "numpy")
Raises:
ValueError: If package_info cannot be parsed according to PEP 508
Example:
>>> batch._parse_package_spec("numpy>=1.20")
("numpy", ">=1.20")
>>> batch._parse_package_spec("requests[security]>=2.0,<3.0")
("requests", ">=2.0,<3.0")
"""
try:
req = Requirement(package_info)
package_name = req.name # Normalized package name
version_spec = str(req.specifier) if req.specifier else None
return package_name, version_spec
except Exception as e:
raise ValueError(f"Invalid package spec: {package_info}") from e
def _evaluate_condition(self, condition: Optional[Dict], package_name: str,
installed_packages: Dict[str, str]) -> bool:
"""
Evaluate policy condition and return whether it's satisfied.
Args:
condition: Policy condition object (dict) or None
package_name: Current package being processed
installed_packages: Dictionary of {package_name: version}
Returns:
True if condition is satisfied, False otherwise
None condition always returns True
Example:
>>> condition = {"type": "installed", "package": "numpy", "spec": ">=1.20"}
>>> batch._evaluate_condition(condition, "numba", {"numpy": "1.26.0"})
True
"""
# No condition means always satisfied
if condition is None:
return True
condition_type = condition.get("type")
if condition_type == "installed":
# Check if a package is installed with optional version spec
target_package = condition.get("package", package_name)
installed_version = installed_packages.get(target_package)
# Package not installed
if installed_version is None:
return False
# Check version spec if provided
spec = condition.get("spec")
if spec:
try:
specifier = SpecifierSet(spec)
return Version(installed_version) in specifier
except Exception as e:
logger.warning(f"Failed to compare version {installed_version} with spec {spec}: {e}")
return False
# Package is installed (no spec check)
return True
elif condition_type == "platform":
# Check platform conditions (os, has_gpu, comfyui_version)
conditions_met = True
# Check OS
if "os" in condition:
expected_os = condition["os"].lower()
actual_os = platform.system().lower()
if expected_os not in actual_os and actual_os not in expected_os:
conditions_met = False
# Check GPU availability
if "has_gpu" in condition:
expected_gpu = condition["has_gpu"]
try:
import torch
has_gpu = torch.cuda.is_available()
except ImportError:
has_gpu = False
if expected_gpu != has_gpu:
conditions_met = False
# Check ComfyUI version
if "comfyui_version" in condition:
# TODO: Implement ComfyUI version check
logger.warning("ComfyUI version condition not yet implemented")
return conditions_met
else:
logger.warning(f"Unknown condition type: {condition_type}")
return False
def install(self, package_info: str, extra_index_url: Optional[str] = None,
override_policy: bool = False) -> bool:
"""
Install a pip package with policy-based modifications.
Args:
package_info: Package specification (e.g., "numpy", "numpy==1.26.0", "numpy>=1.20.0")
extra_index_url: Additional package repository URL (optional)
override_policy: If True, skip policy application and install directly (default: False)
Returns:
True if installation succeeded, False if skipped by policy
Raises:
ValueError: If package_info cannot be parsed
subprocess.CalledProcessError: If installation fails (depending on policy on_failure settings)
Example:
>>> with PipBatch() as batch:
... batch.install("numpy>=1.20")
... batch.install("torch", override_policy=True)
"""
# Parse package spec
try:
package_name, version_spec = self._parse_package_spec(package_info)
except ValueError as e:
logger.error(f"Invalid package spec: {e}")
raise
# Get installed packages cache
installed_packages = self._get_installed_packages()
# Override policy - skip to direct installation
if override_policy:
logger.info(f"Installing {package_info} (policy override)")
cmd = manager_util.make_pip_cmd(["install", package_info])
if extra_index_url:
cmd.extend(["--extra-index-url", extra_index_url])
try:
subprocess.run(cmd, check=True)
self._invalidate_cache()
logger.info(f"Successfully installed {package_info}")
return True
except subprocess.CalledProcessError as e:
logger.error(f"Failed to install {package_info}: {e}")
raise
# Get policy (lazy loading)
pip_policy = get_pip_policy()
policy = pip_policy.get(package_name, {})
# If no policy, proceed with default installation
if not policy:
logger.debug(f"No policy found for {package_name}, proceeding with default installation")
cmd = manager_util.make_pip_cmd(["install", package_info])
if extra_index_url:
cmd.extend(["--extra-index-url", extra_index_url])
try:
subprocess.run(cmd, check=True)
self._invalidate_cache()
logger.info(f"Successfully installed {package_info}")
return True
except subprocess.CalledProcessError as e:
logger.error(f"Failed to install {package_info}: {e}")
raise
# Apply apply_first_match policies (exclusive - first match only)
final_package_info = package_info
final_extra_index_url = extra_index_url
policy_reason = None
apply_first_match = policy.get("apply_first_match", [])
for policy_item in apply_first_match:
condition = policy_item.get("condition")
if self._evaluate_condition(condition, package_name, installed_packages):
policy_type = policy_item.get("type")
if policy_type == "skip":
reason = policy_item.get("reason", "No reason provided")
logger.info(f"Skipping installation of {package_name}: {reason}")
return False
elif policy_type == "force_version":
forced_version = policy_item.get("version")
final_package_info = f"{package_name}=={forced_version}"
policy_reason = policy_item.get("reason")
if "extra_index_url" in policy_item:
final_extra_index_url = policy_item["extra_index_url"]
logger.info(f"Force version for {package_name}: {forced_version} ({policy_reason})")
break # First match only
elif policy_type == "replace":
replacement = policy_item.get("replacement")
replacement_version = policy_item.get("version", "")
if replacement_version:
final_package_info = f"{replacement}{replacement_version}"
else:
final_package_info = replacement
policy_reason = policy_item.get("reason")
if "extra_index_url" in policy_item:
final_extra_index_url = policy_item["extra_index_url"]
logger.info(f"Replacing {package_name} with {final_package_info}: {policy_reason}")
break # First match only
# Apply apply_all_matches policies (cumulative - all matches)
additional_packages = []
pinned_packages = []
pin_on_failure = "fail"
apply_all_matches = policy.get("apply_all_matches", [])
for policy_item in apply_all_matches:
condition = policy_item.get("condition")
if self._evaluate_condition(condition, package_name, installed_packages):
policy_type = policy_item.get("type")
if policy_type == "pin_dependencies":
pin_list = policy_item.get("pinned_packages", [])
for pkg in pin_list:
installed_version = installed_packages.get(pkg)
if installed_version:
pinned_packages.append(f"{pkg}=={installed_version}")
else:
logger.warning(f"Cannot pin {pkg}: not currently installed")
pin_on_failure = policy_item.get("on_failure", "fail")
reason = policy_item.get("reason", "")
logger.info(f"Pinning dependencies: {pinned_packages} ({reason})")
elif policy_type == "install_with":
additional = policy_item.get("additional_packages", [])
additional_packages.extend(additional)
reason = policy_item.get("reason", "")
logger.info(f"Installing additional packages: {additional} ({reason})")
elif policy_type == "warn":
message = policy_item.get("message", "")
allow_continue = policy_item.get("allow_continue", True)
logger.warning(f"Policy warning for {package_name}: {message}")
if not allow_continue:
# TODO: Implement user confirmation
logger.info("User confirmation required (not implemented, continuing)")
# Build final package list
packages_to_install = [final_package_info] + pinned_packages + additional_packages
# Execute installation
cmd = manager_util.make_pip_cmd(["install"] + packages_to_install)
if final_extra_index_url:
cmd.extend(["--extra-index-url", final_extra_index_url])
try:
subprocess.run(cmd, check=True)
self._invalidate_cache()
if policy_reason:
logger.info(f"Successfully installed {final_package_info}: {policy_reason}")
else:
logger.info(f"Successfully installed {final_package_info}")
return True
except subprocess.CalledProcessError as e:
# Handle installation failure
if pinned_packages and pin_on_failure == "retry_without_pin":
logger.warning(f"Installation failed with pinned dependencies, retrying without pins")
retry_cmd = manager_util.make_pip_cmd(["install", final_package_info])
if final_extra_index_url:
retry_cmd.extend(["--extra-index-url", final_extra_index_url])
try:
subprocess.run(retry_cmd, check=True)
self._invalidate_cache()
logger.info(f"Successfully installed {final_package_info} (without pins)")
return True
except subprocess.CalledProcessError as retry_error:
logger.error(f"Retry installation also failed: {retry_error}")
raise
elif pin_on_failure == "fail":
logger.error(f"Installation failed: {e}")
raise
else:
logger.warning(f"Installation failed, but continuing: {e}")
return False
def ensure_not_installed(self) -> List[str]:
"""
Remove all packages matching uninstall policies (batch processing).
Iterates through all package policies and executes uninstall actions
where conditions are satisfied.
Returns:
List of removed package names
Example:
>>> with PipBatch() as batch:
... removed = batch.ensure_not_installed()
... print(f"Removed: {removed}")
"""
# Get policy (lazy loading)
pip_policy = get_pip_policy()
installed_packages = self._get_installed_packages()
removed_packages = []
for package_name, policy in pip_policy.items():
uninstall_policies = policy.get("uninstall", [])
for uninstall_policy in uninstall_policies:
condition = uninstall_policy.get("condition")
if self._evaluate_condition(condition, package_name, installed_packages):
target = uninstall_policy.get("target")
reason = uninstall_policy.get("reason", "No reason provided")
# Check if target is installed
if target in installed_packages:
try:
cmd = manager_util.make_pip_cmd(["uninstall", "-y", target])
subprocess.run(cmd, check=True)
logger.info(f"Uninstalled {target}: {reason}")
removed_packages.append(target)
# Remove from cache
del installed_packages[target]
except subprocess.CalledProcessError as e:
logger.warning(f"Failed to uninstall {target}: {e}")
# First match only per package
break
return removed_packages
def ensure_installed(self) -> List[str]:
"""
Restore all packages matching restore policies (batch processing).
Iterates through all package policies and executes restore actions
where conditions are satisfied.
Returns:
List of restored package names
Example:
>>> with PipBatch() as batch:
... batch.install("numpy>=1.20")
... restored = batch.ensure_installed()
... print(f"Restored: {restored}")
"""
# Get policy (lazy loading)
pip_policy = get_pip_policy()
installed_packages = self._get_installed_packages()
restored_packages = []
for package_name, policy in pip_policy.items():
restore_policies = policy.get("restore", [])
for restore_policy in restore_policies:
condition = restore_policy.get("condition")
if self._evaluate_condition(condition, package_name, installed_packages):
target = restore_policy.get("target")
version = restore_policy.get("version")
reason = restore_policy.get("reason", "No reason provided")
extra_index_url = restore_policy.get("extra_index_url")
# Check if target needs restoration
current_version = installed_packages.get(target)
if current_version is None or current_version != version:
try:
package_spec = f"{target}=={version}"
cmd = manager_util.make_pip_cmd(["install", package_spec])
if extra_index_url:
cmd.extend(["--extra-index-url", extra_index_url])
subprocess.run(cmd, check=True)
logger.info(f"Restored {package_spec}: {reason}")
restored_packages.append(target)
# Update cache
installed_packages[target] = version
except subprocess.CalledProcessError as e:
logger.warning(f"Failed to restore {target}: {e}")
# First match only per package
break
return restored_packages

View File

File diff suppressed because it is too large Load Diff

View File

@@ -1,124 +0,0 @@
import sys
import subprocess
import os
from . import manager_util
def security_check():
print("[START] Security scan")
custom_nodes_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
comfyui_path = os.path.abspath(os.path.join(custom_nodes_path, '..'))
guide = {
"ComfyUI_LLMVISION": """
0.Remove ComfyUI\\custom_nodes\\ComfyUI_LLMVISION.
1.Remove pip packages: openai-1.16.3.dist-info, anthropic-0.21.4.dist-info, openai-1.30.2.dist-info, anthropic-0.21.5.dist-info, anthropic-0.26.1.dist-info, %LocalAppData%\\rundll64.exe
(For portable versions, it is recommended to reinstall. If you are using a venv, it is advised to recreate the venv.)
2.Remove these files in your system: lib/browser/admin.py, Cadmino.py, Fadmino.py, VISION-D.exe, BeamNG.UI.exe
3.Check your Windows registry for the key listed above and remove it.
(HKEY_CURRENT_USER\\Software\\OpenAICLI)
4.Run a malware scanner.
5.Change all of your passwords, everywhere.
(Reinstall OS is recommended.)
\n
Detailed information: https://old.reddit.com/r/comfyui/comments/1dbls5n/psa_if_youve_used_the_comfyui_llmvision_node_from/
""",
"lolMiner": """
1. Remove pip packages: lolMiner*
2. Remove files: lolMiner*, 4G_Ethash_Linux_Readme.txt, mine* in ComfyUI dir.
(Reinstall ComfyUI is recommended.)
""",
"ultralytics==8.3.41": f"""
Execute following commands:
{sys.executable} -m pip uninstall ultralytics
{sys.executable} -m pip install ultralytics==8.3.40
And kill and remove /tmp/ultralytics_runner
The version 8.3.41 to 8.3.42 of the Ultralytics package you installed is compromised. Please uninstall that version and reinstall the latest version.
https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situation/
""",
"ultralytics==8.3.42": f"""
Execute following commands:
{sys.executable} -m pip uninstall ultralytics
{sys.executable} -m pip install ultralytics==8.3.40
And kill and remove /tmp/ultralytics_runner
The version 8.3.41 to 8.3.42 of the Ultralytics package you installed is compromised. Please uninstall that version and reinstall the latest version.
https://blog.comfy.org/comfyui-statement-on-the-ultralytics-crypto-miner-situation/
"""
}
node_blacklist = {"ComfyUI_LLMVISION": "ComfyUI_LLMVISION"}
pip_blacklist = {
"AppleBotzz": "ComfyUI_LLMVISION",
"ultralytics==8.3.41": "ultralytics==8.3.41"
}
file_blacklist = {
"ComfyUI_LLMVISION": ["%LocalAppData%\\rundll64.exe"],
"lolMiner": [os.path.join(comfyui_path, 'lolMiner')]
}
installed_pips = subprocess.check_output(manager_util.make_pip_cmd(["freeze"]), text=True)
detected = set()
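    # Detect the tampered 'anthropic' package shipped by ComfyUI_LLMVISION: it lists
    # 'pycrypto' in its Requires metadata, so flag it and point the user at the removal guide.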
try:
anthropic_info = subprocess.check_output(manager_util.make_pip_cmd(["show", "anthropic"]), text=True, stderr=subprocess.DEVNULL)
requires_lines = [x for x in anthropic_info.split('\n') if x.startswith("Requires")]
if requires_lines:
anthropic_reqs = requires_lines[0].split(": ", 1)[1]
if "pycrypto" in anthropic_reqs:
location_lines = [x for x in anthropic_info.split('\n') if x.startswith("Location")]
if location_lines:
location = location_lines[0].split(": ", 1)[1]
for fi in os.listdir(location):
if fi.startswith("anthropic"):
guide["ComfyUI_LLMVISION"] = (f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"])
detected.add("ComfyUI_LLMVISION")
except subprocess.CalledProcessError:
pass
for k, v in node_blacklist.items():
if os.path.exists(os.path.join(custom_nodes_path, k)):
print(f"[SECURITY ALERT] custom node '{k}' is dangerous.")
detected.add(v)
for k, v in pip_blacklist.items():
if k in installed_pips:
detected.add(v)
break
for k, v in file_blacklist.items():
for x in v:
if os.path.exists(os.path.expandvars(x)):
detected.add(k)
break
if len(detected) > 0:
for line in installed_pips.split('\n'):
for k, v in pip_blacklist.items():
if k in line:
print(f"[SECURITY ALERT] '{line}' is dangerous.")
print("\n########################################################################")
print(" Malware has been detected, forcibly terminating ComfyUI execution.")
print("########################################################################\n")
for x in detected:
print(f"\n======== TARGET: {x} =========")
print("\nTODO:")
print(guide.get(x))
exit(-1)
print("[DONE] Security scan")

View File

File diff suppressed because it is too large Load Diff

View File

@@ -1,68 +0,0 @@
# Data Models
This directory contains Pydantic models for ComfyUI Manager, providing type safety, validation, and serialization for the API and internal data structures.
## Overview
- `generated_models.py` - All models auto-generated from OpenAPI spec
- `__init__.py` - Package exports for all models
**Note**: All models are now auto-generated from the OpenAPI specification. Manual model files (`task_queue.py`, `state_management.py`) have been deprecated in favor of a single source of truth.
## Generating Types from OpenAPI
All data models are automatically generated from the OpenAPI specification using `datamodel-codegen`. This ensures type safety and consistency between the API specification and the Python code.
### Prerequisites
Install the code generator:
```bash
pipx install datamodel-code-generator
```
### Generation Command
To regenerate all models after updating the OpenAPI spec:
```bash
datamodel-codegen \
--use-subclass-enum \
--field-constraints \
--strict-types bytes \
--use-double-quotes \
--input openapi.yaml \
--output comfyui_manager/data_models/generated_models.py \
--output-model-type pydantic_v2.BaseModel
```
### When to Regenerate
You should regenerate the models when:
1. **Adding new API endpoints** that return new data structures
2. **Modifying existing schemas** in the OpenAPI specification
3. **Adding new state management features** that require new models
### Important Notes
- **Single source of truth**: All models are now generated from `openapi.yaml`
- **No manual models**: All previously manual models have been migrated to the OpenAPI spec
- **OpenAPI requirements**: New schemas must be referenced in API paths to be generated by datamodel-codegen
- **Validation**: Always validate the OpenAPI spec before generation:
```bash
python3 -c "import yaml; yaml.safe_load(open('openapi.yaml'))"
```
### Example: Adding New State Models
1. Add your schema to `openapi.yaml` under `components/schemas/`
2. Reference the schema in an API endpoint response
3. Run the generation command above
4. Update `__init__.py` to export the new models
5. Import and use the models in your code
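A minimal sketch of step 5, validating data with one of the generated models; the field names follow `ModelMetadata` in `generated_models.py`, while the field values are placeholders:
```python
# Hypothetical usage of a generated model; field values are placeholders.
from comfyui_manager.data_models import ModelMetadata

meta = ModelMetadata(
    name="example-model",
    type="checkpoint",
    url="https://example.com/example-model.safetensors",
    filename="example-model.safetensors",
)
print(meta.model_dump(exclude_none=True))
```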
### Troubleshooting
- **Models not generated**: Ensure schemas are under `components/schemas/` (not `parameters/`)
- **Missing models**: Verify schemas are referenced in at least one API path
- **Import errors**: Check that new models are added to `__init__.py` exports

View File

@@ -1,137 +0,0 @@
"""
Data models for ComfyUI Manager.
This package contains Pydantic models used throughout the ComfyUI Manager
for data validation, serialization, and type safety.
All models are auto-generated from the OpenAPI specification to ensure
consistency between the API and implementation.
"""
from .generated_models import (
# Core Task Queue Models
QueueTaskItem,
TaskHistoryItem,
TaskStateMessage,
TaskExecutionStatus,
# WebSocket Message Models
MessageTaskDone,
MessageTaskStarted,
MessageTaskFailed,
MessageUpdate,
ManagerMessageName,
# State Management Models
BatchExecutionRecord,
ComfyUISystemState,
BatchOperation,
InstalledNodeInfo,
InstalledModelInfo,
ComfyUIVersionInfo,
# Import Fail Info Models
ImportFailInfoBulkRequest,
ImportFailInfoBulkResponse,
ImportFailInfoItem,
ImportFailInfoItem1,
# Other models
OperationType,
OperationResult,
ManagerPackInfo,
ManagerPackInstalled,
SelectedVersion,
ManagerChannel,
ManagerDatabaseSource,
ManagerPackState,
ManagerPackInstallType,
ManagerPack,
InstallPackParams,
UpdatePackParams,
UpdateAllPacksParams,
UpdateComfyUIParams,
FixPackParams,
UninstallPackParams,
DisablePackParams,
EnablePackParams,
UpdateAllQueryParams,
UpdateComfyUIQueryParams,
ComfyUISwitchVersionQueryParams,
QueueStatus,
ManagerMappings,
ModelMetadata,
NodePackageMetadata,
SnapshotItem,
Error,
InstalledPacksResponse,
HistoryResponse,
HistoryListResponse,
InstallType,
SecurityLevel,
RiskLevel,
)
__all__ = [
# Core Task Queue Models
"QueueTaskItem",
"TaskHistoryItem",
"TaskStateMessage",
"TaskExecutionStatus",
# WebSocket Message Models
"MessageTaskDone",
"MessageTaskStarted",
"MessageTaskFailed",
"MessageUpdate",
"ManagerMessageName",
# State Management Models
"BatchExecutionRecord",
"ComfyUISystemState",
"BatchOperation",
"InstalledNodeInfo",
"InstalledModelInfo",
"ComfyUIVersionInfo",
# Import Fail Info Models
"ImportFailInfoBulkRequest",
"ImportFailInfoBulkResponse",
"ImportFailInfoItem",
"ImportFailInfoItem1",
# Other models
"OperationType",
"OperationResult",
"ManagerPackInfo",
"ManagerPackInstalled",
"SelectedVersion",
"ManagerChannel",
"ManagerDatabaseSource",
"ManagerPackState",
"ManagerPackInstallType",
"ManagerPack",
"InstallPackParams",
"UpdatePackParams",
"UpdateAllPacksParams",
"UpdateComfyUIParams",
"FixPackParams",
"UninstallPackParams",
"DisablePackParams",
"EnablePackParams",
"UpdateAllQueryParams",
"UpdateComfyUIQueryParams",
"ComfyUISwitchVersionQueryParams",
"QueueStatus",
"ManagerMappings",
"ModelMetadata",
"NodePackageMetadata",
"SnapshotItem",
"Error",
"InstalledPacksResponse",
"HistoryResponse",
"HistoryListResponse",
"InstallType",
"SecurityLevel",
"RiskLevel",
]

View File

@@ -1,561 +0,0 @@
# generated by datamodel-codegen:
# filename: openapi.yaml
# timestamp: 2025-07-31T04:52:26+00:00
from __future__ import annotations
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel, Field, RootModel
class OperationType(str, Enum):
install = "install"
uninstall = "uninstall"
update = "update"
update_comfyui = "update-comfyui"
fix = "fix"
disable = "disable"
enable = "enable"
install_model = "install-model"
class OperationResult(str, Enum):
success = "success"
failed = "failed"
skipped = "skipped"
error = "error"
skip = "skip"
class TaskExecutionStatus(BaseModel):
status_str: OperationResult
completed: bool = Field(..., description="Whether the task completed")
messages: List[str] = Field(..., description="Additional status messages")
class ManagerMessageName(str, Enum):
cm_task_completed = "cm-task-completed"
cm_task_started = "cm-task-started"
cm_queue_status = "cm-queue-status"
class ManagerPackInfo(BaseModel):
id: str = Field(
...,
description="Either github-author/github-repo or name of pack from the registry",
)
version: str = Field(..., description="Semantic version or Git commit hash")
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
class ManagerPackInstalled(BaseModel):
ver: str = Field(
...,
description="The version of the pack that is installed (Git commit hash or semantic version)",
)
cnr_id: Optional[str] = Field(
None, description="The name of the pack if installed from the registry"
)
aux_id: Optional[str] = Field(
None,
description="The name of the pack if installed from github (author/repo-name format)",
)
enabled: bool = Field(..., description="Whether the pack is enabled")
class SelectedVersion(str, Enum):
latest = "latest"
nightly = "nightly"
class ManagerChannel(str, Enum):
default = "default"
recent = "recent"
legacy = "legacy"
forked = "forked"
dev = "dev"
tutorial = "tutorial"
class ManagerDatabaseSource(str, Enum):
remote = "remote"
local = "local"
cache = "cache"
class ManagerPackState(str, Enum):
installed = "installed"
disabled = "disabled"
not_installed = "not_installed"
import_failed = "import_failed"
needs_update = "needs_update"
class ManagerPackInstallType(str, Enum):
git_clone = "git-clone"
copy = "copy"
cnr = "cnr"
class SecurityLevel(str, Enum):
strong = "strong"
normal = "normal"
normal_ = "normal-"
weak = "weak"
class RiskLevel(str, Enum):
block = "block"
high_ = "high+"
high = "high"
middle_ = "middle+"
middle = "middle"
class UpdateState(Enum):
false = "false"
true = "true"
class ManagerPack(ManagerPackInfo):
author: Optional[str] = Field(
None, description="Pack author name or 'Unclaimed' if added via GitHub crawl"
)
files: Optional[List[str]] = Field(
None,
description="Repository URLs for installation (typically contains one GitHub URL)",
)
reference: Optional[str] = Field(
None, description="The type of installation reference"
)
title: Optional[str] = Field(None, description="The display name of the pack")
cnr_latest: Optional[SelectedVersion] = None
repository: Optional[str] = Field(None, description="GitHub repository URL")
state: Optional[ManagerPackState] = None
update_state: Optional[UpdateState] = Field(
None, alias="update-state", description="Update availability status"
)
stars: Optional[int] = Field(None, description="GitHub stars count")
last_update: Optional[datetime] = Field(None, description="Last update timestamp")
health: Optional[str] = Field(None, description="Health status of the pack")
description: Optional[str] = Field(None, description="Pack description")
trust: Optional[bool] = Field(None, description="Whether the pack is trusted")
install_type: Optional[ManagerPackInstallType] = None
class InstallPackParams(ManagerPackInfo):
selected_version: Union[str, SelectedVersion] = Field(
..., description="Semantic version, Git commit hash, latest, or nightly"
)
repository: Optional[str] = Field(
None,
description="GitHub repository URL (required if selected_version is nightly)",
)
pip: Optional[List[str]] = Field(None, description="PyPi dependency names")
mode: ManagerDatabaseSource
channel: ManagerChannel
skip_post_install: Optional[bool] = Field(
None, description="Whether to skip post-installation steps"
)
class UpdateAllPacksParams(BaseModel):
mode: Optional[ManagerDatabaseSource] = None
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
class UpdatePackParams(BaseModel):
node_name: str = Field(..., description="Name of the node package to update")
node_ver: Optional[str] = Field(
None, description="Current version of the node package"
)
class UpdateComfyUIParams(BaseModel):
is_stable: Optional[bool] = Field(
True,
description="Whether to update to stable version (true) or nightly (false)",
)
target_version: Optional[str] = Field(
None,
description="Specific version to switch to (for version switching operations)",
)
class FixPackParams(BaseModel):
node_name: str = Field(..., description="Name of the node package to fix")
node_ver: str = Field(..., description="Version of the node package")
class UninstallPackParams(BaseModel):
node_name: str = Field(..., description="Name of the node package to uninstall")
is_unknown: Optional[bool] = Field(
False, description="Whether this is an unknown/unregistered package"
)
class DisablePackParams(BaseModel):
node_name: str = Field(..., description="Name of the node package to disable")
is_unknown: Optional[bool] = Field(
False, description="Whether this is an unknown/unregistered package"
)
class EnablePackParams(BaseModel):
cnr_id: str = Field(
..., description="ComfyUI Node Registry ID of the package to enable"
)
class UpdateAllQueryParams(BaseModel):
client_id: str = Field(
..., description="Client identifier that initiated the request"
)
ui_id: str = Field(..., description="Base UI identifier for task tracking")
mode: Optional[ManagerDatabaseSource] = None
class UpdateComfyUIQueryParams(BaseModel):
client_id: str = Field(
..., description="Client identifier that initiated the request"
)
ui_id: str = Field(..., description="UI identifier for task tracking")
stable: Optional[bool] = Field(
True,
description="Whether to update to stable version (true) or nightly (false)",
)
class ComfyUISwitchVersionQueryParams(BaseModel):
ver: str = Field(..., description="Version to switch to")
client_id: str = Field(
..., description="Client identifier that initiated the request"
)
ui_id: str = Field(..., description="UI identifier for task tracking")
class QueueStatus(BaseModel):
total_count: int = Field(
..., description="Total number of tasks (pending + running)"
)
done_count: int = Field(..., description="Number of completed tasks")
in_progress_count: int = Field(..., description="Number of tasks currently running")
pending_count: Optional[int] = Field(
None, description="Number of tasks waiting to be executed"
)
is_processing: bool = Field(..., description="Whether the task worker is active")
client_id: Optional[str] = Field(
None, description="Client ID (when filtered by client)"
)
class ManagerMappings1(BaseModel):
title_aux: Optional[str] = Field(None, description="The display name of the pack")
class ManagerMappings(
RootModel[Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]]]
):
root: Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]] = Field(
None, description="Tuple of [node_names, metadata]"
)
class ModelMetadata(BaseModel):
name: str = Field(..., description="Name of the model")
type: str = Field(..., description="Type of model")
base: Optional[str] = Field(None, description="Base model type")
save_path: Optional[str] = Field(None, description="Path for saving the model")
url: str = Field(..., description="Download URL")
filename: str = Field(..., description="Target filename")
ui_id: Optional[str] = Field(None, description="ID for UI reference")
class InstallType(str, Enum):
git = "git"
copy = "copy"
pip = "pip"
class NodePackageMetadata(BaseModel):
title: Optional[str] = Field(None, description="Display name of the node package")
name: Optional[str] = Field(None, description="Repository/package name")
files: Optional[List[str]] = Field(None, description="Source URLs for the package")
description: Optional[str] = Field(
None, description="Description of the node package functionality"
)
install_type: Optional[InstallType] = Field(None, description="Installation method")
version: Optional[str] = Field(None, description="Version identifier")
id: Optional[str] = Field(
None, description="Unique identifier for the node package"
)
ui_id: Optional[str] = Field(None, description="ID for UI reference")
channel: Optional[str] = Field(None, description="Source channel")
mode: Optional[str] = Field(None, description="Source mode")
class SnapshotItem(RootModel[str]):
root: str = Field(..., description="Name of the snapshot")
class Error(BaseModel):
error: str = Field(..., description="Error message")
class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]]):
root: Optional[Dict[str, ManagerPackInstalled]] = None
class HistoryListResponse(BaseModel):
ids: Optional[List[str]] = Field(
None, description="List of available batch history IDs"
)
class InstalledNodeInfo(BaseModel):
name: str = Field(..., description="Node package name")
version: str = Field(..., description="Installed version")
repository_url: Optional[str] = Field(None, description="Git repository URL")
install_method: str = Field(
..., description="Installation method (cnr, git, pip, etc.)"
)
enabled: Optional[bool] = Field(
True, description="Whether the node is currently enabled"
)
install_date: Optional[datetime] = Field(
None, description="ISO timestamp of installation"
)
class InstalledModelInfo(BaseModel):
name: str = Field(..., description="Model filename")
path: str = Field(..., description="Full path to model file")
type: str = Field(..., description="Model type (checkpoint, lora, vae, etc.)")
size_bytes: Optional[int] = Field(None, description="File size in bytes", ge=0)
hash: Optional[str] = Field(None, description="Model file hash for verification")
install_date: Optional[datetime] = Field(
None, description="ISO timestamp when added"
)
class ComfyUIVersionInfo(BaseModel):
version: str = Field(..., description="ComfyUI version string")
commit_hash: Optional[str] = Field(None, description="Git commit hash")
branch: Optional[str] = Field(None, description="Git branch name")
is_stable: Optional[bool] = Field(
False, description="Whether this is a stable release"
)
last_updated: Optional[datetime] = Field(
None, description="ISO timestamp of last update"
)
class BatchOperation(BaseModel):
operation_id: str = Field(..., description="Unique operation identifier")
operation_type: OperationType
target: str = Field(
..., description="Target of the operation (node name, model name, etc.)"
)
target_version: Optional[str] = Field(
None, description="Target version for the operation"
)
result: OperationResult
error_message: Optional[str] = Field(
None, description="Error message if operation failed"
)
start_time: datetime = Field(
..., description="ISO timestamp when operation started"
)
end_time: Optional[datetime] = Field(
None, description="ISO timestamp when operation completed"
)
client_id: Optional[str] = Field(
None, description="Client that initiated the operation"
)
class ComfyUISystemState(BaseModel):
snapshot_time: datetime = Field(
..., description="ISO timestamp when snapshot was taken"
)
comfyui_version: ComfyUIVersionInfo
frontend_version: Optional[str] = Field(
None, description="ComfyUI frontend version if available"
)
python_version: str = Field(..., description="Python interpreter version")
platform_info: str = Field(
..., description="Operating system and platform information"
)
installed_nodes: Optional[Dict[str, InstalledNodeInfo]] = Field(
None, description="Map of installed node packages by name"
)
installed_models: Optional[Dict[str, InstalledModelInfo]] = Field(
None, description="Map of installed models by name"
)
manager_config: Optional[Dict[str, Any]] = Field(
None, description="ComfyUI Manager configuration settings"
)
comfyui_root_path: Optional[str] = Field(
None, description="ComfyUI root installation directory"
)
model_paths: Optional[Dict[str, List[str]]] = Field(
None, description="Map of model types to their configured paths"
)
manager_version: Optional[str] = Field(None, description="ComfyUI Manager version")
security_level: Optional[SecurityLevel] = None
network_mode: Optional[str] = Field(
None, description="Network mode (online, offline, private)"
)
cli_args: Optional[Dict[str, Any]] = Field(
None, description="Selected ComfyUI CLI arguments"
)
custom_nodes_count: Optional[int] = Field(
None, description="Total number of custom node packages", ge=0
)
failed_imports: Optional[List[str]] = Field(
None, description="List of custom nodes that failed to import"
)
pip_packages: Optional[Dict[str, str]] = Field(
None, description="Map of installed pip packages to their versions"
)
embedded_python: Optional[bool] = Field(
None,
description="Whether ComfyUI is running from an embedded Python distribution",
)
class BatchExecutionRecord(BaseModel):
batch_id: str = Field(..., description="Unique batch identifier")
start_time: datetime = Field(..., description="ISO timestamp when batch started")
end_time: Optional[datetime] = Field(
None, description="ISO timestamp when batch completed"
)
state_before: ComfyUISystemState
state_after: Optional[ComfyUISystemState] = Field(
None, description="System state after batch execution"
)
operations: Optional[List[BatchOperation]] = Field(
None, description="List of operations performed in this batch"
)
total_operations: Optional[int] = Field(
0, description="Total number of operations in batch", ge=0
)
successful_operations: Optional[int] = Field(
0, description="Number of successful operations", ge=0
)
failed_operations: Optional[int] = Field(
0, description="Number of failed operations", ge=0
)
skipped_operations: Optional[int] = Field(
0, description="Number of skipped operations", ge=0
)
class ImportFailInfoBulkRequest(BaseModel):
cnr_ids: Optional[List[str]] = Field(
None, description="A list of CNR IDs to check."
)
urls: Optional[List[str]] = Field(
None, description="A list of repository URLs to check."
)
class ImportFailInfoItem1(BaseModel):
error: Optional[str] = None
traceback: Optional[str] = None
class ImportFailInfoItem(RootModel[Optional[ImportFailInfoItem1]]):
root: Optional[ImportFailInfoItem1]
class QueueTaskItem(BaseModel):
ui_id: str = Field(..., description="Unique identifier for the task")
client_id: str = Field(..., description="Client identifier that initiated the task")
kind: OperationType
params: Union[
InstallPackParams,
UpdatePackParams,
UpdateAllPacksParams,
UpdateComfyUIParams,
FixPackParams,
UninstallPackParams,
DisablePackParams,
EnablePackParams,
ModelMetadata,
]
class TaskHistoryItem(BaseModel):
ui_id: str = Field(..., description="Unique identifier for the task")
client_id: str = Field(..., description="Client identifier that initiated the task")
kind: str = Field(..., description="Type of task that was performed")
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
result: str = Field(..., description="Task result message or details")
status: Optional[TaskExecutionStatus] = None
batch_id: Optional[str] = Field(
None, description="ID of the batch this task belongs to"
)
end_time: Optional[datetime] = Field(
None, description="ISO timestamp when task execution ended"
)
class TaskStateMessage(BaseModel):
history: Dict[str, TaskHistoryItem] = Field(
..., description="Map of task IDs to their history items"
)
running_queue: List[QueueTaskItem] = Field(
..., description="Currently executing tasks"
)
pending_queue: List[QueueTaskItem] = Field(
..., description="Tasks waiting to be executed"
)
installed_packs: Dict[str, ManagerPackInstalled] = Field(
..., description="Map of currently installed node packages by name"
)
class MessageTaskDone(BaseModel):
ui_id: str = Field(..., description="Task identifier")
result: str = Field(..., description="Task result message")
kind: str = Field(..., description="Type of task")
status: Optional[TaskExecutionStatus] = None
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
state: TaskStateMessage
class MessageTaskStarted(BaseModel):
ui_id: str = Field(..., description="Task identifier")
kind: str = Field(..., description="Type of task")
timestamp: datetime = Field(..., description="ISO timestamp when task started")
state: TaskStateMessage
class MessageTaskFailed(BaseModel):
ui_id: str = Field(..., description="Task identifier")
error: str = Field(..., description="Error message")
kind: str = Field(..., description="Type of task")
timestamp: datetime = Field(..., description="ISO timestamp when task failed")
state: TaskStateMessage
class MessageUpdate(
RootModel[Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed]]
):
root: Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed] = Field(
..., description="Union type for all possible WebSocket message updates"
)
class HistoryResponse(BaseModel):
history: Optional[Dict[str, TaskHistoryItem]] = Field(
None, description="Map of task IDs to their history items"
)
class ImportFailInfoBulkResponse(RootModel[Optional[Dict[str, ImportFailInfoItem]]]):
root: Optional[Dict[str, ImportFailInfoItem]] = None
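# --- Usage sketch (not part of the generated module) ---
# A minimal, illustrative validation of an incoming WebSocket payload against the models
# above; the field values are assumptions for demonstration, not real manager output.
_example_payload = {
    "ui_id": "task-0001",
    "result": "install completed",
    "kind": "install",
    "timestamp": "2024-11-14T18:34:50+09:00",
    "state": {
        "history": {},
        "running_queue": [],
        "pending_queue": [],
        "installed_packs": {},
    },
}
_message = MessageUpdate.model_validate(_example_payload)
print(type(_message.root).__name__)  # expected to resolve to MessageTaskDone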

View File

File diff suppressed because it is too large

View File

@@ -1,11 +0,0 @@
- Anytime you make a change to the data being sent or received, you should follow this process:
1. Adjust the openapi.yaml file first
2. Verify the syntax of the openapi.yaml file using `yaml.safe_load` (see the sketch after this list)
3. Regenerate the types following the instructions in the `data_models/README.md` file
4. Verify the new data model is generated
5. Verify the syntax of the generated types files
6. Run formatting and linting on the generated types files
7. Adjust the `__init__.py` files in the `data_models` directory to match/export the new data model
8. Only then, make the changes to the rest of the codebase
9. Run the CI tests to verify that the changes are working
- The comfyui_manager is a Python package used to manage the ComfyUI server. It contains two sub-packages, `glob` and `legacy`, which represent the current version (`glob`) and the previous major version (`legacy`); common utilities and data models live outside both. When developing, we work in the `glob` package. You can ignore the `legacy` package entirely unless you have a very good reason to research how things were done in prior major versions of the package; even then, look only for knowledge or reflection, not to change code (unless explicitly asked to do so).
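A minimal sketch of the `yaml.safe_load` syntax check in step 2 (assuming the specification lives at the repository root as `openapi.yaml`):

    import yaml

    with open("openapi.yaml", "r", encoding="utf-8") as f:
        yaml.safe_load(f)  # raises yaml.YAMLError if the specification is not syntactically valid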

View File

View File

@@ -1,55 +0,0 @@
SECURITY_MESSAGE_MIDDLE = "ERROR: To use this action, a security_level of `normal or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_MIDDLE_P = "ERROR: To use this action, security_level must be `normal or below`, and network_mode must be set to `personal_cloud`. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."
def is_loopback(address):
import ipaddress
try:
return ipaddress.ip_address(address).is_loopback
except ValueError:
return False
model_dir_name_map = {
"checkpoints": "checkpoints",
"checkpoint": "checkpoints",
"unclip": "checkpoints",
"text_encoders": "text_encoders",
"clip": "text_encoders",
"vae": "vae",
"lora": "loras",
"t2i-adapter": "controlnet",
"t2i-style": "controlnet",
"controlnet": "controlnet",
"clip_vision": "clip_vision",
"gligen": "gligen",
"upscale": "upscale_models",
"embedding": "embeddings",
"embeddings": "embeddings",
"unet": "diffusion_models",
"diffusion_model": "diffusion_models",
}
# List of all model directory names used for checking installed models
MODEL_DIR_NAMES = [
"checkpoints",
"loras",
"vae",
"text_encoders",
"diffusion_models",
"clip_vision",
"embeddings",
"diffusers",
"vae_approx",
"controlnet",
"gligen",
"upscale_models",
"hypernetworks",
"photomaker",
"classifiers",
]
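# --- Usage sketch (illustrative, not part of the original module) ---
# model_dir_name_map normalizes the model "type" reported by the database to the on-disk
# directory name; unknown types return None so callers can choose their own fallback.
assert model_dir_name_map.get("lora") == "loras"
assert model_dir_name_map.get("t2i-adapter") == "controlnet"
assert model_dir_name_map.get("some-unknown-type") is None
assert "loras" in MODEL_DIR_NAMES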

View File

File diff suppressed because it is too large

View File

File diff suppressed because it is too large

View File

@@ -1,451 +0,0 @@
import mimetypes
from ..common import context
from . import manager_core as core
import os
from aiohttp import web
import aiohttp
import json
import hashlib
import folder_paths
from server import PromptServer
import logging
import sys
try:
from nio import AsyncClient, LoginResponse, UploadResponse
matrix_nio_is_available = True
except Exception:
logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
matrix_nio_is_available = False
def extract_model_file_names(json_data):
"""Extract unique file names from the input JSON data."""
file_names = set()
model_filename_extensions = {'.safetensors', '.ckpt', '.pt', '.pth', '.bin'}
# Recursively search for file names in the JSON data
def recursive_search(data):
if isinstance(data, dict):
for value in data.values():
recursive_search(value)
elif isinstance(data, list):
for item in data:
recursive_search(item)
elif isinstance(data, str) and '.' in data:
file_names.add(os.path.basename(data)) # file_names.add(data)
recursive_search(json_data)
return [f for f in list(file_names) if os.path.splitext(f)[1] in model_filename_extensions]
def find_file_paths(base_dir, file_names):
"""Find the paths of the files in the base directory."""
file_paths = {}
for root, dirs, files in os.walk(base_dir):
# Exclude certain directories
dirs[:] = [d for d in dirs if d not in ['.git']]
for file in files:
if file in file_names:
file_paths[file] = os.path.join(root, file)
return file_paths
def compute_sha256_checksum(filepath):
"""Compute the SHA256 checksum of a file, in chunks"""
sha256 = hashlib.sha256()
with open(filepath, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b''):
sha256.update(chunk)
return sha256.hexdigest()
@PromptServer.instance.routes.get("/v2/manager/share_option")
async def share_option(request):
if "value" in request.rel_url.query:
core.get_config()['share_option'] = request.rel_url.query['value']
core.write_config()
else:
return web.Response(text=core.get_config()['share_option'], status=200)
return web.Response(status=200)
def get_openart_auth():
if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
return None
try:
with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
openart_key = f.read().strip()
return openart_key if openart_key else None
except Exception:
return None
def get_matrix_auth():
if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
return None
try:
with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
matrix_auth = f.read()
homeserver, username, password = matrix_auth.strip().split("\n")
if not homeserver or not username or not password:
return None
return {
"homeserver": homeserver,
"username": username,
"password": password,
}
except Exception:
return None
def get_comfyworkflows_auth():
if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
return None
try:
with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
share_key = f.read()
if not share_key.strip():
return None
return share_key
except Exception:
return None
def get_youml_settings():
if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
return None
try:
with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
youml_settings = f.read().strip()
return youml_settings if youml_settings else None
except Exception:
return None
def set_youml_settings(settings):
with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
f.write(settings)
@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
async def api_get_openart_auth(request):
# print("Getting stored Matrix credentials...")
openart_key = get_openart_auth()
if not openart_key:
return web.Response(status=404)
return web.json_response({"openart_key": openart_key})
@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
async def api_set_openart_auth(request):
json_data = await request.json()
openart_key = json_data['openart_key']
with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
f.write(openart_key)
return web.Response(status=200)
@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
# print("Getting stored Matrix credentials...")
matrix_auth = get_matrix_auth()
if not matrix_auth:
return web.Response(status=404)
return web.json_response(matrix_auth)
@PromptServer.instance.routes.get("/v2/manager/youml/settings")
async def api_get_youml_settings(request):
youml_settings = get_youml_settings()
if not youml_settings:
return web.Response(status=404)
return web.json_response(json.loads(youml_settings))
@PromptServer.instance.routes.post("/v2/manager/youml/settings")
async def api_set_youml_settings(request):
json_data = await request.json()
set_youml_settings(json.dumps(json_data))
return web.Response(status=200)
@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
# Check if the user has provided a Comfyworkflows.com share key in a file called
# 'comfyworkflows_sharekey' in the manager files directory
# print("Getting stored Comfyworkflows.com auth...")
comfyworkflows_auth = get_comfyworkflows_auth()
if not comfyworkflows_auth:
return web.Response(status=404)
return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})
@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
json_data = await request.json()
with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
json.dump(json_data, file, indent=4)
return web.Response(status=200)
@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
data = json.load(file)
return web.Response(status=200, text=json.dumps(data))
@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
async def get_matrix_dep_status(request):
if matrix_nio_is_available:
return web.Response(status=200, text='available')
else:
return web.Response(status=200, text='unavailable')
def set_matrix_auth(json_data):
homeserver = json_data['homeserver']
username = json_data['username']
password = json_data['password']
with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
f.write("\n".join([homeserver, username, password]))
def set_comfyworkflows_auth(comfyworkflows_sharekey):
with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
f.write(comfyworkflows_sharekey)
def has_provided_matrix_auth(matrix_auth):
return matrix_auth['homeserver'].strip() and matrix_auth['username'].strip() and matrix_auth['password'].strip()
def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
return comfyworkflows_sharekey.strip()
@PromptServer.instance.routes.post("/v2/manager/share")
async def share_art(request):
# get json data
json_data = await request.json()
matrix_auth = json_data['matrix_auth']
comfyworkflows_sharekey = json_data['cw_auth']['cw_sharekey']
set_matrix_auth(matrix_auth)
set_comfyworkflows_auth(comfyworkflows_sharekey)
share_destinations = json_data['share_destinations']
credits = json_data['credits']
title = json_data['title']
description = json_data['description']
is_nsfw = json_data['is_nsfw']
prompt = json_data['prompt']
potential_outputs = json_data['potential_outputs']
selected_output_index = json_data['selected_output_index']
try:
output_to_share = potential_outputs[int(selected_output_index)]
except Exception:
# for now, pick the first output
output_to_share = potential_outputs[0]
assert output_to_share['type'] in ('image', 'output')
output_dir = folder_paths.get_output_directory()
if output_to_share['type'] == 'image':
asset_filename = output_to_share['image']['filename']
asset_subfolder = output_to_share['image']['subfolder']
if output_to_share['image']['type'] == 'temp':
output_dir = folder_paths.get_temp_directory()
else:
asset_filename = output_to_share['output']['filename']
asset_subfolder = output_to_share['output']['subfolder']
if asset_subfolder:
asset_filepath = os.path.join(output_dir, asset_subfolder, asset_filename)
else:
asset_filepath = os.path.join(output_dir, asset_filename)
# get the mime type of the asset
assetFileType = mimetypes.guess_type(asset_filepath)[0]
share_website_host = "UNKNOWN"
if "comfyworkflows" in share_destinations:
share_website_host = "https://comfyworkflows.com"
share_endpoint = f"{share_website_host}/api"
# get presigned urls
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.post(
f"{share_endpoint}/get_presigned_urls",
json={
"assetFileName": asset_filename,
"assetFileType": assetFileType,
"workflowJsonFileName": 'workflow.json',
"workflowJsonFileType": 'application/json',
},
) as resp:
assert resp.status == 200
presigned_urls_json = await resp.json()
assetFilePresignedUrl = presigned_urls_json["assetFilePresignedUrl"]
assetFileKey = presigned_urls_json["assetFileKey"]
workflowJsonFilePresignedUrl = presigned_urls_json["workflowJsonFilePresignedUrl"]
workflowJsonFileKey = presigned_urls_json["workflowJsonFileKey"]
# upload asset
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.put(assetFilePresignedUrl, data=open(asset_filepath, "rb")) as resp:
assert resp.status == 200
# upload workflow json
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.put(workflowJsonFilePresignedUrl, data=json.dumps(prompt['workflow']).encode('utf-8')) as resp:
assert resp.status == 200
model_filenames = extract_model_file_names(prompt['workflow'])
model_file_paths = find_file_paths(folder_paths.base_path, model_filenames)
models_info = {}
for filename, filepath in model_file_paths.items():
models_info[filename] = {
"filename": filename,
"sha256_checksum": compute_sha256_checksum(filepath),
"relative_path": os.path.relpath(filepath, folder_paths.base_path),
}
# make a POST request to /api/upload_workflow with form data key values
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
form = aiohttp.FormData()
if comfyworkflows_sharekey:
form.add_field("shareKey", comfyworkflows_sharekey)
form.add_field("source", "comfyui_manager")
form.add_field("assetFileKey", assetFileKey)
form.add_field("assetFileType", assetFileType)
form.add_field("workflowJsonFileKey", workflowJsonFileKey)
form.add_field("sharedWorkflowWorkflowJsonString", json.dumps(prompt['workflow']))
form.add_field("sharedWorkflowPromptJsonString", json.dumps(prompt['output']))
form.add_field("shareWorkflowCredits", credits)
form.add_field("shareWorkflowTitle", title)
form.add_field("shareWorkflowDescription", description)
form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
form.add_field("currentSnapshot", json.dumps(await core.get_current_snapshot()))
form.add_field("modelsInfo", json.dumps(models_info))
async with session.post(
f"{share_endpoint}/upload_workflow",
data=form,
) as resp:
assert resp.status == 200
upload_workflow_json = await resp.json()
workflowId = upload_workflow_json["workflowId"]
# check if the user has provided Matrix credentials
if matrix_nio_is_available and "matrix" in share_destinations:
comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
filename = os.path.basename(asset_filepath)
content_type = assetFileType
try:
homeserver = 'matrix.org'
if matrix_auth:
homeserver = matrix_auth.get('homeserver', 'matrix.org')
homeserver = homeserver.replace("http://", "https://")
if not homeserver.startswith("https://"):
homeserver = "https://" + homeserver
client = AsyncClient(homeserver, matrix_auth['username'])
# Login
login_resp = await client.login(matrix_auth['password'])
if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
await client.close()
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
# Upload asset
with open(asset_filepath, 'rb') as f:
upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
asset_data = f.seek(0) or f.read() # get size for info below
if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
await client.close()
return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
mxc_url = upload_resp.content_uri
# Upload workflow JSON
import io
workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
workflow_io = io.BytesIO(workflow_json_bytes)
upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
workflow_io.seek(0)
if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
await client.close()
return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
workflow_json_mxc_url = upload_workflow_resp.content_uri
# Send text message
text_content = ""
if title:
text_content += f"{title}\n"
if description:
text_content += f"{description}\n"
if credits:
text_content += f"\ncredits: {credits}\n"
await client.room_send(
room_id=comfyui_share_room_id,
message_type="m.room.message",
content={"msgtype": "m.text", "body": text_content}
)
# Send image
await client.room_send(
room_id=comfyui_share_room_id,
message_type="m.room.message",
content={
"msgtype": "m.image",
"body": filename,
"url": mxc_url,
"info": {
"mimetype": content_type,
"size": len(asset_data)
}
}
)
# Send workflow JSON file
await client.room_send(
room_id=comfyui_share_room_id,
message_type="m.room.message",
content={
"msgtype": "m.file",
"body": "workflow.json",
"url": workflow_json_mxc_url,
"info": {
"mimetype": "application/json",
"size": len(workflow_json_bytes)
}
}
)
await client.close()
except:
import traceback
traceback.print_exc()
return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)
return web.json_response({
"comfyworkflows": {
"url": None if "comfyworkflows" not in share_destinations else f"{share_website_host}/workflows/{workflowId}",
},
"matrix": {
"success": None if "matrix" not in share_destinations else True
}
}, content_type='application/json', status=200)
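# --- Usage sketch (not part of the original module) ---
# How the helper functions above fit together outside the /v2/manager/share route;
# the workflow dict below is an assumed example, not real ComfyUI output.
_example_workflow = {"4": {"inputs": {"ckpt_name": "sd_xl_base_1.0.safetensors"}}}
_names = extract_model_file_names(_example_workflow)           # ['sd_xl_base_1.0.safetensors']
_paths = find_file_paths(folder_paths.base_path, set(_names))  # filename -> path under the base dir
_checksums = {name: compute_sha256_checksum(path) for name, path in _paths.items()}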

View File

View File

@@ -1,142 +0,0 @@
import os
import git
import logging
import traceback
from comfyui_manager.common import context
import folder_paths
from comfy.cli_args import args
import latent_preview
from comfyui_manager.glob import manager_core as core
from comfyui_manager.common import cm_global
comfy_ui_hash = "-"
comfyui_tag = None
def print_comfyui_version():
global comfy_ui_hash
global comfyui_tag
is_detached = False
try:
repo = git.Repo(os.path.dirname(folder_paths.__file__))
core.comfy_ui_revision = len(list(repo.iter_commits("HEAD")))
comfy_ui_hash = repo.head.commit.hexsha
cm_global.variables["comfyui.revision"] = core.comfy_ui_revision
core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime
cm_global.variables["comfyui.commit_datetime"] = core.comfy_ui_commit_datetime
is_detached = repo.head.is_detached
current_branch = repo.active_branch.name
comfyui_tag = context.get_comfyui_tag()
try:
if (
not os.environ.get("__COMFYUI_DESKTOP_VERSION__")
and core.comfy_ui_commit_datetime.date()
< core.comfy_ui_required_commit_datetime.date()
):
logging.warning(
f"\n\n## [WARN] ComfyUI-Manager: Your ComfyUI version ({core.comfy_ui_revision})[{core.comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version. ##\n\n"
)
except Exception:
pass
# process on_revision_detected -->
if "cm.on_revision_detected_handler" in cm_global.variables:
for k, f in cm_global.variables["cm.on_revision_detected_handler"]:
try:
f(core.comfy_ui_revision)
except Exception:
logging.error(f"[ERROR] '{k}' on_revision_detected_handler")
traceback.print_exc()
del cm_global.variables["cm.on_revision_detected_handler"]
else:
logging.warning(
"[ComfyUI-Manager] Some features are restricted due to your ComfyUI being outdated."
)
# <--
if current_branch == "master":
if comfyui_tag:
logging.info(
f"### ComfyUI Version: {comfyui_tag} | Released on '{core.comfy_ui_commit_datetime.date()}'"
)
else:
logging.info(
f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'"
)
else:
if comfyui_tag:
logging.info(
f"### ComfyUI Version: {comfyui_tag} on '{current_branch}' | Released on '{core.comfy_ui_commit_datetime.date()}'"
)
else:
logging.info(
f"### ComfyUI Revision: {core.comfy_ui_revision} on '{current_branch}' [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'"
)
except Exception:
if is_detached:
logging.info(
f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] *DETACHED | Released on '{core.comfy_ui_commit_datetime.date()}'"
)
else:
logging.info(
"### ComfyUI Revision: UNKNOWN (The currently installed ComfyUI is not a Git repository)"
)
def set_preview_method(method):
if method == "auto":
args.preview_method = latent_preview.LatentPreviewMethod.Auto
elif method == "latent2rgb":
args.preview_method = latent_preview.LatentPreviewMethod.Latent2RGB
elif method == "taesd":
args.preview_method = latent_preview.LatentPreviewMethod.TAESD
else:
args.preview_method = latent_preview.LatentPreviewMethod.NoPreviews
core.get_config()["preview_method"] = method
def set_update_policy(mode):
core.get_config()["update_policy"] = mode
def set_db_mode(mode):
core.get_config()["db_mode"] = mode
def setup_environment():
git_exe = core.get_config()["git_exe"]
if git_exe != "":
git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=git_exe)
def initialize_environment():
context.comfy_path = os.path.dirname(folder_paths.__file__)
core.js_path = os.path.join(context.comfy_path, "web", "extensions")
# Legacy database paths - kept for potential future use
# local_db_model = os.path.join(manager_util.comfyui_manager_path, "model-list.json")
# local_db_alter = os.path.join(manager_util.comfyui_manager_path, "alter-list.json")
# local_db_custom_node_list = os.path.join(
# manager_util.comfyui_manager_path, "custom-node-list.json"
# )
# local_db_extension_node_mappings = os.path.join(
# manager_util.comfyui_manager_path, "extension-node-map.json"
# )
set_preview_method(core.get_config()["preview_method"])
print_comfyui_version()
setup_environment()
core.check_invalid_nodes()

View File

@@ -1,60 +0,0 @@
import locale
import sys
import re
def handle_stream(stream, prefix):
stream.reconfigure(encoding=locale.getpreferredencoding(), errors="replace")
for msg in stream:
if (
prefix == "[!]"
and ("it/s]" in msg or "s/it]" in msg)
and ("%|" in msg or "it [" in msg)
):
if msg.startswith("100%"):
print("\r" + msg, end="", file=sys.stderr),
else:
print("\r" + msg[:-1], end="", file=sys.stderr),
else:
if prefix == "[!]":
print(prefix, msg, end="", file=sys.stderr)
else:
print(prefix, msg, end="")
def convert_markdown_to_html(input_text):
pattern_a = re.compile(r"\[a/([^]]+)]\(([^)]+)\)")
pattern_w = re.compile(r"\[w/([^]]+)]")
pattern_i = re.compile(r"\[i/([^]]+)]")
pattern_bold = re.compile(r"\*\*([^*]+)\*\*")
pattern_white = re.compile(r"%%([^*]+)%%")
def replace_a(match):
return f"<a href='{match.group(2)}' target='blank'>{match.group(1)}</a>"
def replace_w(match):
return f"<p class='cm-warn-note'>{match.group(1)}</p>"
def replace_i(match):
return f"<p class='cm-info-note'>{match.group(1)}</p>"
def replace_bold(match):
return f"<B>{match.group(1)}</B>"
def replace_white(match):
return f"<font color='white'>{match.group(1)}</font>"
input_text = (
input_text.replace("\\[", "&#91;")
.replace("\\]", "&#93;")
.replace("<", "&lt;")
.replace(">", "&gt;")
)
result_text = re.sub(pattern_a, replace_a, input_text)
result_text = re.sub(pattern_w, replace_w, result_text)
result_text = re.sub(pattern_i, replace_i, result_text)
result_text = re.sub(pattern_bold, replace_bold, result_text)
result_text = re.sub(pattern_white, replace_white, result_text)
return result_text.replace("\n", "<BR>")
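# --- Usage sketch (illustrative input, not part of the original module) ---
# The converter above escapes raw HTML, turns [a/...](url) into links, **...** into <B>,
# [w/...] and [i/...] into note paragraphs, and newlines into <BR>.
_example = "**Note**: see [a/the docs](https://example.com)\nfor details"
print(convert_markdown_to_html(_example))
# -> <B>Note</B>: see <a href='https://example.com' target='blank'>the docs</a><BR>for details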

View File

@@ -1,161 +0,0 @@
import os
import logging
import concurrent.futures
import folder_paths
from comfyui_manager.glob import manager_core as core
from comfyui_manager.glob.constants import model_dir_name_map, MODEL_DIR_NAMES
def get_model_dir(data, show_log=False):
if "download_model_base" in folder_paths.folder_names_and_paths:
models_base = folder_paths.folder_names_and_paths["download_model_base"][0][0]
else:
models_base = folder_paths.models_dir
# NOTE: Validate to prevent path traversal.
if any(char in data["filename"] for char in {"/", "\\", ":"}):
return None
def resolve_custom_node(save_path):
save_path = save_path[13:] # remove 'custom_nodes/'
# NOTE: Validate to prevent path traversal.
if save_path.startswith(os.path.sep) or ":" in save_path:
return None
repo_name = save_path.replace("\\", "/").split("/")[
0
] # get custom node repo name
# NOTE: The creation of files within the custom node path should be removed in the future.
repo_path = core.lookup_installed_custom_nodes_legacy(repo_name)
if repo_path is not None and repo_path[0]:
# Returns the retargeted path based on the actually installed repository
return os.path.join(os.path.dirname(repo_path[1]), save_path)
else:
return None
if data["save_path"] != "default":
if ".." in data["save_path"] or data["save_path"].startswith("/"):
if show_log:
logging.info(
f"[WARN] '{data['save_path']}' is not allowed path. So it will be saved into 'models/etc'."
)
base_model = os.path.join(models_base, "etc")
else:
if data["save_path"].startswith("custom_nodes"):
base_model = resolve_custom_node(data["save_path"])
if base_model is None:
if show_log:
logging.info(
f"[ComfyUI-Manager] The target custom node for model download is not installed: {data['save_path']}"
)
return None
else:
base_model = os.path.join(models_base, data["save_path"])
else:
model_dir_name = model_dir_name_map.get(data["type"].lower())
if model_dir_name is not None:
base_model = folder_paths.folder_names_and_paths[model_dir_name][0][0]
else:
base_model = os.path.join(models_base, "etc")
return base_model
def get_model_path(data, show_log=False):
base_model = get_model_dir(data, show_log)
if base_model is None:
return None
else:
if data["filename"] == "<huggingface>":
return os.path.join(base_model, os.path.basename(data["url"]))
else:
return os.path.join(base_model, data["filename"])
def check_model_installed(json_obj):
def is_exists(model_dir_name, filename, url):
if filename == "<huggingface>":
filename = os.path.basename(url)
dirs = folder_paths.get_folder_paths(model_dir_name)
for x in dirs:
if os.path.exists(os.path.join(x, filename)):
return True
return False
total_models_files = set()
for x in MODEL_DIR_NAMES:
for y in folder_paths.get_filename_list(x):
total_models_files.add(y)
def process_model_phase(item):
if (
"diffusion" not in item["filename"]
and "pytorch" not in item["filename"]
and "model" not in item["filename"]
):
# non-general name case
if item["filename"] in total_models_files:
item["installed"] = "True"
return
if item["save_path"] == "default":
model_dir_name = model_dir_name_map.get(item["type"].lower())
if model_dir_name is not None:
item["installed"] = str(
is_exists(model_dir_name, item["filename"], item["url"])
)
else:
item["installed"] = "False"
else:
model_dir_name = item["save_path"].split("/")[0]
if model_dir_name in folder_paths.folder_names_and_paths:
if is_exists(model_dir_name, item["filename"], item["url"]):
item["installed"] = "True"
if "installed" not in item:
if item["filename"] == "<huggingface>":
filename = os.path.basename(item["url"])
else:
filename = item["filename"]
fullpath = os.path.join(
folder_paths.models_dir, item["save_path"], filename
)
item["installed"] = "True" if os.path.exists(fullpath) else "False"
with concurrent.futures.ThreadPoolExecutor(8) as executor:
for item in json_obj["models"]:
executor.submit(process_model_phase, item)
async def check_whitelist_for_model(item):
from comfyui_manager.data_models import ManagerDatabaseSource
json_obj = await core.get_data_by_mode(ManagerDatabaseSource.cache.value, "model-list.json")
for x in json_obj.get("models", []):
if (
x["save_path"] == item["save_path"]
and x["base"] == item["base"]
and x["filename"] == item["filename"]
):
return True
json_obj = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "model-list.json")
for x in json_obj.get("models", []):
if (
x["save_path"] == item["save_path"]
and x["base"] == item["base"]
and x["filename"] == item["filename"]
):
return True
return False

View File

@@ -1,65 +0,0 @@
import concurrent.futures
from comfyui_manager.glob import manager_core as core
def check_state_of_git_node_pack(
node_packs, do_fetch=False, do_update_check=True, do_update=False
):
if do_fetch:
print("Start fetching...", end="")
elif do_update:
print("Start updating...", end="")
elif do_update_check:
print("Start update check...", end="")
def process_custom_node(item):
core.check_state_of_git_node_pack_single(
item, do_fetch, do_update_check, do_update
)
with concurrent.futures.ThreadPoolExecutor(4) as executor:
for k, v in node_packs.items():
if v.get("active_version") in ["unknown", "nightly"]:
executor.submit(process_custom_node, v)
if do_fetch:
print("\x1b[2K\rFetching done.")
elif do_update:
update_exists = any(
item.get("updatable", False) for item in node_packs.values()
)
if update_exists:
print("\x1b[2K\rUpdate done.")
else:
print("\x1b[2K\rAll extensions are already up-to-date.")
elif do_update_check:
print("\x1b[2K\rUpdate check done.")
def nickname_filter(json_obj):
preemptions_map = {}
for k, x in json_obj.items():
if "preemptions" in x[1]:
for y in x[1]["preemptions"]:
preemptions_map[y] = k
elif k.endswith("/ComfyUI"):
for y in x[0]:
preemptions_map[y] = k
updates = {}
for k, x in json_obj.items():
removes = set()
for y in x[0]:
k2 = preemptions_map.get(y)
if k2 is not None and k != k2:
removes.add(y)
if len(removes) > 0:
updates[k] = [y for y in x[0] if y not in removes]
for k, v in updates.items():
json_obj[k][0] = v
return json_obj
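# --- Usage sketch (illustrative; the [node_names, metadata] value layout is inferred
# from how nickname_filter indexes its input, not from a documented schema) ---
_example = {
    "packA": [["NodeX", "NodeY"], {"preemptions": ["NodeY"]}],
    "packB": [["NodeY", "NodeZ"], {}],
}
print(nickname_filter(_example))
# "NodeY" is removed from packB because packA preempts it:
# {'packA': [['NodeX', 'NodeY'], {'preemptions': ['NodeY']}], 'packB': [['NodeZ'], {}]}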

View File

@@ -1,67 +0,0 @@
from comfyui_manager.glob import manager_core as core
from comfy.cli_args import args
from comfyui_manager.data_models import SecurityLevel, RiskLevel, ManagerDatabaseSource
def is_loopback(address):
import ipaddress
try:
return ipaddress.ip_address(address).is_loopback
except ValueError:
return False
def is_allowed_security_level(level):
is_local_mode = is_loopback(args.listen)
is_personal_cloud = core.get_config()['network_mode'].lower() == 'personal_cloud'
if level == RiskLevel.block.value:
return False
elif level == RiskLevel.high_.value:
if is_local_mode:
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
elif is_personal_cloud:
return core.get_config()['security_level'] == SecurityLevel.weak.value
else:
return False
elif level == RiskLevel.high.value:
if is_local_mode:
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
else:
return core.get_config()['security_level'] == SecurityLevel.weak.value
elif level == RiskLevel.middle_.value:
if is_local_mode or is_personal_cloud:
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
else:
return False
elif level == RiskLevel.middle.value:
return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
else:
return True
async def get_risky_level(files, pip_packages):
json_data1 = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "custom-node-list.json")
json_data2 = await core.get_data_by_mode(
ManagerDatabaseSource.cache.value,
"custom-node-list.json",
channel_url="https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main",
)
all_urls = set()
for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
all_urls.update(x.get("files", []))
for x in files:
if x not in all_urls:
return RiskLevel.high_.value
all_pip_packages = set()
for x in json_data1["custom_nodes"] + json_data2["custom_nodes"]:
all_pip_packages.update(x.get("pip", []))
for p in pip_packages:
if p not in all_pip_packages:
return RiskLevel.block.value
return RiskLevel.middle_.value

View File

@@ -1,50 +0,0 @@
# ComfyUI-Manager: Frontend (js)
This directory contains the JavaScript frontend implementation for ComfyUI-Manager, providing the user interface components that interact with the backend API.
## Core Components
- **comfyui-manager.js**: Main entry point that initializes the manager UI and integrates with ComfyUI.
- **custom-nodes-manager.js**: Implements the UI for browsing, installing, and managing custom nodes.
- **model-manager.js**: Handles the model management interface for downloading and organizing AI models.
- **components-manager.js**: Manages reusable workflow components system.
- **snapshot.js**: Implements the snapshot system for backing up and restoring installations.
## Sharing Components
- **comfyui-share-common.js**: Base functionality for workflow sharing features.
- **comfyui-share-copus.js**: Integration with the Copus sharing platform.
- **comfyui-share-openart.js**: Integration with the OpenArt sharing platform.
- **comfyui-share-youml.js**: Integration with the YouML sharing platform.
## Utility Components
- **cm-api.js**: Client-side API wrapper for communication with the backend.
- **common.js**: Shared utilities and helper functions used across the frontend.
- **node_fixer.js**: Utilities for fixing disconnected links and repairing malformed nodes by recreating them while preserving connections.
- **popover-helper.js**: UI component for popup tooltips and contextual information.
- **turbogrid.esm.js**: Grid component library - https://github.com/cenfun/turbogrid
- **workflow-metadata.js**: Handles workflow metadata parsing, validation, and cross-repository compatibility, including versioning, dependency tracking, and resource management.
## Architecture
The frontend follows a modular component-based architecture:
1. **Integration Layer**: Connects with ComfyUI's existing UI system
2. **Manager Components**: Individual functional UI components (node manager, model manager, etc.)
3. **Sharing Components**: Platform-specific sharing implementations
4. **Utility Layer**: Reusable UI components and helpers
## Implementation Details
- The frontend integrates directly with ComfyUI's UI system through `app.js`
- Dialog-based UI for most manager functions to avoid cluttering the main interface
- Asynchronous API calls to handle backend operations without blocking the UI
## Styling
CSS files are included for specific components:
- **custom-nodes-manager.css**: Styling for the node management UI
- **model-manager.css**: Styling for the model management UI
This frontend implementation provides a comprehensive yet user-friendly interface for managing the ComfyUI ecosystem.

View File

@@ -1,662 +0,0 @@
import { app } from "../../scripts/app.js";
import { api } from "../../scripts/api.js";
import { $el, ComfyDialog } from "../../scripts/ui.js";
import { getBestPosition, getPositionStyle, getRect } from './popover-helper.js';
function internalCustomConfirm(message, confirmMessage, cancelMessage) {
return new Promise((resolve) => {
// transparent bg
const modalOverlay = document.createElement('div');
modalOverlay.style.position = 'fixed';
modalOverlay.style.top = 0;
modalOverlay.style.left = 0;
modalOverlay.style.width = '100%';
modalOverlay.style.height = '100%';
modalOverlay.style.backgroundColor = 'rgba(0, 0, 0, 0.8)';
modalOverlay.style.display = 'flex';
modalOverlay.style.alignItems = 'center';
modalOverlay.style.justifyContent = 'center';
modalOverlay.style.zIndex = '1101';
// Modal window container (dark bg)
const modalDialog = document.createElement('div');
modalDialog.style.backgroundColor = '#333';
modalDialog.style.padding = '20px';
modalDialog.style.borderRadius = '4px';
modalDialog.style.maxWidth = '400px';
modalDialog.style.width = '80%';
modalDialog.style.boxShadow = '0 2px 8px rgba(0, 0, 0, 0.5)';
modalDialog.style.color = '#fff';
// Display message
const modalMessage = document.createElement('p');
modalMessage.textContent = message;
modalMessage.style.margin = '0';
modalMessage.style.padding = '0 0 20px';
modalMessage.style.wordBreak = 'keep-all';
// Button container
const modalButtons = document.createElement('div');
modalButtons.style.display = 'flex';
modalButtons.style.justifyContent = 'flex-end';
// Confirm button (green)
const confirmButton = document.createElement('button');
if(confirmMessage)
confirmButton.textContent = confirmMessage;
else
confirmButton.textContent = 'Confirm';
confirmButton.style.marginLeft = '10px';
confirmButton.style.backgroundColor = '#28a745'; // green
confirmButton.style.color = '#fff';
confirmButton.style.border = 'none';
confirmButton.style.padding = '6px 12px';
confirmButton.style.borderRadius = '4px';
confirmButton.style.cursor = 'pointer';
confirmButton.style.fontWeight = 'bold';
// Cancel button (red)
const cancelButton = document.createElement('button');
if(cancelMessage)
cancelButton.textContent = cancelMessage;
else
cancelButton.textContent = 'Cancel';
cancelButton.style.marginLeft = '10px';
cancelButton.style.backgroundColor = '#dc3545'; // red
cancelButton.style.color = '#fff';
cancelButton.style.border = 'none';
cancelButton.style.padding = '6px 12px';
cancelButton.style.borderRadius = '4px';
cancelButton.style.cursor = 'pointer';
cancelButton.style.fontWeight = 'bold';
const closeModal = () => {
document.body.removeChild(modalOverlay);
};
confirmButton.addEventListener('click', () => {
closeModal();
resolve(true);
});
cancelButton.addEventListener('click', () => {
closeModal();
resolve(false);
});
modalButtons.appendChild(confirmButton);
modalButtons.appendChild(cancelButton);
modalDialog.appendChild(modalMessage);
modalDialog.appendChild(modalButtons);
modalOverlay.appendChild(modalDialog);
document.body.appendChild(modalOverlay);
});
}
export function show_message(msg) {
app.ui.dialog.show(msg);
app.ui.dialog.element.style.zIndex = 1100;
}
export async function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
export async function customConfirm(message) {
try {
let res = await
window['app'].extensionManager.dialog
.confirm({
title: 'Confirm',
message: message
});
return res;
}
catch {
let res = await internalCustomConfirm(message);
return res;
}
}
export function customAlert(message) {
try {
window['app'].extensionManager.toast.addAlert(message);
}
catch {
alert(message);
}
}
export function infoToast(summary, message) {
try {
app.extensionManager.toast.add({
severity: 'info',
summary: summary,
detail: message,
life: 3000
})
}
catch {
// do nothing
}
}
export async function customPrompt(title, message) {
try {
let res = await
window['app'].extensionManager.dialog
.prompt({
title: title,
message: message
});
return res;
}
catch {
return prompt(title, message)
}
}
export function rebootAPI() {
if ('electronAPI' in window) {
window.electronAPI.restartApp();
return true;
}
customConfirm("Are you sure you'd like to reboot the server?").then((isConfirmed) => {
if (isConfirmed) {
try {
api.fetchApi("/v2/manager/reboot");
}
catch(exception) {}
}
});
return false;
}
export var manager_instance = null;
export function setManagerInstance(obj) {
manager_instance = obj;
}
export function showToast(message, duration = 3000) {
const toast = $el("div.comfy-toast", {textContent: message});
document.body.appendChild(toast);
setTimeout(() => {
toast.classList.add("comfy-toast-fadeout");
setTimeout(() => toast.remove(), 500);
}, duration);
}
function isValidURL(url) {
if(url.includes('&'))
return false;
const http_pattern = /^(https?|ftp):\/\/[^\s$?#]+$/;
const ssh_pattern = /^(.+@|ssh:\/\/).+:.+$/;
return http_pattern.test(url) || ssh_pattern.test(url);
}
export async function install_pip(packages) {
if(packages.includes('&'))
app.ui.dialog.show(`Invalid PIP package enumeration: '${packages}'`);
const res = await api.fetchApi("/v2/customnode/install/pip", {
method: "POST",
body: packages,
});
if(res.status == 403) {
show_message('This action is not allowed with this security level configuration.');
return;
}
if(res.status == 200) {
show_message(`PIP package installation is processed.<br>To apply the pip packages, please click the <button id='cm-reboot-button3'><font size='3px'>RESTART</font></button> button in ComfyUI.`);
const rebootButton = document.getElementById('cm-reboot-button3');
const self = this;
rebootButton.addEventListener("click", rebootAPI);
}
else {
show_message(`Failed to install '${packages}'<BR>See terminal log.`);
}
}
export async function install_via_git_url(url, manager_dialog) {
if(!url) {
return;
}
if(!isValidURL(url)) {
show_message(`Invalid Git url '${url}'`);
return;
}
show_message(`Wait...<BR><BR>Installing '${url}'`);
const res = await api.fetchApi("/v2/customnode/install/git_url", {
method: "POST",
body: url,
});
if(res.status == 403) {
show_message('This action is not allowed with this security level configuration.');
return;
}
if(res.status == 200) {
show_message(`'${url}' is installed<BR>To apply the installed custom node, please <button id='cm-reboot-button4'><font size='3px'>RESTART</font></button> ComfyUI.`);
const rebootButton = document.getElementById('cm-reboot-button4');
const self = this;
rebootButton.addEventListener("click",
function() {
if(rebootAPI()) {
manager_dialog.close();
}
});
}
else {
show_message(`Failed to install '${url}'<BR>See terminal log.`);
}
}
export async function free_models(free_execution_cache) {
try {
let mode = "";
if(free_execution_cache) {
mode = '{"unload_models": true, "free_memory": true}';
}
else {
mode = '{"unload_models": true}';
}
let res = await api.fetchApi(`/free`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: mode
});
if (res.status == 200) {
if(free_execution_cache) {
showToast("'Models' and 'Execution Cache' have been cleared.", 3000);
}
else {
showToast("Models' have been unloaded.", 3000);
}
} else {
showToast('Unloading of models failed. Installed ComfyUI may be an outdated version.', 5000);
}
} catch (error) {
showToast('An error occurred while trying to unload models.', 5000);
}
}
export function md5(inputString) {
const hc = '0123456789abcdef';
const rh = n => {let j,s='';for(j=0;j<=3;j++) s+=hc.charAt((n>>(j*8+4))&0x0F)+hc.charAt((n>>(j*8))&0x0F);return s;}
const ad = (x,y) => {let l=(x&0xFFFF)+(y&0xFFFF);let m=(x>>16)+(y>>16)+(l>>16);return (m<<16)|(l&0xFFFF);}
const rl = (n,c) => (n<<c)|(n>>>(32-c));
const cm = (q,a,b,x,s,t) => ad(rl(ad(ad(a,q),ad(x,t)),s),b);
const ff = (a,b,c,d,x,s,t) => cm((b&c)|((~b)&d),a,b,x,s,t);
const gg = (a,b,c,d,x,s,t) => cm((b&d)|(c&(~d)),a,b,x,s,t);
const hh = (a,b,c,d,x,s,t) => cm(b^c^d,a,b,x,s,t);
const ii = (a,b,c,d,x,s,t) => cm(c^(b|(~d)),a,b,x,s,t);
const sb = x => {
let i;const nblk=((x.length+8)>>6)+1;const blks=[];for(i=0;i<nblk*16;i++) { blks[i]=0 };
for(i=0;i<x.length;i++) {blks[i>>2]|=x.charCodeAt(i)<<((i%4)*8);}
blks[i>>2]|=0x80<<((i%4)*8);blks[nblk*16-2]=x.length*8;return blks;
}
let i,x=sb(inputString),a=1732584193,b=-271733879,c=-1732584194,d=271733878,olda,oldb,oldc,oldd;
for(i=0;i<x.length;i+=16) {olda=a;oldb=b;oldc=c;oldd=d;
a=ff(a,b,c,d,x[i+ 0], 7, -680876936);d=ff(d,a,b,c,x[i+ 1],12, -389564586);c=ff(c,d,a,b,x[i+ 2],17, 606105819);
b=ff(b,c,d,a,x[i+ 3],22,-1044525330);a=ff(a,b,c,d,x[i+ 4], 7, -176418897);d=ff(d,a,b,c,x[i+ 5],12, 1200080426);
c=ff(c,d,a,b,x[i+ 6],17,-1473231341);b=ff(b,c,d,a,x[i+ 7],22, -45705983);a=ff(a,b,c,d,x[i+ 8], 7, 1770035416);
d=ff(d,a,b,c,x[i+ 9],12,-1958414417);c=ff(c,d,a,b,x[i+10],17, -42063);b=ff(b,c,d,a,x[i+11],22,-1990404162);
a=ff(a,b,c,d,x[i+12], 7, 1804603682);d=ff(d,a,b,c,x[i+13],12, -40341101);c=ff(c,d,a,b,x[i+14],17,-1502002290);
b=ff(b,c,d,a,x[i+15],22, 1236535329);a=gg(a,b,c,d,x[i+ 1], 5, -165796510);d=gg(d,a,b,c,x[i+ 6], 9,-1069501632);
c=gg(c,d,a,b,x[i+11],14, 643717713);b=gg(b,c,d,a,x[i+ 0],20, -373897302);a=gg(a,b,c,d,x[i+ 5], 5, -701558691);
d=gg(d,a,b,c,x[i+10], 9, 38016083);c=gg(c,d,a,b,x[i+15],14, -660478335);b=gg(b,c,d,a,x[i+ 4],20, -405537848);
a=gg(a,b,c,d,x[i+ 9], 5, 568446438);d=gg(d,a,b,c,x[i+14], 9,-1019803690);c=gg(c,d,a,b,x[i+ 3],14, -187363961);
b=gg(b,c,d,a,x[i+ 8],20, 1163531501);a=gg(a,b,c,d,x[i+13], 5,-1444681467);d=gg(d,a,b,c,x[i+ 2], 9, -51403784);
c=gg(c,d,a,b,x[i+ 7],14, 1735328473);b=gg(b,c,d,a,x[i+12],20,-1926607734);a=hh(a,b,c,d,x[i+ 5], 4, -378558);
d=hh(d,a,b,c,x[i+ 8],11,-2022574463);c=hh(c,d,a,b,x[i+11],16, 1839030562);b=hh(b,c,d,a,x[i+14],23, -35309556);
a=hh(a,b,c,d,x[i+ 1], 4,-1530992060);d=hh(d,a,b,c,x[i+ 4],11, 1272893353);c=hh(c,d,a,b,x[i+ 7],16, -155497632);
b=hh(b,c,d,a,x[i+10],23,-1094730640);a=hh(a,b,c,d,x[i+13], 4, 681279174);d=hh(d,a,b,c,x[i+ 0],11, -358537222);
c=hh(c,d,a,b,x[i+ 3],16, -722521979);b=hh(b,c,d,a,x[i+ 6],23, 76029189);a=hh(a,b,c,d,x[i+ 9], 4, -640364487);
d=hh(d,a,b,c,x[i+12],11, -421815835);c=hh(c,d,a,b,x[i+15],16, 530742520);b=hh(b,c,d,a,x[i+ 2],23, -995338651);
a=ii(a,b,c,d,x[i+ 0], 6, -198630844);d=ii(d,a,b,c,x[i+ 7],10, 1126891415);c=ii(c,d,a,b,x[i+14],15,-1416354905);
b=ii(b,c,d,a,x[i+ 5],21, -57434055);a=ii(a,b,c,d,x[i+12], 6, 1700485571);d=ii(d,a,b,c,x[i+ 3],10,-1894986606);
c=ii(c,d,a,b,x[i+10],15, -1051523);b=ii(b,c,d,a,x[i+ 1],21,-2054922799);a=ii(a,b,c,d,x[i+ 8], 6, 1873313359);
d=ii(d,a,b,c,x[i+15],10, -30611744);c=ii(c,d,a,b,x[i+ 6],15,-1560198380);b=ii(b,c,d,a,x[i+13],21, 1309151649);
a=ii(a,b,c,d,x[i+ 4], 6, -145523070);d=ii(d,a,b,c,x[i+11],10,-1120210379);c=ii(c,d,a,b,x[i+ 2],15, 718787259);
b=ii(b,c,d,a,x[i+ 9],21, -343485551);a=ad(a,olda);b=ad(b,oldb);c=ad(c,oldc);d=ad(d,oldd);
}
return rh(a)+rh(b)+rh(c)+rh(d);
}
export async function fetchData(route, options) {
let err;
const res = await api.fetchApi(route, options).catch(e => {
err = e;
});
if (!res) {
return {
status: 400,
error: new Error("Unknown Error")
}
}
const { status, statusText } = res;
if (err) {
return {
status,
error: err
}
}
if (status !== 200) {
return {
status,
error: new Error(statusText || "Unknown Error")
}
}
const data = await res.json();
if (!data) {
return {
status,
error: new Error(`Failed to load data: ${route}`)
}
}
return {
status,
data
}
}
// https://cenfun.github.io/open-icons/
export const icons = {
search: '<svg viewBox="0 0 24 24" width="100%" height="100%" pointer-events="none" xmlns="http://www.w3.org/2000/svg"><path fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="m21 21-4.486-4.494M19 10.5a8.5 8.5 0 1 1-17 0 8.5 8.5 0 0 1 17 0"/></svg>',
conflicts: '<svg viewBox="0 0 400 400" width="100%" height="100%" pointer-events="none" xmlns="http://www.w3.org/2000/svg"><path fill="currentColor" d="m397.2 350.4.2-.2-180-320-.2.2C213.8 24.2 207.4 20 200 20s-13.8 4.2-17.2 10.4l-.2-.2-180 320 .2.2c-1.6 2.8-2.8 6-2.8 9.6 0 11 9 20 20 20h360c11 0 20-9 20-20 0-3.6-1.2-6.8-2.8-9.6M220 340h-40v-40h40zm0-60h-40V120h40z"/></svg>',
passed: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 426.667 426.667"><path fill="#6AC259" d="M213.333,0C95.518,0,0,95.514,0,213.333s95.518,213.333,213.333,213.333c117.828,0,213.333-95.514,213.333-213.333S331.157,0,213.333,0z M174.199,322.918l-93.935-93.931l31.309-31.309l62.626,62.622l140.894-140.898l31.309,31.309L174.199,322.918z"/></svg>',
download: '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" width="100%" height="100%" viewBox="0 0 32 32"><path fill="currentColor" d="M26 24v4H6v-4H4v4a2 2 0 0 0 2 2h20a2 2 0 0 0 2-2v-4zm0-10l-1.41-1.41L17 20.17V2h-2v18.17l-7.59-7.58L6 14l10 10l10-10z"></path></svg>',
close: '<svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" width="100%" height="100%" viewBox="0 0 16 16"><g fill="currentColor"><path fill-rule="evenodd" clip-rule="evenodd" d="m7.116 8-4.558 4.558.884.884L8 8.884l4.558 4.558.884-.884L8.884 8l4.558-4.558-.884-.884L8 7.116 3.442 2.558l-.884.884L7.116 8z"/></g></svg>',
arrowRight: '<svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" width="100%" height="100%" viewBox="0 0 20 20"><path fill="currentColor" fill-rule="evenodd" d="m2.542 2.154 7.254 7.26c.136.14.204.302.204.483a.73.73 0 0 1-.204.5l-7.575 7.398c-.383.317-.724.317-1.022 0-.299-.317-.299-.643 0-.98l7.08-6.918-6.754-6.763c-.237-.343-.215-.654.066-.935.281-.28.598-.295.951-.045Zm9 0 7.254 7.26c.136.14.204.302.204.483a.73.73 0 0 1-.204.5l-7.575 7.398c-.383.317-.724.317-1.022 0-.299-.317-.299-.643 0-.98l7.08-6.918-6.754-6.763c-.237-.343-.215-.654.066-.935.281-.28.598-.295.951-.045Z"/></svg>'
}
export function sanitizeHTML(str) {
return str
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&#039;");
}
export function showTerminal() {
try {
const panel = app.extensionManager.bottomPanel;
const isTerminalVisible = panel.bottomPanelVisible && panel.activeBottomPanelTab.id === 'logs-terminal';
if (!isTerminalVisible)
panel.toggleBottomPanelTab('logs-terminal');
}
catch(exception) {
// do nothing
}
}
let need_restart = false;
export function setNeedRestart(value) {
need_restart = value;
}
async function onReconnected(event) {
if(need_restart) {
setNeedRestart(false);
const confirmed = await customConfirm("To apply the changes to the node pack's installation status, you need to refresh the browser. Would you like to refresh?");
if (!confirmed) {
return;
}
window.location.reload(true);
}
}
api.addEventListener('reconnected', onReconnected);
const storeId = "comfyui-manager-grid";
let timeId;
export function storeColumnWidth(gridId, columnItem) {
clearTimeout(timeId);
timeId = setTimeout(() => {
let data = {};
const dataStr = localStorage.getItem(storeId);
if (dataStr) {
try {
data = JSON.parse(dataStr);
} catch (e) {}
}
if (!data[gridId]) {
data[gridId] = {};
}
data[gridId][columnItem.id] = columnItem.width;
localStorage.setItem(storeId, JSON.stringify(data));
}, 200)
}
export function restoreColumnWidth(gridId, columns) {
const dataStr = localStorage.getItem(storeId);
if (!dataStr) {
return;
}
let data;
try {
data = JSON.parse(dataStr);
} catch (e) {}
if(!data) {
return;
}
const widthMap = data[gridId];
if (!widthMap) {
return;
}
columns.forEach(columnItem => {
const w = widthMap[columnItem.id];
if (w) {
columnItem.width = w;
}
});
}
export function getTimeAgo(dateStr) {
const date = new Date(dateStr);
if (!date || !(date instanceof Date) || isNaN(date.getTime())) {
return "";
}
const units = [
{ max: 2760000, value: 60000, name: 'minute', past: 'a minute ago', future: 'in a minute' },
{ max: 72000000, value: 3600000, name: 'hour', past: 'an hour ago', future: 'in an hour' },
{ max: 518400000, value: 86400000, name: 'day', past: 'yesterday', future: 'tomorrow' },
{ max: 2419200000, value: 604800000, name: 'week', past: 'last week', future: 'in a week' },
{ max: 28512000000, value: 2592000000, name: 'month', past: 'last month', future: 'in a month' }
];
const diff = Date.now() - date.getTime();
// less than a minute
if (Math.abs(diff) < 60000)
return 'just now';
for (let i = 0; i < units.length; i++) {
if (Math.abs(diff) < units[i].max) {
return format(diff, units[i].value, units[i].name, units[i].past, units[i].future, diff < 0);
}
}
function format(diff, divisor, unit, past, future, isInTheFuture) {
const val = Math.round(Math.abs(diff) / divisor);
if (isInTheFuture)
return val <= 1 ? future : 'in ' + val + ' ' + unit + 's';
return val <= 1 ? past : val + ' ' + unit + 's ago';
}
return format(diff, 31536000000, 'year', 'last year', 'in a year', diff < 0);
};
export const loadCss = (cssFile) => {
const cssPath = import.meta.resolve(cssFile);
//console.log(cssPath);
const $link = document.createElement("link");
$link.setAttribute("rel", 'stylesheet');
$link.setAttribute("href", cssPath);
document.head.appendChild($link);
};
export const copyText = (text) => {
return new Promise((resolve) => {
let err;
try {
navigator.clipboard.writeText(text);
} catch (e) {
err = e;
}
if (err) {
resolve(false);
} else {
resolve(true);
}
});
};
function renderPopover($elem, target, options = {}) {
// async microtask
queueMicrotask(() => {
const containerRect = getRect(window);
const targetRect = getRect(target);
const elemRect = getRect($elem);
const positionInfo = getBestPosition(
containerRect,
targetRect,
elemRect,
options.positions
);
const style = getPositionStyle(positionInfo, {
bgColor: options.bgColor,
borderColor: options.borderColor,
borderRadius: options.borderRadius
});
$elem.style.top = positionInfo.top + "px";
$elem.style.left = positionInfo.left + "px";
$elem.style.background = style.background;
});
}
let $popover;
export function hidePopover() {
if ($popover) {
$popover.remove();
$popover = null;
}
}
export function showPopover(target, text, className, options) {
hidePopover();
$popover = document.createElement("div");
$popover.className = ['cn-popover', className].filter(it => it).join(" ");
document.body.appendChild($popover);
$popover.innerHTML = text;
$popover.style.display = "block";
renderPopover($popover, target, {
borderRadius: 10,
... options
});
}
let $tooltip;
export function hideTooltip(target) {
if ($tooltip) {
$tooltip.style.display = "none";
$tooltip.innerHTML = "";
$tooltip.style.top = "0px";
$tooltip.style.left = "0px";
}
}
export function showTooltip(target, text, className = 'cn-tooltip', styleMap = {}) {
if (!$tooltip) {
$tooltip = document.createElement("div");
$tooltip.className = className;
$tooltip.style.cssText = `
pointer-events: none;
position: fixed;
z-index: 10001;
padding: 20px;
color: #1e1e1e;
max-width: 350px;
filter: drop-shadow(1px 5px 5px rgb(0 0 0 / 30%));
${Object.keys(styleMap).map(k=>k+":"+styleMap[k]+";").join("")}
`;
document.body.appendChild($tooltip);
}
$tooltip.innerHTML = text;
$tooltip.style.display = "block";
renderPopover($tooltip, target, {
positions: ['top', 'bottom', 'right', 'center'],
bgColor: "#ffffff",
borderColor: "#cccccc",
borderRadius: 5
});
}
export function generateUUID() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
const r = Math.random() * 16 | 0;
const v = c === 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
}
function initTooltip () {
const mouseenterHandler = (e) => {
const target = e.target;
const text = target.getAttribute('tooltip');
if (text) {
showTooltip(target, text);
}
};
const mouseleaveHandler = (e) => {
const target = e.target;
const text = target.getAttribute('tooltip');
if (text) {
hideTooltip(target);
}
};
document.body.removeEventListener('mouseenter', mouseenterHandler, true);
document.body.removeEventListener('mouseleave', mouseleaveHandler, true);
document.body.addEventListener('mouseenter', mouseenterHandler, true);
document.body.addEventListener('mouseleave', mouseleaveHandler, true);
}
initTooltip();

View File

@@ -1,699 +0,0 @@
.cn-manager {
--grid-font: -apple-system, BlinkMacSystemFont, "Segue UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
z-index: 1099;
width: 80%;
height: 80%;
display: flex;
flex-direction: column;
gap: 10px;
color: var(--fg-color);
font-family: arial, sans-serif;
text-underline-offset: 3px;
outline: none;
}
.cn-manager .cn-flex-auto {
flex: auto;
}
.cn-manager button {
font-size: 16px;
color: var(--input-text);
background-color: var(--comfy-input-bg);
border-radius: 8px;
border-color: var(--border-color);
border-style: solid;
margin: 0;
padding: 4px 8px;
min-width: 100px;
}
.cn-manager button:disabled,
.cn-manager input:disabled,
.cn-manager select:disabled {
color: gray;
}
.cn-manager button:disabled {
background-color: var(--comfy-input-bg);
}
.cn-manager .cn-manager-restart {
display: none;
background-color: #500000;
color: white;
}
.cn-manager .cn-manager-stop {
display: none;
background-color: #500000;
color: white;
}
.cn-manager .cn-manager-back {
align-items: center;
justify-content: center;
}
.arrow-icon {
height: 1em;
width: 1em;
margin-right: 5px;
transform: translateY(2px);
}
.cn-icon {
display: block;
width: 16px;
height: 16px;
}
.cn-icon svg {
display: block;
margin: 0;
pointer-events: none;
}
.cn-manager-header {
display: flex;
flex-wrap: wrap;
gap: 5px;
align-items: center;
padding: 0 5px;
}
.cn-manager-header label {
display: flex;
gap: 5px;
align-items: center;
}
.cn-manager-filter {
height: 28px;
line-height: 28px;
}
.cn-manager-keywords {
height: 28px;
line-height: 28px;
padding: 0 5px 0 26px;
background-size: 16px;
background-position: 5px center;
background-repeat: no-repeat;
background-image: url("data:image/svg+xml;charset=utf8,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20width%3D%22100%25%22%20height%3D%22100%25%22%20pointer-events%3D%22none%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Cpath%20fill%3D%22none%22%20stroke%3D%22%23888%22%20stroke-linecap%3D%22round%22%20stroke-linejoin%3D%22round%22%20stroke-width%3D%222%22%20d%3D%22m21%2021-4.486-4.494M19%2010.5a8.5%208.5%200%201%201-17%200%208.5%208.5%200%200%201%2017%200%22%2F%3E%3C%2Fsvg%3E");
}
.cn-manager-status {
padding-left: 10px;
}
.cn-manager-grid {
flex: auto;
border: 1px solid var(--border-color);
overflow: hidden;
position: relative;
}
.cn-manager-selection {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.cn-manager-message {
position: relative;
}
.cn-manager-footer {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.cn-manager-grid .tg-turbogrid {
font-family: var(--grid-font);
font-size: 15px;
background: var(--bg-color);
}
.cn-manager-grid .tg-turbogrid .tg-highlight::after {
position: absolute;
top: 0;
left: 0;
content: "";
display: block;
width: 100%;
height: 100%;
box-sizing: border-box;
background-color: #80bdff11;
pointer-events: none;
}
.cn-manager-grid .cn-pack-name a {
color: skyblue;
text-decoration: none;
word-break: break-word;
}
.cn-manager-grid .cn-pack-desc a {
color: #5555FF;
font-weight: bold;
text-decoration: none;
}
.cn-manager-grid .tg-cell a:hover {
text-decoration: underline;
}
.cn-manager-grid .cn-pack-version {
line-height: 100%;
display: flex;
flex-direction: column;
justify-content: center;
height: 100%;
gap: 5px;
}
.cn-manager-grid .cn-pack-nodes {
line-height: 100%;
display: flex;
flex-direction: column;
justify-content: center;
gap: 5px;
cursor: pointer;
height: 100%;
}
.cn-manager-grid .cn-pack-nodes:hover {
text-decoration: underline;
}
.cn-manager-grid .cn-pack-conflicts {
color: orange;
}
.cn-popover {
position: fixed;
z-index: 10000;
padding: 20px;
color: #1e1e1e;
filter: drop-shadow(1px 5px 5px rgb(0 0 0 / 30%));
overflow: hidden;
}
.cn-flyover {
position: absolute;
top: 0;
right: 0;
z-index: 1000;
display: none;
width: 50%;
height: 100%;
background-color: var(--comfy-menu-bg);
animation-duration: 0.2s;
animation-fill-mode: both;
flex-direction: column;
}
.cn-flyover::before {
position: absolute;
top: 0;
content: "";
z-index: 10;
display: block;
width: 10px;
height: 100%;
pointer-events: none;
left: -10px;
background-image: linear-gradient(to left, rgb(0 0 0 / 20%), rgb(0 0 0 / 0%));
}
.cn-flyover-header {
height: 45px;
display: flex;
align-items: center;
gap: 5px;
border-bottom: 1px solid var(--border-color);
}
.cn-flyover-close {
display: flex;
align-items: center;
padding: 0 10px;
justify-content: center;
cursor: pointer;
opacity: 0.8;
height: 100%;
}
.cn-flyover-close:hover {
opacity: 1;
}
.cn-flyover-close svg {
display: block;
margin: 0;
pointer-events: none;
width: 20px;
height: 20px;
}
.cn-flyover-title {
display: flex;
align-items: center;
font-weight: bold;
gap: 10px;
flex: auto;
}
.cn-flyover-body {
height: calc(100% - 45px);
overflow-y: auto;
position: relative;
background-color: var(--comfy-menu-secondary-bg);
}
@keyframes cn-slide-in-right {
from {
visibility: visible;
transform: translate3d(100%, 0, 0);
}
to {
transform: translate3d(0, 0, 0);
}
}
.cn-slide-in-right {
animation-name: cn-slide-in-right;
}
@keyframes cn-slide-out-right {
from {
transform: translate3d(0, 0, 0);
}
to {
visibility: hidden;
transform: translate3d(100%, 0, 0);
}
}
.cn-slide-out-right {
animation-name: cn-slide-out-right;
}
.cn-nodes-list {
width: 100%;
}
.cn-nodes-row {
display: flex;
align-items: center;
gap: 10px;
}
.cn-nodes-row:nth-child(odd) {
background-color: rgb(0 0 0 / 5%);
}
.cn-nodes-row:hover {
background-color: rgb(0 0 0 / 10%);
}
.cn-nodes-sn {
text-align: right;
min-width: 35px;
color: var(--drag-text);
flex-shrink: 0;
font-size: 12px;
padding: 8px 5px;
}
.cn-nodes-name {
cursor: pointer;
white-space: nowrap;
flex-shrink: 0;
position: relative;
padding: 8px 5px;
}
.cn-nodes-name::after {
content: attr(action);
position: absolute;
pointer-events: none;
top: 50%;
left: 100%;
transform: translate(5px, -50%);
font-size: 12px;
color: var(--drag-text);
background-color: var(--comfy-input-bg);
border-radius: 10px;
border: 1px solid var(--border-color);
padding: 3px 8px;
display: none;
}
.cn-nodes-name.action::after {
display: block;
}
.cn-nodes-name:hover {
text-decoration: underline;
}
.cn-nodes-conflict .cn-nodes-name,
.cn-nodes-conflict .cn-icon {
color: orange;
}
.cn-conflicts-list {
display: flex;
flex-wrap: wrap;
gap: 5px;
align-items: center;
padding: 5px 0;
}
.cn-conflicts-list b {
font-weight: normal;
color: var(--descrip-text);
}
.cn-nodes-pack {
cursor: pointer;
color: skyblue;
}
.cn-nodes-pack:hover {
text-decoration: underline;
}
.cn-pack-badge {
font-size: 12px;
font-weight: normal;
background-color: var(--comfy-input-bg);
border-radius: 10px;
border: 1px solid var(--border-color);
padding: 3px 8px;
color: var(--error-text);
}
.cn-preview {
min-width: 300px;
max-width: 500px;
min-height: 120px;
overflow: hidden;
font-size: 12px;
pointer-events: none;
padding: 12px;
color: var(--fg-color);
}
.cn-preview-header {
display: flex;
gap: 8px;
align-items: center;
border-bottom: 1px solid var(--comfy-input-bg);
padding: 5px 10px;
}
.cn-preview-dot {
width: 8px;
height: 8px;
border-radius: 50%;
background-color: grey;
position: relative;
filter: drop-shadow(1px 2px 3px rgb(0 0 0 / 30%));
}
.cn-preview-dot.cn-preview-optional::after {
content: "";
position: absolute;
pointer-events: none;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
background-color: var(--comfy-input-bg);
border-radius: 50%;
width: 3px;
height: 3px;
}
.cn-preview-dot.cn-preview-grid {
border-radius: 0;
}
.cn-preview-dot.cn-preview-grid::before {
content: '';
position: absolute;
border-left: 1px solid var(--comfy-input-bg);
border-right: 1px solid var(--comfy-input-bg);
width: 4px;
height: 100%;
left: 2px;
top: 0;
z-index: 1;
}
.cn-preview-dot.cn-preview-grid::after {
content: '';
position: absolute;
border-top: 1px solid var(--comfy-input-bg);
border-bottom: 1px solid var(--comfy-input-bg);
width: 100%;
height: 4px;
left: 0;
top: 2px;
z-index: 1;
}
.cn-preview-name {
flex: auto;
font-size: 14px;
}
.cn-preview-io {
display: flex;
justify-content: space-between;
padding: 10px 10px;
}
.cn-preview-column > div {
display: flex;
gap: 10px;
align-items: center;
height: 18px;
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.cn-preview-input {
justify-content: flex-start;
}
.cn-preview-output {
justify-content: flex-end;
}
.cn-preview-list {
display: flex;
flex-direction: column;
gap: 3px;
padding: 0 10px 10px 10px;
}
.cn-preview-switch {
position: relative;
display: flex;
justify-content: space-between;
align-items: center;
background: var(--bg-color);
border: 2px solid var(--border-color);
border-radius: 10px;
text-wrap: nowrap;
padding: 2px 20px;
gap: 10px;
}
.cn-preview-switch::before,
.cn-preview-switch::after {
position: absolute;
pointer-events: none;
top: 50%;
transform: translate(0, -50%);
color: var(--fg-color);
opacity: 0.8;
}
.cn-preview-switch::before {
content: "◀";
left: 5px;
}
.cn-preview-switch::after {
content: "▶";
right: 5px;
}
.cn-preview-value {
color: var(--descrip-text);
}
.cn-preview-string {
min-height: 30px;
max-height: 300px;
background: var(--bg-color);
color: var(--descrip-text);
border-radius: 3px;
padding: 3px 5px;
overflow-y: auto;
overflow-x: hidden;
}
.cn-preview-description {
margin: 0px 10px 10px 10px;
padding: 6px;
background: var(--border-color);
color: var(--descrip-text);
border-radius: 5px;
font-style: italic;
word-break: break-word;
}
.cn-tag-list {
display: flex;
flex-wrap: wrap;
gap: 5px;
align-items: center;
margin-bottom: 5px;
}
.cn-tag-list > div {
background-color: var(--border-color);
border-radius: 5px;
padding: 0 5px;
}
.cn-install-buttons {
display: flex;
flex-direction: column;
gap: 3px;
padding: 3px;
align-items: center;
justify-content: center;
height: 100%;
}
.cn-selected-buttons {
display: flex;
gap: 5px;
align-items: center;
padding-right: 20px;
}
.cn-manager .cn-btn-enable {
background-color: #333399;
color: white;
}
.cn-manager .cn-btn-disable {
background-color: #442277;
color: white;
}
.cn-manager .cn-btn-update {
background-color: #1155AA;
color: white;
}
.cn-manager .cn-btn-try-update {
background-color: Gray;
color: white;
}
.cn-manager .cn-btn-try-fix {
background-color: #6495ED;
color: white;
}
.cn-manager .cn-btn-import-failed {
background-color: #AA1111;
font-size: 10px;
font-weight: bold;
color: white;
}
.cn-manager .cn-btn-install {
background-color: black;
color: white;
}
.cn-manager .cn-btn-try-install {
background-color: Gray;
color: white;
}
.cn-manager .cn-btn-uninstall {
background-color: #993333;
color: white;
}
.cn-manager .cn-btn-reinstall {
background-color: #993333;
color: white;
}
.cn-manager .cn-btn-switch {
background-color: #448833;
color: white;
}
@keyframes cn-btn-loading-bg {
0% {
left: 0;
}
100% {
left: -105px;
}
}
.cn-manager button.cn-btn-loading {
position: relative;
overflow: hidden;
border-color: rgb(0 119 207 / 80%);
background-color: var(--comfy-input-bg);
}
.cn-manager button.cn-btn-loading::after {
position: absolute;
top: 0;
left: 0;
content: "";
width: 500px;
height: 100%;
background-image: repeating-linear-gradient(
-45deg,
rgb(0 119 207 / 30%),
rgb(0 119 207 / 30%) 10px,
transparent 10px,
transparent 15px
);
animation: cn-btn-loading-bg 2s linear infinite;
}
.cn-manager-light .cn-pack-name a {
color: blue;
}
.cn-manager-light .cm-warn-note {
background-color: #ccc !important;
}
.cn-manager-light .cn-btn-install {
background-color: #333;
}

View File

File diff suppressed because it is too large

View File

@@ -1,213 +0,0 @@
.cmm-manager {
--grid-font: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
z-index: 1099;
width: 80%;
height: 80%;
display: flex;
flex-direction: column;
gap: 10px;
color: var(--fg-color);
font-family: arial, sans-serif;
}
.cmm-manager .cmm-flex-auto {
flex: auto;
}
.cmm-manager button {
font-size: 16px;
color: var(--input-text);
background-color: var(--comfy-input-bg);
border-radius: 8px;
border-color: var(--border-color);
border-style: solid;
margin: 0;
padding: 4px 8px;
min-width: 100px;
}
.cmm-manager button:disabled,
.cmm-manager input:disabled,
.cmm-manager select:disabled {
color: gray;
}
.cmm-manager button:disabled {
background-color: var(--comfy-input-bg);
}
.cmm-manager .cmm-manager-refresh {
display: none;
background-color: #000080;
color: white;
}
.cmm-manager .cmm-manager-stop {
display: none;
background-color: #500000;
color: white;
}
.cmm-manager-header {
display: flex;
flex-wrap: wrap;
gap: 5px;
align-items: center;
padding: 0 5px;
}
.cmm-manager-header label {
display: flex;
gap: 5px;
align-items: center;
}
.cmm-manager-type,
.cmm-manager-base,
.cmm-manager-filter {
height: 28px;
line-height: 28px;
}
.cmm-manager-keywords {
height: 28px;
line-height: 28px;
padding: 0 5px 0 26px;
background-size: 16px;
background-position: 5px center;
background-repeat: no-repeat;
background-image: url("data:image/svg+xml;charset=utf8,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20width%3D%22100%25%22%20height%3D%22100%25%22%20pointer-events%3D%22none%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Cpath%20fill%3D%22none%22%20stroke%3D%22%23888%22%20stroke-linecap%3D%22round%22%20stroke-linejoin%3D%22round%22%20stroke-width%3D%222%22%20d%3D%22m21%2021-4.486-4.494M19%2010.5a8.5%208.5%200%201%201-17%200%208.5%208.5%200%200%201%2017%200%22%2F%3E%3C%2Fsvg%3E");
}
.cmm-manager-status {
padding-left: 10px;
}
.cmm-manager-grid {
flex: auto;
border: 1px solid var(--border-color);
overflow: hidden;
}
.cmm-manager-selection {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.cmm-manager-footer {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.cmm-manager-grid .tg-turbogrid {
font-family: var(--grid-font);
font-size: 15px;
background: var(--bg-color);
}
.cmm-manager-grid .cmm-node-name a {
color: skyblue;
text-decoration: none;
word-break: break-word;
}
.cmm-manager-grid .cmm-node-desc a {
color: #5555FF;
font-weight: bold;
text-decoration: none;
}
.cmm-manager-grid .tg-cell a:hover {
text-decoration: underline;
}
.cmm-icon-passed {
width: 20px;
height: 20px;
position: absolute;
left: calc(50% - 10px);
top: calc(50% - 10px);
}
.cmm-manager .cmm-btn-enable {
background-color: blue;
color: white;
}
.cmm-manager .cmm-btn-disable {
background-color: MediumSlateBlue;
color: white;
}
.cmm-manager .cmm-btn-install {
background-color: black;
color: white;
}
.cmm-btn-download {
width: 18px;
height: 18px;
position: absolute;
left: calc(50% - 10px);
top: calc(50% - 10px);
cursor: pointer;
opacity: 0.8;
color: #fff;
}
.cmm-btn-download:hover {
opacity: 1;
}
.cmm-manager-light .cmm-btn-download {
color: #000;
}
@keyframes cmm-btn-loading-bg {
0% {
left: 0;
}
100% {
left: -105px;
}
}
.cmm-manager button.cmm-btn-loading {
position: relative;
overflow: hidden;
border-color: rgb(0 119 207 / 80%);
background-color: var(--comfy-input-bg);
}
.cmm-manager button.cmm-btn-loading::after {
position: absolute;
top: 0;
left: 0;
content: "";
width: 500px;
height: 100%;
background-image: repeating-linear-gradient(
-45deg,
rgb(0 119 207 / 30%),
rgb(0 119 207 / 30%) 10px,
transparent 10px,
transparent 15px
);
animation: cmm-btn-loading-bg 2s linear infinite;
}
.cmm-manager-light .cmm-node-name a {
color: blue;
}
.cmm-manager-light .cm-warn-note {
background-color: #ccc !important;
}
.cmm-manager-light .cmm-btn-install {
background-color: #333;
}

View File

@@ -1,619 +0,0 @@
const hasOwn = function(obj, key) {
return Object.prototype.hasOwnProperty.call(obj, key);
};
const isNum = function(num) {
if (typeof num !== 'number' || isNaN(num)) {
return false;
}
const isInvalid = function(n) {
if (n === Number.MAX_VALUE || n === Number.MIN_VALUE || n === Number.NEGATIVE_INFINITY || n === Number.POSITIVE_INFINITY) {
return true;
}
return false;
};
if (isInvalid(num)) {
return false;
}
return true;
};
const toNum = (num) => {
if (typeof (num) !== 'number') {
num = parseFloat(num);
}
if (isNaN(num)) {
num = 0;
}
num = Math.round(num);
return num;
};
const clamp = function(value, min, max) {
return Math.max(min, Math.min(max, value));
};
const isWindow = (obj) => {
return Boolean(obj && obj === obj.window);
};
const isDocument = (obj) => {
return Boolean(obj && obj.nodeType === 9);
};
const isElement = (obj) => {
return Boolean(obj && obj.nodeType === 1);
};
// ===========================================================================================
export const toRect = (obj) => {
if (obj) {
return {
left: toNum(obj.left || obj.x),
top: toNum(obj.top || obj.y),
width: toNum(obj.width),
height: toNum(obj.height)
};
}
return {
left: 0,
top: 0,
width: 0,
height: 0
};
};
export const getElement = (selector) => {
if (typeof selector === 'string' && selector) {
if (selector.startsWith('#')) {
return document.getElementById(selector.slice(1));
}
return document.querySelector(selector);
}
if (isDocument(selector)) {
return selector.body;
}
if (isElement(selector)) {
return selector;
}
};
export const getRect = (target, fixed) => {
if (!target) {
return toRect();
}
if (isWindow(target)) {
return {
left: 0,
top: 0,
width: window.innerWidth,
height: window.innerHeight
};
}
const elem = getElement(target);
if (!elem) {
return toRect(target);
}
const br = elem.getBoundingClientRect();
const rect = toRect(br);
// fix offset
if (!fixed) {
rect.left += window.scrollX;
rect.top += window.scrollY;
}
rect.width = elem.offsetWidth;
rect.height = elem.offsetHeight;
return rect;
};
// ===========================================================================================
const calculators = {
bottom: (info, containerRect, targetRect) => {
info.space = containerRect.top + containerRect.height - targetRect.top - targetRect.height - info.height;
info.top = targetRect.top + targetRect.height;
info.left = Math.round(targetRect.left + targetRect.width * 0.5 - info.width * 0.5);
},
top: (info, containerRect, targetRect) => {
info.space = targetRect.top - info.height - containerRect.top;
info.top = targetRect.top - info.height;
info.left = Math.round(targetRect.left + targetRect.width * 0.5 - info.width * 0.5);
},
right: (info, containerRect, targetRect) => {
info.space = containerRect.left + containerRect.width - targetRect.left - targetRect.width - info.width;
info.top = Math.round(targetRect.top + targetRect.height * 0.5 - info.height * 0.5);
info.left = targetRect.left + targetRect.width;
},
left: (info, containerRect, targetRect) => {
info.space = targetRect.left - info.width - containerRect.left;
info.top = Math.round(targetRect.top + targetRect.height * 0.5 - info.height * 0.5);
info.left = targetRect.left - info.width;
}
};
// with order
export const getDefaultPositions = () => {
return Object.keys(calculators);
};
const calculateSpace = (info, containerRect, targetRect) => {
const calculator = calculators[info.position];
calculator(info, containerRect, targetRect);
if (info.space >= 0) {
info.passed += 1;
}
};
// ===========================================================================================
const calculateAlignOffset = (info, containerRect, targetRect, alignType, sizeType) => {
const popoverStart = info[alignType];
const popoverSize = info[sizeType];
const containerStart = containerRect[alignType];
const containerSize = containerRect[sizeType];
const targetStart = targetRect[alignType];
const targetSize = targetRect[sizeType];
const targetCenter = targetStart + targetSize * 0.5;
// size overflow
if (popoverSize > containerSize) {
const overflow = (popoverSize - containerSize) * 0.5;
info[alignType] = containerStart - overflow;
info.offset = targetCenter - containerStart + overflow;
return;
}
const space1 = popoverStart - containerStart;
const space2 = (containerStart + containerSize) - (popoverStart + popoverSize);
// both side passed, default to center
if (space1 >= 0 && space2 >= 0) {
if (info.passed) {
info.passed += 2;
}
info.offset = popoverSize * 0.5;
return;
}
// one side passed
if (info.passed) {
info.passed += 1;
}
if (space1 < 0) {
const min = containerStart;
info[alignType] = min;
info.offset = targetCenter - min;
return;
}
// space2 < 0
const max = containerStart + containerSize - popoverSize;
info[alignType] = max;
info.offset = targetCenter - max;
};
const calculateHV = (info, containerRect) => {
if (['top', 'bottom'].includes(info.position)) {
info.top = clamp(info.top, containerRect.top, containerRect.top + containerRect.height - info.height);
return ['left', 'width'];
}
info.left = clamp(info.left, containerRect.left, containerRect.left + containerRect.width - info.width);
return ['top', 'height'];
};
const calculateOffset = (info, containerRect, targetRect) => {
const [alignType, sizeType] = calculateHV(info, containerRect);
calculateAlignOffset(info, containerRect, targetRect, alignType, sizeType);
info.offset = clamp(info.offset, 0, info[sizeType]);
};
// ===========================================================================================
const calculateDistance = (info, previousPositionInfo) => {
if (!previousPositionInfo) {
return;
}
// distance stays 0 if the position is unchanged from the previous one
if (info.position === previousPositionInfo.position) {
return;
}
const ax = info.left + info.width * 0.5;
const ay = info.top + info.height * 0.5;
const bx = previousPositionInfo.left + previousPositionInfo.width * 0.5;
const by = previousPositionInfo.top + previousPositionInfo.height * 0.5;
const dx = Math.abs(ax - bx);
const dy = Math.abs(ay - by);
info.distance = Math.round(Math.sqrt(dx * dx + dy * dy));
};
// ===========================================================================================
const calculatePositionInfo = (info, containerRect, targetRect, previousPositionInfo) => {
calculateSpace(info, containerRect, targetRect);
calculateOffset(info, containerRect, targetRect);
calculateDistance(info, previousPositionInfo);
};
// ===========================================================================================
const calculateBestPosition = (containerRect, targetRect, infoMap, withOrder, previousPositionInfo) => {
// position space: +1
// align space:
// two side passed: +2
// one side passed: +1
const safePassed = 3;
if (previousPositionInfo) {
const prevInfo = infoMap[previousPositionInfo.position];
if (prevInfo) {
calculatePositionInfo(prevInfo, containerRect, targetRect);
if (prevInfo.passed >= safePassed) {
return prevInfo;
}
prevInfo.calculated = true;
}
}
const positionList = [];
Object.values(infoMap).forEach((info) => {
if (!info.calculated) {
calculatePositionInfo(info, containerRect, targetRect, previousPositionInfo);
}
positionList.push(info);
});
positionList.sort((a, b) => {
if (a.passed !== b.passed) {
return b.passed - a.passed;
}
if (withOrder && a.passed >= safePassed && b.passed >= safePassed) {
return a.index - b.index;
}
if (a.space !== b.space) {
return b.space - a.space;
}
return a.index - b.index;
});
// logTable(positionList);
return positionList[0];
};
// const logTable = (() => {
// let time_id;
// return (info) => {
// clearTimeout(time_id);
// time_id = setTimeout(() => {
// console.table(info);
// }, 10);
// };
// })();
// ===========================================================================================
const getAllowPositions = (positions, defaultAllowPositions) => {
if (!positions) {
return;
}
if (Array.isArray(positions)) {
positions = positions.join(',');
}
positions = String(positions).split(',').map((it) => it.trim().toLowerCase()).filter((it) => it);
positions = positions.filter((it) => defaultAllowPositions.includes(it));
if (!positions.length) {
return;
}
return positions;
};
const isPositionChanged = (info, previousPositionInfo) => {
if (!previousPositionInfo) {
return true;
}
if (info.left !== previousPositionInfo.left) {
return true;
}
if (info.top !== previousPositionInfo.top) {
return true;
}
return false;
};
// ===========================================================================================
// const log = (name, time) => {
// if (time > 0.1) {
// console.log(name, time);
// }
// };
export const getBestPosition = (containerRect, targetRect, popoverRect, positions, previousPositionInfo) => {
const defaultAllowPositions = getDefaultPositions();
let withOrder = true;
let allowPositions = getAllowPositions(positions, defaultAllowPositions);
if (!allowPositions) {
allowPositions = defaultAllowPositions;
withOrder = false;
}
// console.log('withOrder', withOrder);
// const start_time = performance.now();
const infoMap = {};
allowPositions.forEach((k, i) => {
infoMap[k] = {
position: k,
index: i,
top: 0,
left: 0,
width: popoverRect.width,
height: popoverRect.height,
space: 0,
offset: 0,
passed: 0,
distance: 0
};
});
// log('infoMap', performance.now() - start_time);
const bestPosition = calculateBestPosition(containerRect, targetRect, infoMap, withOrder, previousPositionInfo);
// check left/top
bestPosition.changed = isPositionChanged(bestPosition, previousPositionInfo);
return bestPosition;
};
// ===========================================================================================
const getTemplatePath = (width, height, arrowOffset, arrowSize, borderRadius) => {
const p = (px, py) => {
return [px, py].join(',');
};
const px = function(num, alignEnd) {
const floor = Math.floor(num);
let n = num < floor + 0.5 ? floor + 0.5 : floor + 1.5;
if (alignEnd) {
n -= 1;
}
return n;
};
const pxe = function(num) {
return px(num, true);
};
const ls = [];
const innerLeft = px(arrowSize);
const innerRight = pxe(width - arrowSize);
arrowOffset = clamp(arrowOffset, innerLeft, innerRight);
const innerTop = px(arrowSize);
const innerBottom = pxe(height - arrowSize);
const startPoint = p(innerLeft, innerTop + borderRadius);
const arrowPoint = p(arrowOffset, 1);
const LT = p(innerLeft, innerTop);
const RT = p(innerRight, innerTop);
const AOT = p(arrowOffset - arrowSize, innerTop);
const RRT = p(innerRight - borderRadius, innerTop);
ls.push(`M${startPoint}`);
ls.push(`V${innerBottom - borderRadius}`);
ls.push(`Q${p(innerLeft, innerBottom)} ${p(innerLeft + borderRadius, innerBottom)}`);
ls.push(`H${innerRight - borderRadius}`);
ls.push(`Q${p(innerRight, innerBottom)} ${p(innerRight, innerBottom - borderRadius)}`);
ls.push(`V${innerTop + borderRadius}`);
if (arrowOffset < innerLeft + arrowSize + borderRadius) {
ls.push(`Q${RT} ${RRT}`);
ls.push(`H${arrowOffset + arrowSize}`);
ls.push(`L${arrowPoint}`);
if (arrowOffset < innerLeft + arrowSize) {
ls.push(`L${LT}`);
ls.push(`L${startPoint}`);
} else {
ls.push(`L${AOT}`);
ls.push(`Q${LT} ${startPoint}`);
}
} else if (arrowOffset > innerRight - arrowSize - borderRadius) {
if (arrowOffset > innerRight - arrowSize) {
ls.push(`L${RT}`);
} else {
ls.push(`Q${RT} ${p(arrowOffset + arrowSize, innerTop)}`);
}
ls.push(`L${arrowPoint}`);
ls.push(`L${AOT}`);
ls.push(`H${innerLeft + borderRadius}`);
ls.push(`Q${LT} ${startPoint}`);
} else {
ls.push(`Q${RT} ${RRT}`);
ls.push(`H${arrowOffset + arrowSize}`);
ls.push(`L${arrowPoint}`);
ls.push(`L${AOT}`);
ls.push(`H${innerLeft + borderRadius}`);
ls.push(`Q${LT} ${startPoint}`);
}
return ls.join('');
};
const getPathData = function(position, width, height, arrowOffset, arrowSize, borderRadius) {
const handlers = {
bottom: () => {
const d = getTemplatePath(width, height, arrowOffset, arrowSize, borderRadius);
return {
d,
transform: ''
};
},
top: () => {
const d = getTemplatePath(width, height, width - arrowOffset, arrowSize, borderRadius);
return {
d,
transform: `rotate(180,${width * 0.5},${height * 0.5})`
};
},
left: () => {
const d = getTemplatePath(height, width, arrowOffset, arrowSize, borderRadius);
const x = (width - height) * 0.5;
const y = (height - width) * 0.5;
return {
d,
transform: `translate(${x} ${y}) rotate(90,${height * 0.5},${width * 0.5})`
};
},
right: () => {
const d = getTemplatePath(height, width, height - arrowOffset, arrowSize, borderRadius);
const x = (width - height) * 0.5;
const y = (height - width) * 0.5;
return {
d,
transform: `translate(${x} ${y}) rotate(-90,${height * 0.5},${width * 0.5})`
};
}
};
return handlers[position]();
};
// ===========================================================================================
// position style cache
const styleCache = {
// position: '',
// top: {},
// bottom: {},
// left: {},
// right: {}
};
export const getPositionStyle = (info, options = {}) => {
const o = {
bgColor: '#fff',
borderColor: '#ccc',
borderRadius: 5,
arrowSize: 10
};
Object.keys(o).forEach((k) => {
if (hasOwn(options, k)) {
const d = o[k];
const v = options[k];
if (typeof d === 'string') {
// string
if (typeof v === 'string' && v) {
o[k] = v;
}
} else {
// number
if (isNum(v) && v >= 0) {
o[k] = v;
}
}
}
});
const key = [
info.width,
info.height,
info.offset,
o.arrowSize,
o.borderRadius,
o.bgColor,
o.borderColor
].join('-');
const positionCache = styleCache[info.position];
if (positionCache && key === positionCache.key) {
const st = positionCache.style;
st.changed = styleCache.position !== info.position;
styleCache.position = info.position;
return st;
}
// console.log(options);
const data = getPathData(info.position, info.width, info.height, info.offset, o.arrowSize, o.borderRadius);
// console.log(data);
const viewBox = [0, 0, info.width, info.height].join(' ');
const svg = [
`<svg viewBox="${viewBox}" xmlns="http://www.w3.org/2000/svg">`,
`<path d="${data.d}" fill="${o.bgColor}" stroke="${o.borderColor}" transform="${data.transform}" />`,
'</svg>'
].join('');
// console.log(svg);
const backgroundImage = `url("data:image/svg+xml;charset=utf8,${encodeURIComponent(svg)}")`;
const background = `${backgroundImage} center no-repeat`;
const padding = `${o.arrowSize + o.borderRadius}px`;
const style = {
background,
backgroundImage,
padding,
changed: true
};
styleCache.position = info.position;
styleCache[info.position] = {
key,
style
};
return style;
};

View File

@@ -1,84 +0,0 @@
/**
* Attaches metadata to the workflow on save
* - custom node pack version to all custom nodes used in the workflow
*
* Example metadata:
* "nodes": {
* "1": {
* type: "CheckpointLoaderSimple",
* ...
* properties: {
* cnr_id: "comfy-core",
* version: "0.3.8",
* },
* },
* }
*
* @typedef {Object} NodeInfo
* @property {string} ver - Version (git hash or semantic version)
* @property {string} cnr_id - ComfyRegistry node ID
* @property {boolean} enabled - Whether the node is enabled
*/
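/*
 * A hedged illustration of the custom-node branch handled below; the node type and
 * version here are made-up placeholders, and "comfyui-impact-pack" is only used as an
 * example registry id, not taken from an actual save:
 *
 * "2": {
 * type: "SomeCustomNode",
 * properties: {
 * cnr_id: "comfyui-impact-pack", // set when the pack is known to the ComfyRegistry
 * ver: "5.1.0", // git hash or semantic version; aux_id replaces cnr_id for unregistered packs
 * },
 * },
 */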
import { app } from "../../scripts/app.js";
import { api } from "../../scripts/api.js";
class WorkflowMetadataExtension {
constructor() {
this.name = "Comfy.CustomNodesManager.WorkflowMetadata";
this.installedNodes = {};
this.comfyCoreVersion = null;
}
/**
* Get the installed nodes info
* @returns {Promise<Record<string, NodeInfo>>} The mapping from node name to its info.
* ver can either be a git commit hash or a semantic version such as "1.0.0"
* cnr_id is the id of the node in the ComfyRegistry
* enabled is true if the node is enabled, false if it is disabled
*/
async getInstalledNodes() {
const res = await api.fetchApi("/v2/customnode/installed");
return await res.json();
}
async init() {
this.installedNodes = await this.getInstalledNodes();
this.comfyCoreVersion = (await api.getSystemStats()).system.comfyui_version;
}
/**
* Called when any node is created
* @param {LGraphNode} node The newly created node
*/
nodeCreated(node) {
try {
// nodeData doesn't exist if node is missing or node is frontend only node
if (!node?.constructor?.nodeData?.python_module) return;
const nodeProperties = (node.properties ??= {});
const modules = node.constructor.nodeData.python_module.split(".");
const moduleType = modules[0];
if (moduleType === "custom_nodes") {
const nodePackageName = modules[1];
const { cnr_id, aux_id, ver } =
this.installedNodes[nodePackageName] ??
this.installedNodes[nodePackageName.toLowerCase()] ??
{};
if (cnr_id === "comfy-core") return; // don't allow hijacking comfy-core name
if (cnr_id) nodeProperties.cnr_id = cnr_id;
else nodeProperties.aux_id = aux_id;
if (ver) nodeProperties.ver = ver.trim();
} else if (["nodes", "comfy_extras", "comfy_api_nodes"].includes(moduleType)) {
nodeProperties.cnr_id = "comfy-core";
nodeProperties.ver = this.comfyCoreVersion;
}
} catch (e) {
console.error(e);
}
}
}
app.registerExtension(new WorkflowMetadataExtension());

View File

View File

File diff suppressed because it is too large

View File

File diff suppressed because it is too large

View File

@@ -1,823 +0,0 @@
import os
import shutil
import subprocess
import sys
import atexit
import threading
import re
import locale
import platform
import json
import ast
import logging
import traceback
from .common import security_check
from .common import manager_util
from .common import cm_global
from .common import manager_downloader
import folder_paths
manager_util.add_python_path_to_env()
import datetime as dt
if hasattr(dt, 'datetime'):
from datetime import datetime as dt_datetime
def current_timestamp():
return dt_datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
else:
# NOTE: Occurs in some Mac environments.
import time
logging.error(f"[ComfyUI-Manager] fallback timestamp mode\n datetime module is invalid: '{dt.__file__}'")
def current_timestamp():
return str(time.time()).split('.')[0]
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'}
cm_global.pip_downgrade_blacklist = ['torch', 'torchaudio', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
def skip_pip_spam(x):
return ('Requirement already satisfied:' in x) or ("DEPRECATION: Loading egg at" in x)
message_collapses = [skip_pip_spam]
import_failed_extensions = set()
cm_global.variables['cm.on_revision_detected_handler'] = []
enable_file_logging = True
def register_message_collapse(f):
global message_collapses
message_collapses.append(f)
def is_import_failed_extension(name):
global import_failed_extensions
return name in import_failed_extensions
comfy_path = os.environ.get('COMFYUI_PATH')
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
if comfy_path is None:
comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
os.environ['COMFYUI_PATH'] = comfy_path
if comfy_base_path is None:
comfy_base_path = comfy_path
sys.__comfyui_manager_register_message_collapse = register_message_collapse
sys.__comfyui_manager_is_import_failed_extension = is_import_failed_extension
cm_global.register_api('cm.register_message_collapse', register_message_collapse)
cm_global.register_api('cm.is_import_failed_extension', is_import_failed_extension)
comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
custom_nodes_base_path = folder_paths.get_folder_paths('custom_nodes')[0]
manager_files_path = os.path.abspath(os.path.join(folder_paths.get_user_directory(), 'default', 'ComfyUI-Manager'))
manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
restore_snapshot_path = os.path.join(manager_files_path, "startup-scripts", "restore-snapshot.json")
manager_config_path = os.path.join(manager_files_path, 'config.ini')
default_conf = {}
def read_config():
global default_conf
try:
import configparser
config = configparser.ConfigParser(strict=False)
config.read(manager_config_path)
default_conf = config['default']
except Exception:
pass
def read_uv_mode():
if 'use_uv' in default_conf:
manager_util.use_uv = default_conf['use_uv'].lower() == 'true'
def check_file_logging():
global enable_file_logging
if 'file_logging' in default_conf and default_conf['file_logging'].lower() == 'false':
enable_file_logging = False
read_config()
read_uv_mode()
security_check.security_check()
check_file_logging()
cm_global.pip_overrides = {}
if os.path.exists(manager_pip_overrides_path):
with open(manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
cm_global.pip_overrides = json.load(json_file)
if os.path.exists(manager_pip_blacklist_path):
with open(manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
for x in f.readlines():
y = x.strip()
if y != '':
cm_global.pip_blacklist.add(y)
def remap_pip_package(pkg):
if pkg in cm_global.pip_overrides:
res = cm_global.pip_overrides[pkg]
print(f"[ComfyUI-Manager] '{pkg}' is remapped to '{res}'")
return res
else:
return pkg
std_log_lock = threading.Lock()
def handle_stream(stream, prefix):
stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace')
for msg in stream:
if prefix == '[!]' and ('it/s]' in msg or 's/it]' in msg) and ('%|' in msg or 'it [' in msg):
if msg.startswith('100%'):
print('\r' + msg, end="", file=sys.stderr),
else:
print('\r' + msg[:-1], end="", file=sys.stderr),
else:
if prefix == '[!]':
print(prefix, msg, end="", file=sys.stderr)
else:
print(prefix, msg, end="")
def process_wrap(cmd_str, cwd_path, handler=None, env=None):
process = subprocess.Popen(cmd_str, cwd=cwd_path, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1)
if handler is None:
handler = handle_stream
stdout_thread = threading.Thread(target=handler, args=(process.stdout, ""))
stderr_thread = threading.Thread(target=handler, args=(process.stderr, "[!]"))
stdout_thread.start()
stderr_thread.start()
stdout_thread.join()
stderr_thread.join()
return process.wait()
original_stdout = sys.stdout
def try_get_custom_nodes(x):
for custom_nodes_dir in folder_paths.get_folder_paths('custom_nodes'):
if x.startswith(custom_nodes_dir):
relative_path = os.path.relpath(x, custom_nodes_dir)
next_segment = relative_path.split(os.sep)[0]
if next_segment.lower() != 'comfyui-manager':
return next_segment, os.path.join(custom_nodes_dir, next_segment)
return None
def extract_origin_module():
stack = traceback.extract_stack()[:-2]
for frame in reversed(stack):
info = try_get_custom_nodes(frame.filename)
if info is None:
continue
else:
return info
return None
def extract_origin_module_from_strings(file_paths):
for filepath in file_paths:
info = try_get_custom_nodes(filepath)
if info is None:
continue
else:
return info
return None
def finalize_startup():
res = {}
for k, v in cm_global.error_dict.items():
if v['path'] in import_failed_extensions:
res[k] = v
cm_global.error_dict = res
try:
if '--port' in sys.argv:
port_index = sys.argv.index('--port')
if port_index + 1 < len(sys.argv):
port = int(sys.argv[port_index + 1])
postfix = f"_{port}"
else:
postfix = ""
else:
postfix = ""
# Logger setup
log_path_base = None
if enable_file_logging:
log_path_base = os.path.join(folder_paths.user_directory, 'comfyui')
if not os.path.exists(folder_paths.user_directory):
os.makedirs(folder_paths.user_directory)
if os.path.exists(f"{log_path_base}{postfix}.log"):
if os.path.exists(f"{log_path_base}{postfix}.prev.log"):
if os.path.exists(f"{log_path_base}{postfix}.prev2.log"):
os.remove(f"{log_path_base}{postfix}.prev2.log")
os.rename(f"{log_path_base}{postfix}.prev.log", f"{log_path_base}{postfix}.prev2.log")
os.rename(f"{log_path_base}{postfix}.log", f"{log_path_base}{postfix}.prev.log")
log_file = open(f"{log_path_base}{postfix}.log", "w", encoding="utf-8", errors="ignore")
log_lock = threading.Lock()
original_stdout = sys.stdout
original_stderr = sys.stderr
if original_stdout.encoding.lower() == 'utf-8':
write_stdout = original_stdout.write
write_stderr = original_stderr.write
else:
def wrapper_stdout(msg):
original_stdout.write(msg.encode('utf-8').decode(original_stdout.encoding, errors="ignore"))
def wrapper_stderr(msg):
original_stderr.write(msg.encode('utf-8').decode(original_stderr.encoding, errors="ignore"))
write_stdout = wrapper_stdout
write_stderr = wrapper_stderr
pat_tqdm = r'\d+%.*\[(.*?)\]'
pat_import_fail = r'seconds \(IMPORT FAILED\):(.*)$'
is_start_mode = True
class ComfyUIManagerLogger:
def __init__(self, is_stdout):
self.is_stdout = is_stdout
self.encoding = "utf-8"
self.last_char = ''
def fileno(self):
try:
if self.is_stdout:
return original_stdout.fileno()
else:
return original_stderr.fileno()
except AttributeError:
# Handle error
raise ValueError("The object does not have a fileno method")
def isatty(self):
return False
def write(self, message):
global is_start_mode
if any(f(message) for f in message_collapses):
return
if is_start_mode:
match = re.search(pat_import_fail, message)
if match:
import_failed_extensions.add(match.group(1).strip())
if not self.is_stdout:
origin_info = extract_origin_module()
if origin_info is not None:
name, origin_path = origin_info
if name != 'comfyui-manager':
if name not in cm_global.error_dict:
cm_global.error_dict[name] = {'name': name, 'path': origin_path, 'msg': ''}
cm_global.error_dict[name]['msg'] += message
if not self.is_stdout:
match = re.search(pat_tqdm, message)
if match:
message = re.sub(r'([#|])\d', r'\1▌', message)
message = re.sub('#', '', message)
if '100%' in message:
self.sync_write(message)
else:
write_stderr(message)
original_stderr.flush()
else:
self.sync_write(message)
else:
self.sync_write(message)
def sync_write(self, message, file_only=False):
with log_lock:
timestamp = current_timestamp()
if self.last_char != '\n':
log_file.write(message)
else:
log_file.write(f"[{timestamp}] {message}")
try:
log_file.flush()
except Exception:
pass
self.last_char = message if message == '' else message[-1]
if not file_only:
with std_log_lock:
if self.is_stdout:
write_stdout(message)
original_stdout.flush()
else:
write_stderr(message)
original_stderr.flush()
def flush(self):
try:
log_file.flush()
except Exception:
pass
with std_log_lock:
if self.is_stdout:
original_stdout.flush()
else:
original_stderr.flush()
def close(self):
self.flush()
def reconfigure(self, *args, **kwargs):
pass
# You can close through sys.stderr.close_log()
def close_log(self):
sys.stderr = original_stderr
sys.stdout = original_stdout
log_file.close()
def close_log():
sys.stderr = original_stderr
sys.stdout = original_stdout
log_file.close()
if enable_file_logging:
sys.stdout = ComfyUIManagerLogger(True)
stderr_wrapper = ComfyUIManagerLogger(False)
sys.stderr = stderr_wrapper
atexit.register(close_log)
else:
sys.stdout.close_log = lambda: None
stderr_wrapper = None
class LoggingHandler(logging.Handler):
def emit(self, record):
global is_start_mode
try:
message = record.getMessage()
except Exception as e:
message = f"<<logging error>>: {record} - {e}"
original_stderr.write(message)
if is_start_mode:
match = re.search(pat_import_fail, message)
if match:
import_failed_extensions.add(match.group(1).strip())
if 'Traceback' in message:
file_lists = self._extract_file_paths(message)
origin_info = extract_origin_module_from_strings(file_lists)
if origin_info is not None:
name, origin_path = origin_info
if name != 'comfyui-manager':
if name not in cm_global.error_dict:
cm_global.error_dict[name] = {'name': name, 'path': origin_path, 'msg': ''}
cm_global.error_dict[name]['msg'] += message
if 'Starting server' in message:
is_start_mode = False
finalize_startup()
if stderr_wrapper:
stderr_wrapper.sync_write(message+'\n', file_only=True)
def _extract_file_paths(self, msg):
file_paths = []
for line in msg.split('\n'):
match = re.findall(r'File \"(.*?)\", line \d+', line)
for x in match:
if not x.startswith('<'):
file_paths.extend(match)
return file_paths
logging.getLogger().addHandler(LoggingHandler())
except Exception as e:
print(f"[ComfyUI-Manager] Logging failed: {e}")
print("** ComfyUI startup time:", current_timestamp())
print("** Platform:", platform.system())
print("** Python version:", sys.version)
print("** Python executable:", sys.executable)
print("** ComfyUI Path:", comfy_path)
print("** ComfyUI Base Folder Path:", comfy_base_path)
print("** User directory:", folder_paths.user_directory)
print("** ComfyUI-Manager config path:", manager_config_path)
if log_path_base is not None:
print("** Log path:", os.path.abspath(f'{log_path_base}.log'))
else:
print("** Log path: file logging is disabled")
def read_downgrade_blacklist():
try:
if 'downgrade_blacklist' in default_conf:
items = default_conf['downgrade_blacklist'].split(',')
items = [x.strip() for x in items if x != '']
cm_global.pip_downgrade_blacklist += items
cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
except Exception:
pass
read_downgrade_blacklist()
def check_bypass_ssl():
try:
import ssl
if 'bypass_ssl' in default_conf and default_conf['bypass_ssl'].lower() == 'true':
print(f"[ComfyUI-Manager] WARN: Unsafe - SSL verification bypass option is Enabled. (see {manager_config_path})")
ssl._create_default_https_context = ssl._create_unverified_context # SSL certificate error fix.
except Exception:
pass
check_bypass_ssl()
# Perform install
processed_install = set()
script_list_path = os.path.join(folder_paths.user_directory, "default", "ComfyUI-Manager", "startup-scripts", "install-scripts.txt")
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
def is_installed(name):
name = name.strip()
if name.startswith('#'):
return True
pattern = r'([^<>!~=]+)([<>!~=]=?)([0-9.a-zA-Z]*)'
match = re.search(pattern, name)
if match:
name = match.group(1)
if name in cm_global.pip_blacklist:
return True
if name in cm_global.pip_downgrade_blacklist:
pips = manager_util.get_installed_packages()
if match is None:
if name in pips:
return True
elif match.group(2) in ['<=', '==', '<', '~=']:
if name in pips:
if manager_util.StrictVersion(pips[name]) >= manager_util.StrictVersion(match.group(3)):
print(f"[ComfyUI-Manager] skip black listed pip installation: '{name}'")
return True
pkg = manager_util.get_installed_packages().get(name.lower())
if pkg is None:
return False # update if not installed
if match is None:
return True # don't update if version is not specified
if match.group(2) in ['>', '>=']:
if manager_util.StrictVersion(pkg) < manager_util.StrictVersion(match.group(3)):
return False
elif manager_util.StrictVersion(pkg) > manager_util.StrictVersion(match.group(3)):
print(f"[SKIP] Downgrading pip package isn't allowed: {name.lower()} (cur={pkg})")
if match.group(2) == '==':
if manager_util.StrictVersion(pkg) < manager_util.StrictVersion(match.group(3)):
return False
if match.group(2) == '~=':
if manager_util.StrictVersion(pkg) == manager_util.StrictVersion(match.group(3)):
return False
return True # prevent downgrade
if os.path.exists(restore_snapshot_path):
try:
cloned_repos = []
def msg_capture(stream, prefix):
stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace')
for msg in stream:
if msg.startswith("CLONE: "):
cloned_repos.append(msg[7:])
if prefix == '[!]':
print(prefix, msg, end="", file=sys.stderr)
else:
print(prefix, msg, end="")
elif prefix == '[!]' and ('it/s]' in msg or 's/it]' in msg) and ('%|' in msg or 'it [' in msg):
if msg.startswith('100%'):
print('\r' + msg, end="", file=sys.stderr),
else:
print('\r'+msg[:-1], end="", file=sys.stderr),
else:
if prefix == '[!]':
print(prefix, msg, end="", file=sys.stderr)
else:
print(prefix, msg, end="")
print("[ComfyUI-Manager] Restore snapshot.")
new_env = os.environ.copy()
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
new_env["COMFYUI_FOLDERS_BASE_PATH"] = comfy_path
if 'COMFYUI_PATH' not in new_env:
new_env['COMFYUI_PATH'] = os.path.dirname(folder_paths.__file__)
cmd_str = [sys.executable, '-m', 'comfyui_manager.cm_cli', 'restore-snapshot', restore_snapshot_path]
exit_code = process_wrap(cmd_str, custom_nodes_base_path, handler=msg_capture, env=new_env)
if exit_code != 0:
print("[ComfyUI-Manager] Restore snapshot failed.")
else:
print("[ComfyUI-Manager] Restore snapshot done.")
except Exception as e:
print(e)
print("[ComfyUI-Manager] Restore snapshot failed.")
os.remove(restore_snapshot_path)
def execute_lazy_install_script(repo_path, executable):
global processed_install
install_script_path = os.path.join(repo_path, "install.py")
requirements_path = os.path.join(repo_path, "requirements.txt")
if os.path.exists(requirements_path):
print(f"Install: pip packages for '{repo_path}'")
lines = manager_util.robust_readlines(requirements_path)
for line in lines:
package_name = remap_pip_package(line.strip())
package_name = package_name.split('#')[0].strip()
if package_name and not is_installed(package_name):
if '--index-url' in package_name:
s = package_name.split('--index-url')
install_cmd = manager_util.make_pip_cmd(["install", s[0].strip(), '--index-url', s[1].strip()])
else:
install_cmd = manager_util.make_pip_cmd(["install", package_name])
process_wrap(install_cmd, repo_path)
if os.path.exists(install_script_path) and f'{repo_path}/install.py' not in processed_install:
processed_install.add(f'{repo_path}/install.py')
print(f"Install: install script for '{repo_path}'")
install_cmd = [executable, "install.py"]
new_env = os.environ.copy()
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
new_env["COMFYUI_FOLDERS_BASE_PATH"] = comfy_path
process_wrap(install_cmd, repo_path, env=new_env)
def execute_lazy_cnr_switch(target, zip_url, from_path, to_path, no_deps, custom_nodes_path):
import uuid
import shutil
# 1. download
archive_name = f"CNR_temp_{str(uuid.uuid4())}.zip" # should be unpredictable name - security precaution
download_path = os.path.join(custom_nodes_path, archive_name)
manager_downloader.download_url(zip_url, custom_nodes_path, archive_name)
# 2. extract files into <node_id>@<cur_ver>
extracted = manager_util.extract_package_as_zip(download_path, from_path)
os.remove(download_path)
if extracted is None:
if len(os.listdir(from_path)) == 0:
shutil.rmtree(from_path)
print(f'Empty archive file: {target}')
return False
# 3. calculate garbage files (.tracking - extracted)
tracking_info_file = os.path.join(from_path, '.tracking')
prev_files = set()
with open(tracking_info_file, 'r') as f:
for line in f:
prev_files.add(line.strip())
garbage = prev_files.difference(extracted)
garbage = [os.path.join(custom_nodes_path, x) for x in garbage]
# 4-1. remove garbage files
for x in garbage:
if os.path.isfile(x):
os.remove(x)
# 4-2. remove garbage dir if empty
for x in garbage:
if os.path.isdir(x):
if not os.listdir(x):
os.rmdir(x)
# 5. rename dir name <node_id>@<prev_ver> ==> <node_id>@<cur_ver>
print(f"'{from_path}' is moved to '{to_path}'")
shutil.move(from_path, to_path)
# 6. create .tracking file
tracking_info_file = os.path.join(to_path, '.tracking')
with open(tracking_info_file, "w", encoding='utf-8') as file:
file.write('\n'.join(list(extracted)))
script_executed = False
def execute_startup_script():
global script_executed
print("\n#######################################################################")
print("[ComfyUI-Manager] Starting dependency installation/(de)activation for the extension\n")
custom_nodelist_cache = None
def get_custom_node_paths():
nonlocal custom_nodelist_cache
if custom_nodelist_cache is None:
custom_nodelist_cache = set()
for base in folder_paths.get_folder_paths('custom_nodes'):
for x in os.listdir(base):
fullpath = os.path.join(base, x)
if os.path.isdir(fullpath):
custom_nodelist_cache.add(fullpath)
return custom_nodelist_cache
def execute_lazy_delete(path):
# Validate to prevent arbitrary paths from being deleted
if path not in get_custom_node_paths():
logging.error(f"## ComfyUI-Manager: The scheduled '{path}' is not a custom node path, so the deletion has been canceled.")
return
if not os.path.exists(path):
logging.info(f"## ComfyUI-Manager: SKIP-DELETE => '{path}' (already deleted)")
return
try:
shutil.rmtree(path)
logging.info(f"## ComfyUI-Manager: DELETE => '{path}'")
except Exception as e:
logging.error(f"## ComfyUI-Manager: Failed to delete '{path}' ({e})")
executed = set()
# Read each line from the file and convert it to a list using ast.literal_eval
with open(script_list_path, 'r', encoding="UTF-8", errors="ignore") as file:
for line in file:
if line in executed:
continue
executed.add(line)
try:
script = ast.literal_eval(line)
if script[1].startswith('#') and script[1] != '#FORCE':
if script[1] == "#LAZY-INSTALL-SCRIPT":
execute_lazy_install_script(script[0], script[2])
elif script[1] == "#LAZY-CNR-SWITCH-SCRIPT":
execute_lazy_cnr_switch(script[0], script[2], script[3], script[4], script[5], script[6])
execute_lazy_install_script(script[3], script[7])
elif script[1] == "#LAZY-DELETE-NODEPACK":
execute_lazy_delete(script[2])
elif os.path.exists(script[0]):
if script[1] == "#FORCE":
del script[1]
else:
if 'pip' in script[1:] and 'install' in script[1:] and is_installed(script[-1]):
continue
print(f"\n## ComfyUI-Manager: EXECUTE => {script[1:]}")
print(f"\n## Execute management script for '{script[0]}'")
new_env = os.environ.copy()
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
new_env["COMFYUI_FOLDERS_BASE_PATH"] = comfy_path
exit_code = process_wrap(script[1:], script[0], env=new_env)
if exit_code != 0:
print(f"management script failed: {script[0]}")
else:
print(f"\n## ComfyUI-Manager: CANCELED => {script[1:]}")
except Exception as e:
print(f"[ERROR] Failed to execute management script: {line} / {e}")
# Remove the script_list_path file
if os.path.exists(script_list_path):
script_executed = True
os.remove(script_list_path)
print("\n[ComfyUI-Manager] Startup script completed.")
print("#######################################################################\n")
# Check if script_list_path exists
if os.path.exists(script_list_path):
execute_startup_script()
pip_fixer.fix_broken()
del processed_install
del pip_fixer
manager_util.clear_pip_cache()
if script_executed:
# Restart
print("[ComfyUI-Manager] Restarting to reapply dependency installation.")
if '__COMFY_CLI_SESSION__' in os.environ:
with open(os.path.join(os.environ['__COMFY_CLI_SESSION__'] + '.reboot'), 'w'):
pass
print("--------------------------------------------------------------------------\n")
exit(0)
else:
sys_argv = sys.argv.copy()
if sys_argv[0].endswith("__main__.py"): # this is a python module
module_name = os.path.basename(os.path.dirname(sys_argv[0]))
cmds = [sys.executable, '-m', module_name] + sys_argv[1:]
elif sys.platform.startswith('win32'):
cmds = ['"' + sys.executable + '"', '"' + sys_argv[0] + '"'] + sys_argv[1:]
else:
cmds = [sys.executable] + sys_argv
print(f"Command: {cmds}", flush=True)
print("--------------------------------------------------------------------------\n")
os.execv(sys.executable, cmds)
def check_windows_event_loop_policy():
try:
import configparser
config = configparser.ConfigParser(strict=False)
config.read(manager_config_path)
default_conf = config['default']
if 'windows_selector_event_loop_policy' in default_conf and default_conf['windows_selector_event_loop_policy'].lower() == 'true':
try:
import asyncio
import asyncio.windows_events
asyncio.set_event_loop_policy(asyncio.windows_events.WindowsSelectorEventLoopPolicy())
print("[ComfyUI-Manager] Windows event loop policy mode enabled")
except Exception as e:
print(f"[ComfyUI-Manager] WARN: Windows initialization fail: {e}")
except Exception:
pass
if platform.system() == 'Windows':
check_windows_event_loop_policy()

17171 custom-node-list.json Normal file
View File

File diff suppressed because it is too large

View File

@@ -1,41 +0,0 @@
# ComfyUI-Manager: Documentation
This directory contains documentation for the ComfyUI-Manager, providing guides and tutorials for users in multiple languages.
## Directory Structure
The documentation is organized into language-specific directories:
- **en/**: English documentation
- **ko/**: Korean documentation
## Core Documentation Files
### Command-Line Interface
- **cm-cli.md**: Documentation for the ComfyUI-Manager Command Line Interface (CLI), which allows using manager functionality without the UI.
### Advanced Features
- **use_aria2.md**: Guide for using the aria2 download accelerator with ComfyUI-Manager for faster model downloads.
## Documentation Standards
The documentation follows these standards:
1. **Markdown Format**: All documentation is written in Markdown for easy rendering on GitHub and other platforms
2. **Language-specific Directories**: Content is separated by language to facilitate localization
3. **Feature-focused Documentation**: Each major feature has its own documentation file
4. **Updated with Releases**: Documentation is kept in sync with software releases
## Contributing to Documentation
When contributing new documentation:
1. Place files in the appropriate language directory
2. Use clear, concise language appropriate for the target audience
3. Include examples where helpful
4. Consider adding screenshots or diagrams for complex features
5. Maintain consistent formatting with existing documentation
This documentation directory will continue to grow to support the expanding feature set of ComfyUI-Manager.

View File

@@ -121,9 +121,8 @@ ComfyUI-Loopchain
* If no file exists at the snapshot path, it is implicitly assumed to be in ComfyUI-Manager/snapshots.
* `--pip-non-url`: Restore for pip packages registered on PyPI.
* `--pip-non-local-url`: Restore for pip packages registered at web URLs.
* `--pip-local-url`: Restore for pip packages specified by local paths.
* `--user-directory`: Set the user directory.
* `--restore-to`: The path where the restored custom nodes will be installed. (When this option is applied, only the custom nodes installed in the target path are recognized as installed.)
* `--pip-local-url`: Restore for pip packages specified by local paths.
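As an illustration only (the snapshot name and target path are hypothetical, and this assumes cm-cli's `restore-snapshot` command described in this guide), a restore that replays only PyPI-hosted pip packages into an alternate custom node directory might be invoked as:

python cm-cli.py restore-snapshot my-snapshot.json --pip-non-url --restore-to /path/to/alt/custom_nodes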
### 5. CLI Only Mode

View File

@@ -123,8 +123,7 @@ ComfyUI-Loopchain
* `--pip-non-url`: Perform restore for pip packages registered on PyPI.
* `--pip-non-local-url`: Perform restore for pip packages registered at web URLs.
* `--pip-local-url`: Perform restore for pip packages that point to local paths.
* `--user-directory`: Set the user directory.
* `--restore-to`: The path where the restored custom nodes will be installed. (When this option is applied, only the custom nodes installed in the target path are recognized as installed.)
### 5. CLI only mode

View File

File diff suppressed because it is too large

View File

@@ -1,15 +1,15 @@
{
"favorites": [
"comfyui-manager",
"comfyui_ipadapter_plus",
"comfyui-animatediff-evolved",
"comfyui_controlnet_aux",
"comfyui-impact-pack",
"comfyui-impact-subpack",
"comfyui-custom-scripts",
"comfyui-layerdiffuse",
"comfyui-liveportraitkj",
"aigodlike-comfyui-translation",
"comfyui-reactor",
"comfyui-reactor-node",
"comfyui_instantid",
"sd-dynamic-thresholding",
"pr-was-node-suite-comfyui-47064894",

7797
github-stats.json Normal file
View File

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
import { api } from "../../scripts/api.js";
import { app } from "../../scripts/app.js";
import { sleep, customConfirm, customAlert } from "./common.js";
import { sleep } from "./common.js";
async function tryInstallCustomNode(event) {
let msg = '-= [ComfyUI Manager] extension installation request =-\n\n';
@@ -19,13 +19,14 @@ async function tryInstallCustomNode(event) {
msg += `\n\nRequest message:\n${event.detail.msg}`;
if(event.detail.target.installed == 'True') {
customAlert(msg);
alert(msg);
return;
}
const res = await customConfirm(msg);
let res = confirm(msg);
if(res) {
if(event.detail.target.installed == 'Disabled') {
const response = await api.fetchApi(`/v2/customnode/toggle_active`, {
const response = await api.fetchApi(`/customnode/toggle_active`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(event.detail.target)
@@ -35,7 +36,7 @@ async function tryInstallCustomNode(event) {
await sleep(300);
app.ui.dialog.show(`Installing... '${event.detail.target.title}'`);
const response = await api.fetchApi(`/v2/customnode/install`, {
const response = await api.fetchApi(`/customnode/install`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(event.detail.target)
@@ -45,14 +46,9 @@ async function tryInstallCustomNode(event) {
show_message('This action is not allowed with this security level configuration.');
return false;
}
else if(response.status == 400) {
let msg = await res.text();
show_message(msg);
return false;
}
}
let response = await api.fetchApi("/v2/manager/reboot");
let response = await api.fetchApi("/manager/reboot");
if(response.status == 403) {
show_message('This action is not allowed with this security level configuration.');
return false;

View File

File diff suppressed because it is too large

View File

@@ -4,7 +4,6 @@ import { $el, ComfyDialog } from "../../scripts/ui.js";
import { CopusShareDialog } from "./comfyui-share-copus.js";
import { OpenArtShareDialog } from "./comfyui-share-openart.js";
import { YouMLShareDialog } from "./comfyui-share-youml.js";
import { customAlert } from "./common.js";
export const SUPPORTED_OUTPUT_NODE_TYPES = [
"PreviewImage",
@@ -172,7 +171,7 @@ export const shareToEsheep= () => {
const nodes = app.graph._nodes
const { potential_outputs, potential_output_nodes } = getPotentialOutputsAndOutputNodes(nodes);
const workflow = prompt['workflow']
api.fetchApi(`/v2/manager/set_esheep_workflow_and_images`, {
api.fetchApi(`/manager/set_esheep_workflow_and_images`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
@@ -253,9 +252,9 @@ export const showShareDialog = async (share_option) => {
if (potential_output_nodes.length === 0) {
// todo: add support for other output node types (animatediff combine, etc.)
const supported_nodes_string = SUPPORTED_OUTPUT_NODE_TYPES.join(", ");
customAlert(`No supported output node found (${supported_nodes_string}). To share this workflow, please add an output node to your graph and re-run your prompt.`);
alert(`No supported output node found (${supported_nodes_string}). To share this workflow, please add an output node to your graph and re-run your prompt.`);
} else {
customAlert("To share this, first run a prompt. Once it's done, click 'Share'.\n\nNOTE: Images of the Share target can only be selected in the PreviewImage, SaveImage, and VHS_VideoCombine nodes. In the case of VHS_VideoCombine, only the image/gif and image/webp formats are supported.");
alert("To share this, first run a prompt. Once it's done, click 'Share'.\n\nNOTE: Images of the Share target can only be selected in the PreviewImage, SaveImage, and VHS_VideoCombine nodes. In the case of VHS_VideoCombine, only the image/gif and image/webp formats are supported.");
}
return false;
}
@@ -337,7 +336,7 @@ export class ShareDialogChooser extends ComfyDialog {
key: "Copus",
textContent: "Copus",
website: "https://www.copus.io",
description: "🔴 Earn simple. Get paid from your ComfyUI workflows—no revenue sharing. Ever.",
description: "🔴 Permanently store and secure ownership of your workflow on the open-source platform: <a style='color:var(--input-text);' href='https://copus.io' target='_blank'>Copus.io</a>",
onclick: () => {
showCopusShareDialog();
this.close();
@@ -357,8 +356,7 @@ export class ShareDialogChooser extends ComfyDialog {
});
buttons.forEach(b => {
const button = $el("button",
{
const button = $el("button", {
type: "button",
textContent: b.textContent,
onclick: b.onclick,
@@ -371,14 +369,8 @@ export class ShareDialogChooser extends ComfyDialog {
'padding': '5px 5px',
'margin-bottom': '5px',
'transition': 'background-color 0.3s',
'position':'relative'
}
},
[
$el("span", { style: {
} }),
]
);
});
button.addEventListener('mouseover', () => {
button.style.backgroundColor = '#007BFF'; // Change color on hover
});
@@ -396,28 +388,6 @@ export class ShareDialogChooser extends ComfyDialog {
},
});
const copus_ui =$el("div", { style: {
'position': 'absolute',
'height': '100%',
'left': '-25px',
'top': '-26px',
'width': '100%',
'z-index':'-1',
'background':'url("https://static.copus.io/images/client/202412/test/f28ac6ef8f4c6f3d5d50856a272ed02c.png")',
'background-repeat': 'no-repeat',
} });
const copus_ui_bottom =$el("div", { style: {
'position': 'absolute',
'height': '100%',
'left': '25px',
'bottom': '-26px',
'width': '100%',
'transform':'scale(-1, -1)',
'z-index':'-1',
'background':'url("https://static.copus.io/images/client/202412/test/f28ac6ef8f4c6f3d5d50856a272ed02c.png")',
'background-repeat': 'no-repeat',
} });
const websiteLink = $el("a", {
textContent: "🌐 Website",
href: b.website,
@@ -447,6 +417,7 @@ export class ShareDialogChooser extends ComfyDialog {
'margin-bottom': '10px',
}
}, [button, websiteLink]);
const column = $el("div", {
style: {
'flex-basis': '100%',
@@ -455,17 +426,8 @@ export class ShareDialogChooser extends ComfyDialog {
'border': '1px solid #ddd',
'border-radius': '5px',
'box-shadow': '0 2px 4px rgba(0, 0, 0, 0.1)',
'position':'relative'
}
}, [buttonLinkContainer, description
,
b.key ==='Copus' ?
copus_ui
:'',
b.key ==='Copus' ?
copus_ui_bottom
:'',
]);
}, [buttonLinkContainer, description]);
container.appendChild(column);
});
@@ -513,7 +475,7 @@ export class ShareDialogChooser extends ComfyDialog {
}
show() {
this.element.style.display = "block";
this.element.style.zIndex = 1099;
this.element.style.zIndex = 10001;
}
}
export class ShareDialog extends ComfyDialog {
@@ -552,20 +514,6 @@ export class ShareDialog extends ComfyDialog {
this.matrix_destination_checkbox.style.color = "var(--fg-color)";
this.matrix_destination_checkbox.checked = this.share_option === 'matrix'; //true;
try {
api.fetchApi(`/v2/manager/get_matrix_dep_status`)
.then(response => response.text())
.then(data => {
if(data == 'unavailable') {
matrix_destination_checkbox_text.style.textDecoration = "line-through";
this.matrix_destination_checkbox.disabled = true;
this.matrix_destination_checkbox.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
matrix_destination_checkbox_text.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
}
})
.catch(error => {});
} catch (error) {}
this.comfyworkflows_destination_checkbox = $el("input", { type: 'checkbox', id: "comfyworkflows_destination" }, [])
const comfyworkflows_destination_checkbox_text = $el("label", {}, [" ComfyWorkflows.com"])
this.comfyworkflows_destination_checkbox.style.color = "var(--fg-color)";
@@ -826,7 +774,7 @@ export class ShareDialog extends ComfyDialog {
// get the user's existing matrix auth and share key
ShareDialog.matrix_auth = { homeserver: "matrix.org", username: "", password: "" };
try {
api.fetchApi(`/v2/manager/get_matrix_auth`)
api.fetchApi(`/manager/get_matrix_auth`)
.then(response => response.json())
.then(data => {
ShareDialog.matrix_auth = data;
@@ -845,7 +793,7 @@ export class ShareDialog extends ComfyDialog {
ShareDialog.cw_sharekey = "";
try {
// console.log("Fetching comfyworkflows share key")
api.fetchApi(`/v2/manager/get_comfyworkflows_auth`)
api.fetchApi(`/manager/get_comfyworkflows_auth`)
.then(response => response.json())
.then(data => {
ShareDialog.cw_sharekey = data.comfyworkflows_sharekey;
@@ -876,7 +824,7 @@ export class ShareDialog extends ComfyDialog {
if (destinations.includes("matrix")) {
let definedMatrixAuth = !!this.matrix_homeserver_input.value && !!this.matrix_username_input.value && !!this.matrix_password_input.value;
if (!definedMatrixAuth) {
customAlert("Please set your Matrix account details.");
alert("Please set your Matrix account details.");
return;
}
}
@@ -893,9 +841,9 @@ export class ShareDialog extends ComfyDialog {
if (potential_output_nodes.length === 0) {
// todo: add support for other output node types (animatediff combine, etc.)
const supported_nodes_string = SUPPORTED_OUTPUT_NODE_TYPES.join(", ");
customAlert(`No supported output node found (${supported_nodes_string}). To share this workflow, please add an output node to your graph and re-run your prompt.`);
alert(`No supported output node found (${supported_nodes_string}). To share this workflow, please add an output node to your graph and re-run your prompt.`);
} else {
customAlert("To share this, first run a prompt. Once it's done, click 'Share'.\n\nNOTE: Images of the Share target can only be selected in the PreviewImage, SaveImage, and VHS_VideoCombine nodes. In the case of VHS_VideoCombine, only the image/gif and image/webp formats are supported.");
alert("To share this, first run a prompt. Once it's done, click 'Share'.\n\nNOTE: Images of the Share target can only be selected in the PreviewImage, SaveImage, and VHS_VideoCombine nodes. In the case of VHS_VideoCombine, only the image/gif and image/webp formats are supported.");
}
this.selectedOutputIndex = 0;
this.close();
@@ -905,7 +853,7 @@ export class ShareDialog extends ComfyDialog {
// Change the text of the share button to "Sharing..." to indicate that the share process has started
this.share_button.textContent = "Sharing...";
const response = await api.fetchApi(`/v2/manager/share`, {
const response = await api.fetchApi(`/manager/share`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
@@ -933,16 +881,16 @@ export class ShareDialog extends ComfyDialog {
try {
const response_json = await response.json();
if (response_json.error) {
customAlert(response_json.error);
alert(response_json.error);
this.close();
return;
} else {
customAlert("Failed to share your art. Please try again.");
alert("Failed to share your art. Please try again.");
this.close();
return;
}
} catch (e) {
customAlert("Failed to share your art. Please try again.");
alert("Failed to share your art. Please try again.");
this.close();
return;
}

View File

@@ -1,15 +1,13 @@
import { app } from "../../scripts/app.js";
import { $el, ComfyDialog } from "../../scripts/ui.js";
import { customAlert } from "./common.js";
const env = "prod";
let DEFAULT_HOMEPAGE_URL = "https://copus.io";
let API_ENDPOINT = "https://api.client.prod.copus.io";
let API_ENDPOINT = "https://api.client.prod.copus.io/copus-client";
if (env !== "prod") {
API_ENDPOINT = "https://api.test.copus.io";
API_ENDPOINT = "https://api.dev.copus.io/copus-client";
DEFAULT_HOMEPAGE_URL = "https://test.copus.io";
}
@@ -63,7 +61,6 @@ export class CopusShareDialog extends ComfyDialog {
[$el("div.comfy-modal-content", {}, [...this.createButtons()])]
);
this.selectedOutputIndex = 0;
this.selectedOutput_lock = 0;
this.selectedNodeId = null;
this.uploadedImages = [];
this.allFilesImages = [];
@@ -71,7 +68,7 @@ export class CopusShareDialog extends ComfyDialog {
this.allFiles = [];
this.titleNum = 0;
}
createButtons() {
const inputStyle = {
display: "block",
@@ -193,38 +190,10 @@ export class CopusShareDialog extends ComfyDialog {
type: "text",
placeholder: "Subtitle (Optional)",
style: inputStyle,
maxLength: "350",
maxLength: "70",
oninput: () => {
const titleNum = this.SubTitleInput.value.length;
subTitleNumDom.textContent = `${titleNum}/350`;
},
});
this.LockInput = $el("input", {
type: "text",
placeholder: "0",
style: {
width: "100px",
padding: "7px",
paddingLeft: "30px",
borderRadius: "4px",
border: "1px solid #ddd",
boxSizing: "border-box",
position: "relative",
},
oninput: (event) => {
let input = event.target.value;
// Use a regular expression to match a number with up to two decimal places
const regex = /^\d*\.?\d{0,2}$/;
if (!regex.test(input)) {
// If the input doesn't match, remove the last entered character
event.target.value = input.slice(0, -1);
}
const numericValue = parseFloat(input);
if (numericValue > 9999) {
input = "9999";
}
// Update the input field with the valid value
event.target.value = input;
subTitleNumDom.textContent = `${titleNum}/70`;
},
});
this.descriptionInput = $el("textarea", {
@@ -303,7 +272,7 @@ export class CopusShareDialog extends ComfyDialog {
},
[]
);
const titleNumDom = $el(
"label",
{
@@ -328,7 +297,7 @@ export class CopusShareDialog extends ComfyDialog {
color: "#999",
},
},
["0/350"]
["0/70"]
);
const descriptionNumDom = $el(
"label",
@@ -344,11 +313,15 @@ export class CopusShareDialog extends ComfyDialog {
["0/70"]
);
// Additional Inputs Section
const additionalInputsSection = $el("div", { style: { ...sectionStyle } }, [
$el("label", { style: labelStyle }, ["3⃣ Title "]),
this.TitleInput,
titleNumDom,
]);
const additionalInputsSection = $el(
"div",
{ style: { ...sectionStyle, } },
[
$el("label", { style: labelStyle }, ["3⃣ Title "]),
this.TitleInput,
titleNumDom,
]
);
const SubtitleSection = $el("div", { style: sectionStyle }, [
$el("label", { style: labelStyle }, ["4⃣ Subtitle "]),
this.SubTitleInput,
@@ -360,101 +333,6 @@ export class CopusShareDialog extends ComfyDialog {
// descriptionNumDom,
]);
// switch between outputs section and additional inputs section
this.radioButtons_lock = [];
this.radioButtonsCheck_lock = $el("input", {
type: "radio",
name: "output_type_lock",
value: "0",
id: "blockchain1_lock",
checked: true,
});
this.radioButtonsCheckOff_lock = $el("input", {
type: "radio",
name: "output_type_lock",
value: "1",
id: "blockchain_lock",
});
const blockChainSection_lock = $el("div", { style: sectionStyle }, [
$el("label", { style: labelStyle }, ["6⃣ Download threshold"]),
$el(
"label",
{
style: {
marginTop: "10px",
display: "flex",
alignItems: "center",
cursor: "pointer",
},
},
[
this.radioButtonsCheck_lock,
$el(
"div",
{
style: {
marginLeft: "5px",
display: "flex",
alignItems: "center",
position: "relative",
},
},
[
$el("span", { style: { marginLeft: "5px" } }, ["ON"]),
$el(
"span",
{
style: {
marginLeft: "20px",
marginRight: "10px",
color: "#fff",
},
},
["Unlock with"]
),
$el("img", {
style: {
width: "16px",
height: "16px",
position: "absolute",
right: "75px",
zIndex: "100",
},
src: "https://static.copus.io/images/admin/202507/prod/e2919a1d8f3c2d99d3b8fe27ff94b841.png",
}),
this.LockInput,
]
),
]
),
$el(
"label",
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
[
this.radioButtonsCheckOff_lock,
$el(
"div",
{
style: {
marginLeft: "5px",
display: "flex",
alignItems: "center",
},
},
[$el("span", { style: { marginLeft: "5px" } }, ["OFF"])]
),
]
),
$el(
"p",
{ style: { fontSize: "16px", color: "#fff", margin: "10px 0 0 0" } },
[
]
),
]);
this.radioButtons = [];
this.radioButtonsCheck = $el("input", {
@@ -472,7 +350,7 @@ export class CopusShareDialog extends ComfyDialog {
});
const blockChainSection = $el("div", { style: sectionStyle }, [
$el("label", { style: labelStyle }, ["8️⃣ Store on blockchain "]),
$el("label", { style: labelStyle }, ["6️⃣ Store on blockchain "]),
$el(
"label",
{
@@ -502,141 +380,6 @@ export class CopusShareDialog extends ComfyDialog {
["Secure ownership with a permanent & decentralized storage"]
),
]);
this.ratingRadioButtonsCheck0 = $el("input", {
type: "radio",
name: "content_rating",
value: "0",
id: "content_rating0",
});
this.ratingRadioButtonsCheck1 = $el("input", {
type: "radio",
name: "content_rating",
value: "1",
id: "content_rating1",
});
this.ratingRadioButtonsCheck2 = $el("input", {
type: "radio",
name: "content_rating",
value: "2",
id: "content_rating2",
});
this.ratingRadioButtonsCheck_1 = $el("input", {
type: "radio",
name: "content_rating",
value: "-1",
id: "content_rating_1",
checked: true,
});
// content rating
const contentRatingSection = $el("div", { style: sectionStyle }, [
$el("label", { style: labelStyle }, ["7⃣ Content rating "]),
$el(
"label",
{
style: {
marginTop: "10px",
display: "flex",
alignItems: "center",
cursor: "pointer",
},
},
[
this.ratingRadioButtonsCheck0,
$el("img", {
style: {
width: "12px",
height: "12px",
marginLeft: "5px",
},
src: "https://static.copus.io/images/client/202507/test/b9f17da83b054d53cd0cb4508c2c30dc.png",
}),
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
"All ages",
]),
]
),
$el(
"p",
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
["Safe for all viewers; no profanity, violence, or mature themes."]
),
$el(
"label",
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
[
this.ratingRadioButtonsCheck1,
$el("img", {
style: {
width: "12px",
height: "12px",
marginLeft: "5px",
},
src: "https://static.copus.io/images/client/202507/test/7848bc0d3690671df21c7cf00c4cfc81.png",
}),
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
"13+ (Teen)",
]),
]
),
$el(
"p",
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
[
"Mild language, light themes, or cartoon violence; no explicit content. ",
]
),
$el(
"label",
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
[
this.ratingRadioButtonsCheck2,
$el("img", {
style: {
width: "12px",
height: "12px",
marginLeft: "5px",
},
src: "https://static.copus.io/images/client/202507/test/bc51839c208d68d91173e43c23bff039.png",
}),
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
"18+ (Explicit)",
]),
]
),
$el(
"p",
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
[
"Explicit content, including sexual content, strong violence, or intense themes. ",
]
),
$el(
"label",
{ style: { display: "flex", alignItems: "center", cursor: "pointer" } },
[
this.ratingRadioButtonsCheck_1,
$el("img", {
style: {
width: "12px",
height: "12px",
marginLeft: "5px",
},
src: "https://static.copus.io/images/client/202507/test/5c802fdcaaea4e7bbed37393eec0d5ba.png",
}),
$el("span", { style: { marginLeft: "5px", color: "#fff" } }, [
"Not Rated",
]),
]
),
$el(
"p",
{ style: { fontSize: "10px", color: "#fff", marginLeft: "20px" } },
["No age rating provided."]
),
]);
// Message Section
this.message = $el(
"div",
@@ -698,8 +441,6 @@ export class CopusShareDialog extends ComfyDialog {
SubtitleSection,
DescriptionSection,
// contestSection,
blockChainSection_lock,
contentRatingSection,
blockChainSection,
this.message,
buttonsSection,
@@ -708,7 +449,7 @@ export class CopusShareDialog extends ComfyDialog {
return layout;
}
/**
* api
* api
* @param {url} path
* @param {params} options
* @param {statusText} statusText
@@ -761,9 +502,7 @@ export class CopusShareDialog extends ComfyDialog {
url: data,
});
} else {
throw new Error(
"make sure your API key is correct and try again later"
);
throw new Error("make sure your API key is correct and try again later");
}
} catch (e) {
if (e?.response?.status === 413) {
@@ -781,7 +520,7 @@ export class CopusShareDialog extends ComfyDialog {
this.shareButton.textContent = "Sharing...";
await this.share();
} catch (e) {
customAlert(e.message);
alert(e.message);
}
this.shareButton.disabled = false;
this.shareButton.textContent = "Share";
@@ -804,15 +543,6 @@ export class CopusShareDialog extends ComfyDialog {
subTitle: this.SubTitleInput.value,
content: this.descriptionInput.value,
storeOnChain: this.radioButtonsCheck.checked ? true : false,
lockState: this.radioButtonsCheck_lock.checked ? 2 : 0,
unlockPrice: this.LockInput.value,
rating: this.ratingRadioButtonsCheck0.checked
? 0
: this.ratingRadioButtonsCheck1.checked
? 1
: this.ratingRadioButtonsCheck2.checked
? 2
: -1,
};
if (!this.keyInput.value) {
@@ -827,12 +557,6 @@ export class CopusShareDialog extends ComfyDialog {
throw new Error("Title is required");
}
if (this.radioButtonsCheck_lock.checked) {
if (!this.LockInput.value) {
throw new Error("Price is required");
}
}
if (!this.uploadedImages.length) {
if (this.selectedFile) {
await this.uploadThumbnail(this.selectedFile);
@@ -878,23 +602,23 @@ export class CopusShareDialog extends ComfyDialog {
"Uploading workflow..."
);
if (res.status && res.data.status && res.data) {
localStorage.setItem("copus_token", this.keyInput.value);
const { data } = res.data;
if (data) {
const url = `${DEFAULT_HOMEPAGE_URL}/work/${data}`;
this.message.innerHTML = `Workflow has been shared successfully. <a href="${url}" target="_blank">Click here to view it.</a>`;
this.previewImage.src = "";
this.previewImage.style.display = "none";
this.uploadedImages = [];
this.allFilesImages = [];
this.allFiles = [];
this.TitleInput.value = "";
this.SubTitleInput.value = "";
this.descriptionInput.value = "";
this.selectedFile = null;
}
}
if (res.status && res.data.status && res.data) {
localStorage.setItem("copus_token",this.keyInput.value);
const { data } = res.data;
if (data) {
const url = `${DEFAULT_HOMEPAGE_URL}/work/${data}`;
this.message.innerHTML = `Workflow has been shared successfully. <a href="${url}" target="_blank">Click here to view it.</a>`;
this.previewImage.src = "";
this.previewImage.style.display = "none";
this.uploadedImages = [];
this.allFilesImages = [];
this.allFiles = [];
this.TitleInput.value = "";
this.SubTitleInput.value = "";
this.descriptionInput.value = "";
this.selectedFile = null;
}
}
} catch (e) {
throw new Error("Error sharing workflow: " + e.message);
}
@@ -940,7 +664,7 @@ export class CopusShareDialog extends ComfyDialog {
this.element.style.display = "block";
this.previewImage.src = "";
this.previewImage.style.display = "none";
this.keyInput.value = apiToken != null ? apiToken : "";
this.keyInput.value = apiToken!=null?apiToken:"";
this.uploadedImages = [];
this.allFilesImages = [];
this.allFiles = [];

View File

@@ -1,7 +1,6 @@
import {app} from "../../scripts/app.js";
import {api} from "../../scripts/api.js";
import {ComfyDialog, $el} from "../../scripts/ui.js";
import { customAlert } from "./common.js";
const LOCAL_STORAGE_KEY = "openart_comfy_workflow_key";
const DEFAULT_HOMEPAGE_URL = "https://openart.ai/workflows/dev?developer=true";
@@ -67,7 +66,7 @@ export class OpenArtShareDialog extends ComfyDialog {
async readKey() {
let key = ""
try {
key = await api.fetchApi(`/v2/manager/get_openart_auth`)
key = await api.fetchApi(`/manager/get_openart_auth`)
.then(response => response.json())
.then(data => {
return data.openart_key;
@@ -82,7 +81,7 @@ export class OpenArtShareDialog extends ComfyDialog {
}
async saveKey(value) {
await api.fetchApi(`/v2/manager/set_openart_auth`, {
await api.fetchApi(`/manager/set_openart_auth`, {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({
@@ -399,7 +398,7 @@ export class OpenArtShareDialog extends ComfyDialog {
form.append("file", uploadFile);
try {
const res = await this.fetchApi(
`/v2/workflows/upload_thumbnail`,
`/workflows/upload_thumbnail`,
{
method: "POST",
body: form,
@@ -432,7 +431,7 @@ export class OpenArtShareDialog extends ComfyDialog {
this.shareButton.textContent = "Sharing...";
await this.share();
} catch (e) {
customAlert(e.message);
alert(e.message);
}
this.shareButton.disabled = false;
this.shareButton.textContent = "Share";
@@ -459,7 +458,7 @@ export class OpenArtShareDialog extends ComfyDialog {
throw new Error("Title is required");
}
const current_snapshot = await api.fetchApi(`/v2/snapshot/get_current`)
const current_snapshot = await api.fetchApi(`/snapshot/get_current`)
.then(response => response.json())
.catch(error => {
// console.log(error);
@@ -489,7 +488,7 @@ export class OpenArtShareDialog extends ComfyDialog {
try {
const response = await this.fetchApi(
"/v2/workflows/publish",
"/workflows/publish",
{
method: "POST",
headers: {"Content-Type": "application/json"},

View File

@@ -1,7 +1,6 @@
import {app} from "../../scripts/app.js";
import {api} from "../../scripts/api.js";
import {ComfyDialog, $el} from "../../scripts/ui.js";
import { customAlert } from "./common.js";
const BASE_URL = "https://youml.com";
//const BASE_URL = "http://localhost:3000";
@@ -179,7 +178,7 @@ export class YouMLShareDialog extends ComfyDialog {
async loadToken() {
let key = ""
try {
const response = await api.fetchApi(`/v2/manager/youml/settings`)
const response = await api.fetchApi(`/manager/youml/settings`)
const settings = await response.json()
return settings.token
} catch (error) {
@@ -188,7 +187,7 @@ export class YouMLShareDialog extends ComfyDialog {
}
async saveToken(value) {
await api.fetchApi(`/v2/manager/youml/settings`, {
await api.fetchApi(`/manager/youml/settings`, {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({
@@ -348,7 +347,7 @@ export class YouMLShareDialog extends ComfyDialog {
this.shareButton.textContent = "Sharing...";
await this.share();
} catch (e) {
customAlert(e.message);
alert(e.message);
} finally {
this.shareButton.disabled = false;
this.shareButton.textContent = "Share";
@@ -380,7 +379,7 @@ export class YouMLShareDialog extends ComfyDialog {
try {
let snapshotData = null;
try {
const snapshot = await api.fetchApi(`/v2/snapshot/get_current`)
const snapshot = await api.fetchApi(`/snapshot/get_current`)
snapshotData = await snapshot.json()
} catch (e) {
console.error("Failed to get snapshot", e)

257
js/common.js Normal file
View File

@@ -0,0 +1,257 @@
import { app } from "../../scripts/app.js";
import { api } from "../../scripts/api.js";
import { $el, ComfyDialog } from "../../scripts/ui.js";
export function show_message(msg) {
app.ui.dialog.show(msg);
app.ui.dialog.element.style.zIndex = 10010;
}
export async function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
export function rebootAPI() {
if (confirm("Are you sure you'd like to reboot the server?")) {
try {
api.fetchApi("/manager/reboot");
}
catch(exception) {
}
return true;
}
return false;
}
export async function migrateAPI() {
if (confirm("When performing a migration, existing installed custom nodes will be renamed and the server will be restarted. Are you sure you want to apply this?\n\n(If you don't perform the migration, ComfyUI-Manager's start-up time will be longer each time due to re-checking during startup.)")) {
try {
await api.fetchApi("/manager/migrate_unmanaged_nodes");
api.fetchApi("/manager/reboot");
}
catch(exception) {
}
return true;
}
return false;
}
export var manager_instance = null;
export function setManagerInstance(obj) {
manager_instance = obj;
}
export function showToast(message, duration = 3000) {
const toast = $el("div.comfy-toast", {textContent: message});
document.body.appendChild(toast);
setTimeout(() => {
toast.classList.add("comfy-toast-fadeout");
setTimeout(() => toast.remove(), 500);
}, duration);
}
function isValidURL(url) {
if(url.includes('&'))
return false;
const http_pattern = /^(https?|ftp):\/\/[^\s$?#]+$/;
const ssh_pattern = /^(.+@|ssh:\/\/).+:.+$/;
return http_pattern.test(url) || ssh_pattern.test(url);
}
export async function install_pip(packages) {
if(packages.includes('&'))
app.ui.dialog.show(`Invalid PIP package enumeration: '${packages}'`);
const res = await api.fetchApi("/customnode/install/pip", {
method: "POST",
body: packages,
});
if(res.status == 403) {
show_message('This action is not allowed with this security level configuration.');
return;
}
if(res.status == 200) {
show_message(`PIP package installation is processed.<br>To apply the pip packages, please click the <button id='cm-reboot-button3'><font size='3px'>RESTART</font></button> button in ComfyUI.`);
const rebootButton = document.getElementById('cm-reboot-button3');
const self = this;
rebootButton.addEventListener("click", rebootAPI);
}
else {
show_message(`Failed to install '${packages}'<BR>See terminal log.`);
}
}
export async function install_via_git_url(url, manager_dialog) {
if(!url) {
return;
}
if(!isValidURL(url)) {
show_message(`Invalid Git url '${url}'`);
return;
}
show_message(`Wait...<BR><BR>Installing '${url}'`);
const res = await api.fetchApi("/customnode/install/git_url", {
method: "POST",
body: url,
});
if(res.status == 403) {
show_message('This action is not allowed with this security level configuration.');
return;
}
if(res.status == 200) {
show_message(`'${url}' is installed<BR>To apply the installed custom node, please <button id='cm-reboot-button4'><font size='3px'>RESTART</font></button> ComfyUI.`);
const rebootButton = document.getElementById('cm-reboot-button4');
const self = this;
rebootButton.addEventListener("click",
function() {
if(rebootAPI()) {
manager_dialog.close();
}
});
}
else {
show_message(`Failed to install '${url}'<BR>See terminal log.`);
}
}
export async function free_models(free_execution_cache) {
try {
let mode = "";
if(free_execution_cache) {
mode = '{"unload_models": true, "free_memory": true}';
}
else {
mode = '{"unload_models": true}';
}
let res = await api.fetchApi(`/free`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: mode
});
if (res.status == 200) {
if(free_execution_cache) {
showToast("'Models' and 'Execution Cache' have been cleared.", 3000);
}
else {
showToast("Models' have been unloaded.", 3000);
}
} else {
showToast('Unloading of models failed. Installed ComfyUI may be an outdated version.', 5000);
}
} catch (error) {
showToast('An error occurred while trying to unload models.', 5000);
}
}
export function md5(inputString) {
const hc = '0123456789abcdef';
const rh = n => {let j,s='';for(j=0;j<=3;j++) s+=hc.charAt((n>>(j*8+4))&0x0F)+hc.charAt((n>>(j*8))&0x0F);return s;}
const ad = (x,y) => {let l=(x&0xFFFF)+(y&0xFFFF);let m=(x>>16)+(y>>16)+(l>>16);return (m<<16)|(l&0xFFFF);}
const rl = (n,c) => (n<<c)|(n>>>(32-c));
const cm = (q,a,b,x,s,t) => ad(rl(ad(ad(a,q),ad(x,t)),s),b);
const ff = (a,b,c,d,x,s,t) => cm((b&c)|((~b)&d),a,b,x,s,t);
const gg = (a,b,c,d,x,s,t) => cm((b&d)|(c&(~d)),a,b,x,s,t);
const hh = (a,b,c,d,x,s,t) => cm(b^c^d,a,b,x,s,t);
const ii = (a,b,c,d,x,s,t) => cm(c^(b|(~d)),a,b,x,s,t);
const sb = x => {
let i;const nblk=((x.length+8)>>6)+1;const blks=[];for(i=0;i<nblk*16;i++) { blks[i]=0 };
for(i=0;i<x.length;i++) {blks[i>>2]|=x.charCodeAt(i)<<((i%4)*8);}
blks[i>>2]|=0x80<<((i%4)*8);blks[nblk*16-2]=x.length*8;return blks;
}
let i,x=sb(inputString),a=1732584193,b=-271733879,c=-1732584194,d=271733878,olda,oldb,oldc,oldd;
for(i=0;i<x.length;i+=16) {olda=a;oldb=b;oldc=c;oldd=d;
a=ff(a,b,c,d,x[i+ 0], 7, -680876936);d=ff(d,a,b,c,x[i+ 1],12, -389564586);c=ff(c,d,a,b,x[i+ 2],17, 606105819);
b=ff(b,c,d,a,x[i+ 3],22,-1044525330);a=ff(a,b,c,d,x[i+ 4], 7, -176418897);d=ff(d,a,b,c,x[i+ 5],12, 1200080426);
c=ff(c,d,a,b,x[i+ 6],17,-1473231341);b=ff(b,c,d,a,x[i+ 7],22, -45705983);a=ff(a,b,c,d,x[i+ 8], 7, 1770035416);
d=ff(d,a,b,c,x[i+ 9],12,-1958414417);c=ff(c,d,a,b,x[i+10],17, -42063);b=ff(b,c,d,a,x[i+11],22,-1990404162);
a=ff(a,b,c,d,x[i+12], 7, 1804603682);d=ff(d,a,b,c,x[i+13],12, -40341101);c=ff(c,d,a,b,x[i+14],17,-1502002290);
b=ff(b,c,d,a,x[i+15],22, 1236535329);a=gg(a,b,c,d,x[i+ 1], 5, -165796510);d=gg(d,a,b,c,x[i+ 6], 9,-1069501632);
c=gg(c,d,a,b,x[i+11],14, 643717713);b=gg(b,c,d,a,x[i+ 0],20, -373897302);a=gg(a,b,c,d,x[i+ 5], 5, -701558691);
d=gg(d,a,b,c,x[i+10], 9, 38016083);c=gg(c,d,a,b,x[i+15],14, -660478335);b=gg(b,c,d,a,x[i+ 4],20, -405537848);
a=gg(a,b,c,d,x[i+ 9], 5, 568446438);d=gg(d,a,b,c,x[i+14], 9,-1019803690);c=gg(c,d,a,b,x[i+ 3],14, -187363961);
b=gg(b,c,d,a,x[i+ 8],20, 1163531501);a=gg(a,b,c,d,x[i+13], 5,-1444681467);d=gg(d,a,b,c,x[i+ 2], 9, -51403784);
c=gg(c,d,a,b,x[i+ 7],14, 1735328473);b=gg(b,c,d,a,x[i+12],20,-1926607734);a=hh(a,b,c,d,x[i+ 5], 4, -378558);
d=hh(d,a,b,c,x[i+ 8],11,-2022574463);c=hh(c,d,a,b,x[i+11],16, 1839030562);b=hh(b,c,d,a,x[i+14],23, -35309556);
a=hh(a,b,c,d,x[i+ 1], 4,-1530992060);d=hh(d,a,b,c,x[i+ 4],11, 1272893353);c=hh(c,d,a,b,x[i+ 7],16, -155497632);
b=hh(b,c,d,a,x[i+10],23,-1094730640);a=hh(a,b,c,d,x[i+13], 4, 681279174);d=hh(d,a,b,c,x[i+ 0],11, -358537222);
c=hh(c,d,a,b,x[i+ 3],16, -722521979);b=hh(b,c,d,a,x[i+ 6],23, 76029189);a=hh(a,b,c,d,x[i+ 9], 4, -640364487);
d=hh(d,a,b,c,x[i+12],11, -421815835);c=hh(c,d,a,b,x[i+15],16, 530742520);b=hh(b,c,d,a,x[i+ 2],23, -995338651);
a=ii(a,b,c,d,x[i+ 0], 6, -198630844);d=ii(d,a,b,c,x[i+ 7],10, 1126891415);c=ii(c,d,a,b,x[i+14],15,-1416354905);
b=ii(b,c,d,a,x[i+ 5],21, -57434055);a=ii(a,b,c,d,x[i+12], 6, 1700485571);d=ii(d,a,b,c,x[i+ 3],10,-1894986606);
c=ii(c,d,a,b,x[i+10],15, -1051523);b=ii(b,c,d,a,x[i+ 1],21,-2054922799);a=ii(a,b,c,d,x[i+ 8], 6, 1873313359);
d=ii(d,a,b,c,x[i+15],10, -30611744);c=ii(c,d,a,b,x[i+ 6],15,-1560198380);b=ii(b,c,d,a,x[i+13],21, 1309151649);
a=ii(a,b,c,d,x[i+ 4], 6, -145523070);d=ii(d,a,b,c,x[i+11],10,-1120210379);c=ii(c,d,a,b,x[i+ 2],15, 718787259);
b=ii(b,c,d,a,x[i+ 9],21, -343485551);a=ad(a,olda);b=ad(b,oldb);c=ad(c,oldc);d=ad(d,oldd);
}
return rh(a)+rh(b)+rh(c)+rh(d);
}
export async function fetchData(route, options) {
let err;
const res = await api.fetchApi(route, options).catch(e => {
err = e;
});
if (!res) {
return {
status: 400,
error: new Error("Unknown Error")
}
}
const { status, statusText } = res;
if (err) {
return {
status,
error: err
}
}
if (status !== 200) {
return {
status,
error: new Error(statusText || "Unknown Error")
}
}
const data = await res.json();
if (!data) {
return {
status,
error: new Error(`Failed to load data: ${route}`)
}
}
return {
status,
data
}
}
export const icons = {
search: '<svg viewBox="0 0 24 24" width="100%" height="100%" pointer-events="none" xmlns="http://www.w3.org/2000/svg"><path fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="m21 21-4.486-4.494M19 10.5a8.5 8.5 0 1 1-17 0 8.5 8.5 0 0 1 17 0"/></svg>',
extensions: '<svg viewBox="64 64 896 896" width="100%" height="100%" pointer-events="none" xmlns="http://www.w3.org/2000/svg"><path fill="currentColor" d="M843.5 737.4c-12.4-75.2-79.2-129.1-155.3-125.4S550.9 676 546 752c-153.5-4.8-208-40.7-199.1-113.7 3.3-27.3 19.8-41.9 50.1-49 18.4-4.3 38.8-4.9 57.3-3.2 1.7.2 3.5.3 5.2.5 11.3 2.7 22.8 5 34.3 6.8 34.1 5.6 68.8 8.4 101.8 6.6 92.8-5 156-45.9 159.2-132.7 3.1-84.1-54.7-143.7-147.9-183.6-29.9-12.8-61.6-22.7-93.3-30.2-14.3-3.4-26.3-5.7-35.2-7.2-7.9-75.9-71.5-133.8-147.8-134.4S189.7 168 180.5 243.8s40 146.3 114.2 163.9 149.9-23.3 175.7-95.1c9.4 1.7 18.7 3.6 28 5.8 28.2 6.6 56.4 15.4 82.4 26.6 70.7 30.2 109.3 70.1 107.5 119.9-1.6 44.6-33.6 65.2-96.2 68.6-27.5 1.5-57.6-.9-87.3-5.8-8.3-1.4-15.9-2.8-22.6-4.3-3.9-.8-6.6-1.5-7.8-1.8l-3.1-.6c-2.2-.3-5.9-.8-10.7-1.3-25-2.3-52.1-1.5-78.5 4.6-55.2 12.9-93.9 47.2-101.1 105.8-15.7 126.2 78.6 184.7 276 188.9 29.1 70.4 106.4 107.9 179.6 87 73.3-20.9 119.3-93.4 106.9-168.6M329.1 345.2a83.3 83.3 0 1 1 .01-166.61 83.3 83.3 0 0 1-.01 166.61M695.6 845a83.3 83.3 0 1 1 .01-166.61A83.3 83.3 0 0 1 695.6 845"/></svg>',
conflicts: '<svg viewBox="0 0 400 400" width="100%" height="100%" pointer-events="none" xmlns="http://www.w3.org/2000/svg"><path fill="currentColor" d="m397.2 350.4.2-.2-180-320-.2.2C213.8 24.2 207.4 20 200 20s-13.8 4.2-17.2 10.4l-.2-.2-180 320 .2.2c-1.6 2.8-2.8 6-2.8 9.6 0 11 9 20 20 20h360c11 0 20-9 20-20 0-3.6-1.2-6.8-2.8-9.6M220 340h-40v-40h40zm0-60h-40V120h40z"/></svg>',
passed: '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 426.667 426.667"><path fill="#6AC259" d="M213.333,0C95.518,0,0,95.514,0,213.333s95.518,213.333,213.333,213.333c117.828,0,213.333-95.514,213.333-213.333S331.157,0,213.333,0z M174.199,322.918l-93.935-93.931l31.309-31.309l62.626,62.622l140.894-140.898l31.309,31.309L174.199,322.918z"/></svg>',
download: '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" width="100%" height="100%" viewBox="0 0 32 32"><path fill="currentColor" d="M26 24v4H6v-4H4v4a2 2 0 0 0 2 2h20a2 2 0 0 0 2-2v-4zm0-10l-1.41-1.41L17 20.17V2h-2v18.17l-7.59-7.58L6 14l10 10l10-10z"></path></svg>'
}

View File

@@ -1,6 +1,6 @@
import { app } from "../../scripts/app.js";
import { api } from "../../scripts/api.js"
import { sleep, show_message, customConfirm, customAlert } from "./common.js";
import { sleep, show_message } from "./common.js";
import { GroupNodeConfig, GroupNodeHandler } from "../../extensions/core/groupNode.js";
import { ComfyDialog, $el } from "../../scripts/ui.js";
@@ -64,7 +64,7 @@ function storeGroupNode(name, data, register=true) {
}
export async function load_components() {
let data = await api.fetchApi('/v2/manager/component/loads', {method: "POST"});
let data = await api.fetchApi('/manager/component/loads', {method: "POST"});
let components = await data.json();
let start_time = Date.now();
@@ -222,7 +222,7 @@ async function save_as_component(node, version, author, prefix, nodename, packna
pack_map[packname] = component_name;
rpack_map[component_name] = subgraph;
const res = await api.fetchApi('/v2/manager/component/save', {
const res = await api.fetchApi('/manager/component/save', {
method: "POST",
headers: {
"Content-Type": "application/json",
@@ -259,7 +259,7 @@ async function import_component(component_name, component, mode) {
workflow: component
};
const res = await api.fetchApi('/v2/manager/component/save', {
const res = await api.fetchApi('/manager/component/save', {
method: "POST",
headers: { "Content-Type": "application/json", },
body: JSON.stringify(body)
@@ -365,7 +365,7 @@ function checkVersion(name, component) {
return msg;
}
async function handle_import_components(components) {
function handle_import_components(components) {
let msg = 'Components:\n';
let cnt = 0;
for(let name in components) {
@@ -387,9 +387,8 @@ async function handle_import_components(components) {
let last_name = null;
msg += '\nWill you load components?\n';
const confirmed = await customConfirm(msg);
if(confirmed) {
const mode = await customConfirm('\nWill you save components?\n(cancel=load without save)');
if(confirm(msg)) {
let mode = confirm('\nWill you save components?\n(cancel=load without save)');
for(let name in components) {
let component = components[name];
@@ -412,7 +411,7 @@ async function handle_import_components(components) {
}
}
async function handlePaste(e) {
function handlePaste(e) {
let data = (e.clipboardData || window.clipboardData);
const items = data.items;
for(const item of items) {
@@ -422,7 +421,7 @@ async function handlePaste(e) {
let json_data = JSON.parse(data);
if(json_data.kind == 'ComfyUI Components' && last_paste_timestamp != json_data.timestamp) {
last_paste_timestamp = json_data.timestamp;
await handle_import_components(json_data.components);
handle_import_components(json_data.components);
// disable paste node
localStorage.removeItem("litegrapheditor_clipboard", null);
@@ -456,7 +455,7 @@ export class ComponentBuilderDialog extends ComfyDialog {
this.invalidateControl();
this.element.style.display = "block";
this.element.style.zIndex = 1099;
this.element.style.zIndex = 10001;
this.element.style.width = "500px";
this.element.style.height = "480px";
}
@@ -622,7 +621,7 @@ export class ComponentBuilderDialog extends ComfyDialog {
self.version_string.value = self.default_ver;
}
else {
customAlert('If you are not the author, it is not recommended to change the version, as it may cause component update issues.');
alert('If you are not the author, it is not recommended to change the version, as it may cause component update issues.');
}
};
@@ -678,7 +677,7 @@ export class ComponentBuilderDialog extends ComfyDialog {
let orig_handleFile = app.handleFile;
async function handleFile(file) {
function handleFile(file) {
if (file.name?.endsWith(".json") || file.name?.endsWith(".pack")) {
const reader = new FileReader();
reader.onload = async () => {
@@ -691,7 +690,7 @@ async function handleFile(file) {
}
if(is_component) {
await handle_import_components(jsonContent);
handle_import_components(jsonContent);
}
else {
orig_handleFile.call(app, file);
@@ -709,7 +708,7 @@ app.handleFile = handleFile;
let current_component_policy = 'workflow';
try {
api.fetchApi('/v2/manager/policy/component')
api.fetchApi('/manager/component/policy')
.then(response => response.text())
.then(data => { current_component_policy = data; });
}

1759
js/custom-nodes-manager.js Normal file
View File

File diff suppressed because it is too large

View File

@@ -1,18 +1,220 @@
import { app } from "../../scripts/app.js";
import { $el } from "../../scripts/ui.js";
import {
manager_instance, rebootAPI,
fetchData, md5, icons, show_message, customAlert, infoToast, showTerminal,
storeColumnWidth, restoreColumnWidth, loadCss, generateUUID
fetchData, md5, icons
} from "./common.js";
import { api } from "../../scripts/api.js";
// https://cenfun.github.io/turbogrid/api.html
import TG from "./turbogrid.esm.js";
loadCss("./model-manager.css");
const pageCss = `
.cmm-manager {
--grid-font: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
z-index: 10001;
width: 80%;
height: 80%;
display: flex;
flex-direction: column;
gap: 10px;
color: var(--fg-color);
font-family: arial, sans-serif;
}
const gridId = "model";
.cmm-manager .cmm-flex-auto {
flex: auto;
}
.cmm-manager button {
font-size: 16px;
color: var(--input-text);
background-color: var(--comfy-input-bg);
border-radius: 8px;
border-color: var(--border-color);
border-style: solid;
margin: 0;
padding: 4px 8px;
min-width: 100px;
}
.cmm-manager button:disabled,
.cmm-manager input:disabled,
.cmm-manager select:disabled {
color: gray;
}
.cmm-manager button:disabled {
background-color: var(--comfy-input-bg);
}
.cmm-manager-header {
display: flex;
flex-wrap: wrap;
gap: 5px;
align-items: center;
padding: 0 5px;
}
.cmm-manager-header label {
display: flex;
gap: 5px;
align-items: center;
}
.cmm-manager-type,
.cmm-manager-base,
.cmm-manager-filter {
height: 28px;
line-height: 28px;
}
.cmm-manager-keywords {
height: 28px;
line-height: 28px;
padding: 0 5px 0 26px;
background-size: 16px;
background-position: 5px center;
background-repeat: no-repeat;
background-image: url("data:image/svg+xml;charset=utf8,${encodeURIComponent(icons.search.replace("currentColor", "#888"))}");
}
.cmm-manager-status {
padding-left: 10px;
}
.cmm-manager-grid {
flex: auto;
border: 1px solid var(--border-color);
overflow: hidden;
}
.cmm-manager-selection {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.cmm-manager-message {
}
.cmm-manager-footer {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.cmm-manager-grid .tg-turbogrid {
font-family: var(--grid-font);
font-size: 15px;
background: var(--bg-color);
}
.cmm-manager-grid .cmm-node-name a {
color: skyblue;
text-decoration: none;
word-break: break-word;
}
.cmm-manager-grid .cmm-node-desc a {
color: #5555FF;
font-weight: bold;
text-decoration: none;
}
.cmm-manager-grid .tg-cell a:hover {
text-decoration: underline;
}
.cmm-icon-passed {
width: 20px;
height: 20px;
position: absolute;
left: calc(50% - 10px);
top: calc(50% - 10px);
}
.cmm-manager .cmm-btn-enable {
background-color: blue;
color: white;
}
.cmm-manager .cmm-btn-disable {
background-color: MediumSlateBlue;
color: white;
}
.cmm-manager .cmm-btn-install {
background-color: black;
color: white;
}
.cmm-btn-download {
width: 18px;
height: 18px;
position: absolute;
left: calc(50% - 10px);
top: calc(50% - 10px);
cursor: pointer;
opacity: 0.8;
color: #fff;
}
.cmm-btn-download:hover {
opacity: 1;
}
.cmm-manager-light .cmm-btn-download {
color: #000;
}
@keyframes cmm-btn-loading-bg {
0% {
left: 0;
}
100% {
left: -105px;
}
}
.cmm-manager button.cmm-btn-loading {
position: relative;
overflow: hidden;
border-color: rgb(0 119 207 / 80%);
background-color: var(--comfy-input-bg);
}
.cmm-manager button.cmm-btn-loading::after {
position: absolute;
top: 0;
left: 0;
content: "";
width: 500px;
height: 100%;
background-image: repeating-linear-gradient(
-45deg,
rgb(0 119 207 / 30%),
rgb(0 119 207 / 30%) 10px,
transparent 10px,
transparent 15px
);
animation: cmm-btn-loading-bg 2s linear infinite;
}
.cmm-manager-light .cmm-node-name a {
color: blue;
}
.cmm-manager-light .cm-warn-note {
background-color: #ccc !important;
}
.cmm-manager-light .cmm-btn-install {
background-color: #333;
}
`;
const pageHtml = `
<div class="cmm-manager-header">
@@ -33,14 +235,7 @@ const pageHtml = `
<div class="cmm-manager-selection"></div>
<div class="cmm-manager-message"></div>
<div class="cmm-manager-footer">
<button class="cmm-manager-back">
<svg class="arrow-icon" width="14" height="14" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2 8H18M2 8L8 2M2 8L8 14" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Back
</button>
<button class="cmm-manager-refresh">Refresh</button>
<button class="cmm-manager-stop">Stop</button>
<button class="cmm-manager-back">Back</button>
<div class="cmm-flex-auto"></div>
</div>
`;
@@ -59,11 +254,17 @@ export class ModelManager {
this.keywords = '';
this.init();
api.addEventListener("cm-queue-status", this.onQueueStatus);
}
init() {
if (!document.querySelector(`style[context="${this.id}"]`)) {
const $style = document.createElement("style");
$style.setAttribute("context", this.id);
$style.innerHTML = pageCss;
document.head.appendChild($style);
}
this.element = $el("div", {
parent: document.body,
className: "comfy-modal cmm-manager"
@@ -81,13 +282,10 @@ export class ModelManager {
value: ""
}, {
label: "Installed",
value: "installed"
value: "True"
}, {
label: "Not Installed",
value: "not_installed"
}, {
label: "In Workflow",
value: "in_workflow"
value: "False"
}];
this.typeList = [{
@@ -167,25 +365,12 @@ export class ModelManager {
}
},
".cmm-manager-refresh": {
click: () => {
app.refreshComboInNodes();
}
},
".cmm-manager-stop": {
click: () => {
api.fetchApi('/v2/manager/queue/reset');
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
}
},
".cmm-manager-back": {
click: (e) => {
this.close()
manager_instance.show();
}
}
},
};
Object.keys(eventsMap).forEach(selector => {
const target = this.element.querySelector(selector);
@@ -217,10 +402,6 @@ export class ModelManager {
this.renderSelected();
});
grid.bind("onColumnWidthChanged", (e, columnItem) => {
storeColumnWidth(gridId, columnItem)
});
grid.bind('onClick', (e, d) => {
const { rowItem } = d;
const target = d.e.target;
@@ -257,31 +438,12 @@ export class ModelManager {
rowFilter: (rowItem) => {
const searchableColumns = ["name", "type", "base", "description", "filename", "save_path"];
const models_extensions = ['.ckpt', '.pt', '.pt2', '.bin', '.pth', '.safetensors', '.pkl', '.sft'];
let shouldShown = grid.highlightKeywordsFilter(rowItem, searchableColumns, this.keywords);
if (shouldShown) {
if(this.filter) {
if (this.filter == "in_workflow") {
rowItem.in_workflow = null;
if (Array.isArray(app.graph._nodes)) {
app.graph._nodes.forEach((item, i) => {
if (Array.isArray(item.widgets_values)) {
item.widgets_values.forEach((_item, i) => {
if (rowItem.in_workflow === null && _item !== null && models_extensions.includes("." + _item.toString().split('.').pop())) {
let filename = _item.match(/([^\/]+)(?=\.\w+$)/)[0];
if (grid.highlightKeywordsFilter(rowItem, searchableColumns, filename)) {
rowItem.in_workflow = "True";
grid.highlightKeywordsFilter(rowItem, searchableColumns, "");
}
}
});
}
});
}
}
return ((this.filter == "installed" && rowItem.installed == "True") || (this.filter == "not_installed" && rowItem.installed == "False") || (this.filter == "in_workflow" && rowItem.in_workflow == "True"));
if(this.filter && rowItem.installed !== this.filter) {
return false;
}
if(this.type && rowItem.type !== this.type) {
@@ -356,7 +518,7 @@ export class ModelManager {
sortable: false,
align: 'center',
formatter: (url, rowItem, columnItem) => {
return `<a class="cmm-btn-download" tooltip="Download file" href="${url}" target="_blank">${icons.download}</a>`;
return `<a class="cmm-btn-download" title="Download file" href="${url}" target="_blank">${icons.download}</a>`;
}
}, {
id: 'size',
@@ -391,8 +553,6 @@ export class ModelManager {
width: 200
}];
restoreColumnWidth(gridId, columns);
this.grid.setData({
options,
rows,
@@ -435,19 +595,17 @@ export class ModelManager {
}
async installModels(list, btn) {
btn.classList.add("cmm-btn-loading");
this.showLoading();
this.showError("");
let needRefresh = false;
let needRestart = false;
let errorMsg = "";
let target_items = [];
let batch = {};
for (const item of list) {
this.grid.scrollRowIntoView(item);
target_items.push(item);
if (!this.focusInstall(item)) {
this.grid.onNextUpdated(() => {
@@ -458,120 +616,48 @@ export class ModelManager {
this.showStatus(`Install ${item.name} ...`);
const data = item.originalData;
data.ui_id = item.hash;
if(batch['install_model']) {
batch['install_model'].push(data);
}
else {
batch['install_model'] = [data];
}
}
this.install_context = {btn: btn, targets: target_items};
if(errorMsg) {
this.showError(errorMsg);
show_message("[Installation Errors]\n"+errorMsg);
// reset
for(let k in target_items) {
const item = target_items[k];
this.grid.updateCell(item, "installed");
}
}
else {
this.batch_id = generateUUID();
batch['batch_id'] = this.batch_id;
const res = await api.fetchApi(`/v2/manager/queue/batch`, {
const res = await fetchData('/model/install', {
method: 'POST',
body: JSON.stringify(batch)
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data)
});
let failed = await res.json();
if(failed.length > 0) {
for(let k in failed) {
let hash = failed[k];
const item = self.grid.getRowItemBy("hash", hash);
errorMsg = `[FAIL] ${item.title}`;
}
if (res.error) {
errorMsg = `Install failed: ${item.name} ${res.error.message}`;
break;;
}
this.showStop();
showTerminal();
}
}
needRestart = true;
async onQueueStatus(event) {
let self = ModelManager.instance;
if(event.detail.status == 'in_progress' && event.detail.ui_target == 'model_manager') {
const hash = event.detail.target;
const item = self.grid.getRowItemBy("hash", hash);
this.grid.setRowSelected(item, false);
item.refresh = true;
self.grid.setRowSelected(item, false);
item.selectable = false;
// self.grid.updateCell(item, "tg-column-select");
self.grid.updateRow(item);
}
else if(event.detail.status == 'batch-done') {
self.hideStop();
self.onQueueCompleted(event.detail);
}
}
this.grid.updateCell(item, "installed");
this.grid.updateCell(item, "tg-column-select");
async onQueueCompleted(info) {
let result = info.model_result;
this.showStatus(`Install ${item.name} successfully`);
if(result.length == 0) {
return;
}
let self = ModelManager.instance;
if(!self.install_context) {
return;
}
let btn = self.install_context.btn;
self.hideLoading();
this.hideLoading();
btn.classList.remove("cmm-btn-loading");
let errorMsg = "";
for(let hash in result){
let v = result[hash];
if(v != 'success')
errorMsg += v + '\n';
}
for(let k in self.install_context.targets) {
let item = self.install_context.targets[k];
self.grid.updateCell(item, "installed");
}
if (errorMsg) {
self.showError(errorMsg);
show_message("Installation Error:\n"+errorMsg);
this.showError(errorMsg);
} else {
self.showStatus(`Install ${result.length} models successfully`);
this.showStatus(`Install ${list.length} models successfully`);
}
self.showRefresh();
self.showMessage(`To apply the installed model, please click the 'Refresh' button.`, "red")
if (needRestart) {
this.showMessage(`To apply the installed model, please click the 'Refresh' button on the main menu.`, "red")
}
infoToast('Tasks done', `[ComfyUI-Manager] All model downloading tasks in the queue have been completed.\n${info.done_count}/${info.total_count}`);
self.install_context = undefined;
}
getModelList(models) {
const typeMap = new Map();
const baseMap = new Map();
@@ -645,7 +731,7 @@ export class ModelManager {
const mode = manager_instance.datasrc_combo.value;
const res = await fetchData(`/v2/externalmodel/getlist?mode=${mode}`);
const res = await fetchData(`/externalmodel/getlist?mode=${mode}`);
if (res.error) {
this.showError("Failed to get external model list.");
this.hideLoading();
@@ -740,7 +826,7 @@ export class ModelManager {
}
showLoading() {
// this.setDisabled(true);
this.setDisabled(true);
if (this.grid) {
this.grid.showLoading();
this.grid.showMask({
@@ -750,7 +836,7 @@ export class ModelManager {
}
hideLoading() {
// this.setDisabled(false);
this.setDisabled(false);
if (this.grid) {
this.grid.hideLoading();
this.grid.hideMask();
@@ -758,9 +844,8 @@ export class ModelManager {
}
setDisabled(disabled) {
const $close = this.element.querySelector(".cmm-manager-close");
const $refresh = this.element.querySelector(".cmm-manager-refresh");
const $stop = this.element.querySelector(".cmm-manager-stop");
const list = [
".cmm-manager-header input",
@@ -772,7 +857,7 @@ export class ModelManager {
})
.flat()
.filter(it => {
return it !== $close && it !== $refresh && it !== $stop;
return it !== $close;
});
list.forEach($elem => {
@@ -789,18 +874,6 @@ export class ModelManager {
}
showRefresh() {
this.element.querySelector(".cmm-manager-refresh").style.display = "block";
}
showStop() {
this.element.querySelector(".cmm-manager-stop").style.display = "block";
}
hideStop() {
this.element.querySelector(".cmm-manager-stop").style.display = "none";
}
setKeywords(keywords = "") {
this.keywords = keywords;
this.element.querySelector(".cmm-manager-keywords").value = keywords;
@@ -817,4 +890,4 @@ export class ModelManager {
close() {
this.element.style.display = "none";
}
}
}

View File

@@ -1,6 +1,16 @@
import { app } from "../../scripts/app.js";
import { api } from "../../scripts/api.js";
let double_click_policy = "copy-all";
api.fetchApi('/manager/dbl_click/policy')
.then(response => response.text())
.then(data => set_double_click_policy(data));
export function set_double_click_policy(mode) {
double_click_policy = mode;
}
function addMenuHandler(nodeType, cb) {
const getOpts = nodeType.prototype.getExtraMenuOptions;
nodeType.prototype.getExtraMenuOptions = function () {
@@ -142,7 +152,63 @@ function node_info_copy(src, dest, connect_both, copy_shape) {
}
app.registerExtension({
name: "Comfy.Legacy.Manager.NodeFixer",
name: "Comfy.Manager.NodeFixer",
async nodeCreated(node, app) {
let orig_dblClick = node.onDblClick;
node.onDblClick = function (e, pos, self) {
orig_dblClick?.apply?.(this, arguments);
if((!node.inputs && !node.outputs) || pos[1] > 0)
return;
switch(double_click_policy) {
case "copy-all":
case "copy-full":
case "copy-input":
{
if(node.inputs?.some(x => x.link != null) || node.outputs?.some(x => x.links != null && x.links.length > 0) )
return;
let src_node = lookup_nearest_nodes(node);
if(src_node)
{
let both_connection = double_click_policy != "copy-input";
let copy_shape = double_click_policy == "copy-full";
node_info_copy(src_node, node, both_connection, copy_shape);
}
}
break;
case "possible-input":
{
let nearest_inputs = lookup_nearest_inputs(node);
if(nearest_inputs)
connect_inputs(nearest_inputs, node);
}
break;
case "dual":
{
if(pos[0] < node.size[0]/2) {
// left: possible-input
let nearest_inputs = lookup_nearest_inputs(node);
if(nearest_inputs)
connect_inputs(nearest_inputs, node);
}
else {
// right: copy-all
if(node.inputs?.some(x => x.link != null) || node.outputs?.some(x => x.links != null && x.links.length > 0) )
return;
let src_node = lookup_nearest_nodes(node);
if(src_node)
node_info_copy(src_node, node, true);
}
}
break;
}
}
},
beforeRegisterNodeDef(nodeType, nodeData, app) {
addMenuHandler(nodeType, function (_, options) {
options.push({
@@ -153,7 +219,6 @@ app.registerExtension({
app.canvas.graph.add(new_node, false);
node_info_copy(this, new_node, true);
app.canvas.graph.remove(this);
requestAnimationFrame(() => app.canvas.setDirty(true, true))
},
});
});

View File

@@ -7,7 +7,7 @@ import { manager_instance, rebootAPI, show_message } from "./common.js";
async function restore_snapshot(target) {
if(SnapshotManager.instance) {
try {
const response = await api.fetchApi(`/v2/snapshot/restore?target=${target}`, { cache: "no-store" });
const response = await api.fetchApi(`/snapshot/restore?target=${target}`, { cache: "no-store" });
if(response.status == 403) {
show_message('This action is not allowed with this security level configuration.');
@@ -35,7 +35,7 @@ async function restore_snapshot(target) {
async function remove_snapshot(target) {
if(SnapshotManager.instance) {
try {
const response = await api.fetchApi(`/v2/snapshot/remove?target=${target}`, { cache: "no-store" });
const response = await api.fetchApi(`/snapshot/remove?target=${target}`, { cache: "no-store" });
if(response.status == 403) {
show_message('This action is not allowed with this security level configuration.');
@@ -61,7 +61,7 @@ async function remove_snapshot(target) {
async function save_current_snapshot() {
try {
const response = await api.fetchApi('/v2/snapshot/save', { cache: "no-store" });
const response = await api.fetchApi('/snapshot/save', { cache: "no-store" });
app.ui.dialog.close();
return true;
}
@@ -76,7 +76,7 @@ async function save_current_snapshot() {
}
async function getSnapshotList() {
const response = await api.fetchApi(`/v2/snapshot/getlist`);
const response = await api.fetchApi(`/snapshot/getlist`);
const data = await response.json();
return data;
}
@@ -291,7 +291,7 @@ export class SnapshotManager extends ComfyDialog {
try {
this.invalidateControl();
this.element.style.display = "block";
this.element.style.zIndex = 1099;
this.element.style.zIndex = 10001;
}
catch(exception) {
app.ui.dialog.show(`Failed to get external model list. / ${exception}`);

migration_js/migration.js  Normal file (+34 lines)
View File

@@ -0,0 +1,34 @@
import { app } from "../../scripts/app.js";
import { api } from "../../scripts/api.js";
app.registerExtension({
name: "Comfy.ManagerExtMenu",
init() {
$el("style", {
textContent: style,
parent: document.head,
});
},
async setup() {
let cmGroup = new (await import("../../scripts/ui/components/buttonGroup.js")).ComfyButtonGroup(
new(await import("../../scripts/ui/components/button.js")).ComfyButton({
icon: "puzzle",
action: async () => {
if(confirm('As some features of ComfyUI Manager have been integrated into ComfyUI, they have been separated into manager-core.\n\nComfyUI Manager only includes additional extension features that are not provided by manager-core.\n\nWill you install manager-core?')) {
app.ui.dialog.show('Installing manager-core...');
app.ui.dialog.element.style.zIndex = 10010;
await api.fetchApi("/manager/install_manager_core");
app.ui.dialog.show('The installation of manager-core will be completed after restarting.');
}
},
tooltip: "Need to install manager-core",
content: "Manager (Need To Install)",
classList: "comfyui-button comfyui-menu-mobile-collapse primary"
}).element
);
app.menu?.settingsGroup.element.before(cmGroup.element);
}
});

BIN  misc/custom-nodes.jpg       Normal file (new binary, 160 KiB; not shown)
BIN  misc/main.jpg               Normal file (new binary, 31 KiB; not shown)
BIN  misc/menu.jpg               Normal file (new binary, 91 KiB; not shown)
BIN  misc/missing-list.jpg       Normal file (new binary, 170 KiB; not shown)
BIN  misc/missing-menu.jpg       Normal file (new binary, 129 KiB; not shown)
BIN  misc/models.png             Normal file (new binary, 164 KiB; not shown)
BIN  misc/nickname.jpg           Normal file (new binary, 182 KiB; not shown)
BIN  misc/portable-install.png   Normal file (new binary, 24 KiB; not shown)
BIN  misc/share-setting.jpg      Normal file (new binary, 29 KiB; not shown)
BIN  misc/share.jpg              Normal file (new binary, 96 KiB; not shown)
BIN  misc/snapshot.jpg           Normal file (new binary, 91 KiB; not shown)

View File

File diff suppressed because it is too large.

modules/manager_ext_core.py  Normal file (+113 lines)
View File

@@ -0,0 +1,113 @@
import os
import sys
import configparser
import manager_core as core
import cm_global
from manager_util import *
import shutil
import folder_paths
from comfy.cli_args import args
import latent_preview
version_code = [3, 0, 1]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
DEFAULT_CHANNEL = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"
manager_ext_config_path = os.path.abspath(os.path.join(folder_paths.get_user_directory(), 'default', 'manager-ext.ini'))
cached_config = None
manager_ext_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
channel_list_path = os.path.join(manager_ext_path, 'channels.list')
def update_channel_dict():
if not os.path.exists(channel_list_path):
shutil.copy(channel_list_path+'.template', channel_list_path)
core.get_channel_dict() # for the loading
with open(os.path.join(manager_ext_path, 'channels.list'), 'r') as file:
channels = file.read()
for x in channels.split('\n'):
channel_info = x.split("::")
if len(channel_info) == 2:
core.channel_dict[channel_info[0]] = channel_info[1]
update_channel_dict()
def get_current_preview_method():
if args.preview_method == latent_preview.LatentPreviewMethod.Auto:
return "auto"
elif args.preview_method == latent_preview.LatentPreviewMethod.Latent2RGB:
return "latent2rgb"
elif args.preview_method == latent_preview.LatentPreviewMethod.TAESD:
return "taesd"
else:
return "none"
def write_config():
config = configparser.ConfigParser()
config['default'] = {
'preview_method': get_current_preview_method(),
'share_option': get_config()['share_option'],
'default_ui': get_config()['default_ui'],
'component_policy': get_config()['component_policy'],
'double_click_policy': get_config()['double_click_policy'],
'security_level': get_config()['security_level'],
}
directory = os.path.dirname(manager_ext_config_path)
if not os.path.exists(directory):
os.makedirs(directory)
with open(manager_ext_config_path, 'w') as configfile:
config.write(configfile)
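# Illustrative sketch (not part of the original file): with the fallback defaults
# from read_config(), the resulting <user_path>/default/manager-ext.ini looks
# roughly like this:
#
#   [default]
#   preview_method = auto
#   share_option = all
#   default_ui = none
#   component_policy = workflow
#   double_click_policy = copy-all
#   security_level = normal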
def read_config():
try:
config = configparser.ConfigParser()
config.read(manager_ext_config_path)
default_conf = config['default']
# policy migration: disable_unsecure_features -> security_level
security_level = default_conf['security_level'] if 'security_level' in default_conf else 'normal'
return {
'preview_method': default_conf['preview_method'] if 'preview_method' in default_conf else get_current_preview_method(),
'share_option': default_conf['share_option'] if 'share_option' in default_conf else 'all',
'default_ui': default_conf['default_ui'] if 'default_ui' in default_conf else 'none',
'component_policy': default_conf['component_policy'] if 'component_policy' in default_conf else 'workflow',
'double_click_policy': default_conf['double_click_policy'] if 'double_click_policy' in default_conf else 'copy-all',
'security_level': security_level
}
except Exception:
return {
'preview_method': get_current_preview_method(),
'share_option': 'all',
'default_ui': 'none',
'component_policy': 'workflow',
'double_click_policy': 'copy-all',
'security_level': 'normal',
}
def get_config():
global cached_config
if cached_config is None:
cached_config = read_config()
return cached_config
def pip_install(packages):
install_cmd = ['#FORCE', sys.executable, "-m", "pip", "install", '-U'] + packages
core.try_install_script('pip install via manager', '..', install_cmd)
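# Illustrative usage (not part of the original file; the package names are placeholders):
#   pip_install(['GitPython', 'matrix-nio'])
# queues a '#FORCE'-prefixed command equivalent to
#   <python> -m pip install -U GitPython matrix-nio
# through core.try_install_script.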

View File

@@ -0,0 +1,745 @@
import traceback
import folder_paths
import locale
import concurrent
import nodes
import os
import sys
import re
import git
from server import PromptServer
import manager_core as core
import cm_global
from . import manager_ext_core as ext_core
from . import manager_ext_util
print(f"### Loading: ComfyUI-Manager (ext) ({ext_core.version_str})")
comfy_ui_hash = "-"
SECURITY_MESSAGE_MIDDLE_OR_BELOW = f"ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS = f"ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_GENERAL = f"ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
routes = PromptServer.instance.routes
def handle_stream(stream, prefix):
stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace')
for msg in stream:
if prefix == '[!]' and ('it/s]' in msg or 's/it]' in msg) and ('%|' in msg or 'it [' in msg):
if msg.startswith('100%'):
print('\r' + msg, end="", file=sys.stderr),
else:
print('\r' + msg[:-1], end="", file=sys.stderr),
else:
if prefix == '[!]':
print(prefix, msg, end="", file=sys.stderr)
else:
print(prefix, msg, end="")
from comfy.cli_args import args
import latent_preview
is_local_mode = args.listen.startswith('127.') or args.listen.startswith('local.')
def is_allowed_security_level(level):
if level == 'block':
return False
elif level == 'high':
if is_local_mode:
return core.get_config()['security_level'].lower() in ['weak', 'normal-']
else:
return core.get_config()['security_level'].lower() == 'weak'
elif level == 'middle':
return core.get_config()['security_level'].lower() in ['weak', 'normal', 'normal-']
else:
return True
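# Illustrative summary (not part of the original file) of the gate above,
# keyed by the action's required level against the configured security_level:
#   'block'  -> never allowed
#   'high'   -> only with security_level 'weak' (or 'normal-' when --listen is bound to a local address)
#   'middle' -> with 'weak', 'normal' or 'normal-'
#   others   -> always allowed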
async def get_risky_level(files, pip_packages):
json_data1 = await core.get_data_by_mode('local', 'custom-node-list.json')
json_data2 = await core.get_data_by_mode('cache', 'custom-node-list.json', channel_url='https://github.com/ltdrdata/ComfyUI-Manager/raw/main')
all_urls = set()
for x in json_data1['custom_nodes'] + json_data2['custom_nodes']:
all_urls.update(x['files'])
for x in files:
if x not in all_urls:
return "high"
all_pip_packages = set()
for x in json_data1['custom_nodes'] + json_data2['custom_nodes']:
if "pip" in x:
all_pip_packages.update(x['pip'])
for p in pip_packages:
if p not in all_pip_packages:
return "block"
return "middle"
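# In short: a file URL missing from both the local and cached custom-node lists is
# rated 'high' (checked first), an unlisted pip package escalates to 'block', and
# everything already known resolves to 'middle'.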
from manager_downloader import download_url
components_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'components'))
def set_preview_method(method):
if method == 'auto':
args.preview_method = latent_preview.LatentPreviewMethod.Auto
elif method == 'latent2rgb':
args.preview_method = latent_preview.LatentPreviewMethod.Latent2RGB
elif method == 'taesd':
args.preview_method = latent_preview.LatentPreviewMethod.TAESD
else:
args.preview_method = latent_preview.LatentPreviewMethod.NoPreviews
ext_core.get_config()['preview_method'] = args.preview_method
set_preview_method(ext_core.get_config()['preview_method'])
def set_default_ui_mode(mode):
ext_core.get_config()['default_ui'] = mode
def set_component_policy(mode):
ext_core.get_config()['component_policy'] = mode
def set_double_click_policy(mode):
ext_core.get_config()['double_click_policy'] = mode
# Expand Server api
import server
from aiohttp import web
import aiohttp
import json
import zipfile
import urllib.request
def check_state_of_git_node_pack(node_packs, do_fetch=False, do_update_check=True, do_update=False):
if do_fetch:
print("Start fetching...", end="")
elif do_update:
print("Start updating...", end="")
elif do_update_check:
print("Start update check...", end="")
def process_custom_node(item):
core.check_state_of_git_node_pack_single(item, do_fetch, do_update_check, do_update)
with concurrent.futures.ThreadPoolExecutor(4) as executor:
for k, v in node_packs.items():
if v.get('active_version') in ['unknown', 'nightly']:
executor.submit(process_custom_node, v)
if do_fetch:
print(f"\x1b[2K\rFetching done.")
elif do_update:
update_exists = any(item.get('updatable', False) for item in node_packs.values())
if update_exists:
print(f"\x1b[2K\rUpdate done.")
else:
print(f"\x1b[2K\rAll extensions are already up-to-date.")
elif do_update_check:
print(f"\x1b[2K\rUpdate check done.")
def nickname_filter(json_obj):
preemptions_map = {}
for k, x in json_obj.items():
if 'preemptions' in x[1]:
for y in x[1]['preemptions']:
preemptions_map[y] = k
elif k.endswith("/ComfyUI"):
for y in x[0]:
preemptions_map[y] = k
updates = {}
for k, x in json_obj.items():
removes = set()
for y in x[0]:
k2 = preemptions_map.get(y)
if k2 is not None and k != k2:
removes.add(y)
if len(removes) > 0:
updates[k] = [y for y in x[0] if y not in removes]
for k, v in updates.items():
json_obj[k][0] = v
return json_obj
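# Note (added for clarity): a pack can claim node names via an explicit 'preemptions'
# list, or implicitly when its key ends with '/ComfyUI'; nickname_filter then strips
# those names from every other pack so each node maps to a single preempting pack.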
@routes.get("/customnode/getmappings")
async def fetch_customnode_mappings(request):
"""
Provide a unified (node -> node pack) mapping list.
"""
mode = request.rel_url.query["mode"]
nickname_mode = False
if mode == "nickname":
mode = "local"
nickname_mode = True
json_obj = await core.get_data_by_mode(mode, 'extension-node-map.json')
json_obj = core.map_to_unified_keys(json_obj)
if nickname_mode:
json_obj = nickname_filter(json_obj)
all_nodes = set()
patterns = []
for k, x in json_obj.items():
all_nodes.update(set(x[0]))
if 'nodename_pattern' in x[1]:
patterns.append((x[1]['nodename_pattern'], x[0]))
missing_nodes = set(nodes.NODE_CLASS_MAPPINGS.keys()) - all_nodes
for x in missing_nodes:
for pat, item in patterns:
if re.match(pat, x):
item.append(x)
return web.json_response(json_obj, content_type='application/json')
@routes.get("/customnode/fetch_updates")
async def fetch_updates(request):
try:
if request.rel_url.query["mode"] == "local":
channel = 'local'
else:
channel = core.get_config()['channel_url']
await core.unified_manager.reload(request.rel_url.query["mode"])
await core.unified_manager.get_custom_nodes(channel, request.rel_url.query["mode"])
res = core.unified_manager.fetch_or_pull_git_repo(is_pull=False)
for x in res['failed']:
print(f"FETCH FAILED: {x}")
print("\nDone.")
if len(res['updated']) > 0:
return web.Response(status=201)
return web.Response(status=200)
except:
traceback.print_exc()
return web.Response(status=400)
@routes.get("/customnode/update_all")
async def update_all(request):
if not is_allowed_security_level('middle'):
print(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
return web.Response(status=403)
try:
core.save_snapshot_with_postfix('autosave')
if request.rel_url.query["mode"] == "local":
channel = 'local'
else:
channel = core.get_config()['channel_url']
await core.unified_manager.reload(request.rel_url.query["mode"])
await core.unified_manager.get_custom_nodes(channel, request.rel_url.query["mode"])
updated_cnr = []
for k, v in core.unified_manager.active_nodes.items():
if v[0] != 'nightly':
res = core.unified_manager.unified_update(k, v[0])
if res.action == 'switch-cnr' and res:
updated_cnr.append(k)
res = core.unified_manager.fetch_or_pull_git_repo(is_pull=True)
res['updated'] += updated_cnr
for x in res['failed']:
print(f"PULL FAILED: {x}")
if len(res['updated']) == 0 and len(res['failed']) == 0:
status = 200
else:
status = 201
print(f"\nDone.")
return web.json_response(res, status=status, content_type='application/json')
except:
traceback.print_exc()
return web.Response(status=400)
finally:
core.clear_pip_cache()
def convert_markdown_to_html(input_text):
pattern_a = re.compile(r'\[a/([^]]+)\]\(([^)]+)\)')
pattern_w = re.compile(r'\[w/([^]]+)\]')
pattern_i = re.compile(r'\[i/([^]]+)\]')
pattern_bold = re.compile(r'\*\*([^*]+)\*\*')
pattern_white = re.compile(r'%%([^*]+)%%')
def replace_a(match):
return f"<a href='{match.group(2)}' target='blank'>{match.group(1)}</a>"
def replace_w(match):
return f"<p class='cm-warn-note'>{match.group(1)}</p>"
def replace_i(match):
return f"<p class='cm-info-note'>{match.group(1)}</p>"
def replace_bold(match):
return f"<B>{match.group(1)}</B>"
def replace_white(match):
return f"<font color='white'>{match.group(1)}</font>"
input_text = input_text.replace('\\[', '&#91;').replace('\\]', '&#93;').replace('<', '&lt;').replace('>', '&gt;')
result_text = re.sub(pattern_a, replace_a, input_text)
result_text = re.sub(pattern_w, replace_w, result_text)
result_text = re.sub(pattern_i, replace_i, result_text)
result_text = re.sub(pattern_bold, replace_bold, result_text)
result_text = re.sub(pattern_white, replace_white, result_text)
return result_text.replace("\n", "<BR>")
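# Illustrative conversions (not part of the original file; example.com is a placeholder):
#   "[a/docs](https://example.com)"  ->  "<a href='https://example.com' target='blank'>docs</a>"
#   "[w/Be careful]"                 ->  "<p class='cm-warn-note'>Be careful</p>"
#   "**bold**"                       ->  "<B>bold</B>"
# Newlines become <BR>, and any raw '<' / '>' are escaped first.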
def populate_markdown(x):
if 'description' in x:
x['description'] = convert_markdown_to_html(manager_ext_util.sanitize_tag(x['description']))
if 'name' in x:
x['name'] = manager_ext_util.sanitize_tag(x['name'])
if 'title' in x:
x['title'] = manager_ext_util.sanitize_tag(x['title'])
@routes.get("/customnode/getlist")
async def fetch_customnode_list(request):
"""
Provide a unified custom node list.
"""
if "skip_update" in request.rel_url.query and request.rel_url.query["skip_update"] == "true":
skip_update = True
else:
skip_update = False
if request.rel_url.query["mode"] == "local":
channel = 'local'
else:
channel = core.get_config()['channel_url']
node_packs = await core.get_unified_total_nodes(channel, request.rel_url.query["mode"])
json_obj_github = core.get_data_by_mode(request.rel_url.query["mode"], 'github-stats.json', 'default')
json_obj_extras = core.get_data_by_mode(request.rel_url.query["mode"], 'extras.json', 'default')
core.populate_github_stats(node_packs, await json_obj_github)
core.populate_favorites(node_packs, await json_obj_extras)
check_state_of_git_node_pack(node_packs, False, do_update_check=not skip_update)
for v in node_packs.values():
populate_markdown(v)
if channel != 'local':
found = 'custom'
for name, url in core.get_channel_dict().items():
if url == channel:
found = name
break
channel = found
result = dict(channel=channel, node_packs=node_packs)
return web.json_response(result, content_type='application/json')
@routes.get("/customnode/alternatives")
async def fetch_customnode_alternatives(request):
alter_json = await core.get_data_by_mode(request.rel_url.query["mode"], 'alter-list.json')
res = {}
for item in alter_json['items']:
populate_markdown(item)
res[item['id']] = item
res = core.map_to_unified_keys(res)
return web.json_response(res, content_type='application/json')
@PromptServer.instance.routes.get("/snapshot/getlist")
async def get_snapshot_list(request):
snapshots_directory = os.path.join(core.comfyui_manager_path, 'snapshots')
items = [f[:-5] for f in os.listdir(snapshots_directory) if f.endswith('.json')]
items.sort(reverse=True)
return web.json_response({'items': items}, content_type='application/json')
@routes.get("/snapshot/remove")
async def remove_snapshot(request):
if not is_allowed_security_level('middle'):
print(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
return web.Response(status=403)
try:
target = request.rel_url.query["target"]
path = os.path.join(core.comfyui_manager_path, 'snapshots', f"{target}.json")
if os.path.exists(path):
os.remove(path)
return web.Response(status=200)
except:
return web.Response(status=400)
@routes.get("/snapshot/get_current")
async def get_current_snapshot_api(request):
try:
return web.json_response(core.get_current_snapshot(), content_type='application/json')
except:
return web.Response(status=400)
def unzip_install(files):
temp_filename = 'manager-temp.zip'
for url in files:
if url.endswith("/"):
url = url[:-1]
try:
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}
req = urllib.request.Request(url, headers=headers)
response = urllib.request.urlopen(req)
data = response.read()
with open(temp_filename, 'wb') as f:
f.write(data)
with zipfile.ZipFile(temp_filename, 'r') as zip_ref:
zip_ref.extractall(core.custom_nodes_path)
os.remove(temp_filename)
except Exception as e:
print(f"Install(unzip) error: {url} / {e}", file=sys.stderr)
return False
print("Installation was successful.")
return True
def download_url_with_agent(url, save_path):
try:
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}
req = urllib.request.Request(url, headers=headers)
response = urllib.request.urlopen(req)
data = response.read()
if not os.path.exists(os.path.dirname(save_path)):
os.makedirs(os.path.dirname(save_path))
with open(save_path, 'wb') as f:
f.write(data)
except Exception as e:
print(f"Download error: {url} / {e}", file=sys.stderr)
return False
print("Installation was successful.")
return True
@routes.post("/customnode/install/git_url")
async def install_custom_node_git_url(request):
if not is_allowed_security_level('high'):
print(SECURITY_MESSAGE_NORMAL_MINUS)
return web.Response(status=403)
url = await request.text()
res = await core.gitclone_install(url)
if res.action == 'skip':
print(f"Already installed: '{res.target}'")
return web.Response(status=200)
elif res.result:
print(f"After restarting ComfyUI, please refresh the browser.")
return web.Response(status=200)
print(res.msg)
return web.Response(status=400)
@routes.post("/customnode/install/pip")
async def install_pip(request):
if not is_allowed_security_level('high'):
print(SECURITY_MESSAGE_NORMAL_MINUS)
return web.Response(status=403)
packages = await request.text()
core.pip_install(packages.split(' '))
return web.Response(status=200)
@routes.get("/comfyui_manager/update_comfyui")
async def update_comfyui(request):
print(f"Update ComfyUI")
try:
repo_path = os.path.dirname(folder_paths.__file__)
res = core.update_path(repo_path)
if res == "fail":
print(f"ComfyUI update fail: The installed ComfyUI does not have a Git repository.")
return web.Response(status=400)
elif res == "updated":
return web.Response(status=201)
else: # skipped
return web.Response(status=200)
except Exception as e:
print(f"ComfyUI update fail: {e}", file=sys.stderr)
return web.Response(status=400)
@routes.get("/comfyui_manager/comfyui_switch_version")
async def comfyui_switch_version(request):
try:
if "ver" in request.rel_url.query:
core.switch_comfyui(request.rel_url.query['ver'])
return web.Response(status=200)
except Exception as e:
print(f"ComfyUI update fail: {e}", file=sys.stderr)
return web.Response(status=400)
@PromptServer.instance.routes.get("/manager/preview_method")
async def preview_method(request):
if "value" in request.rel_url.query:
set_preview_method(request.rel_url.query['value'])
ext_core.write_config()
else:
return web.Response(text=ext_core.get_current_preview_method(), status=200)
return web.Response(status=200)
@routes.get("/manager/default_ui")
async def default_ui_mode(request):
if "value" in request.rel_url.query:
set_default_ui_mode(request.rel_url.query['value'])
ext_core.write_config()
else:
return web.Response(text=ext_core.get_config()['default_ui'], status=200)
return web.Response(status=200)
@routes.get("/manager/component/policy")
async def component_policy(request):
if "value" in request.rel_url.query:
set_component_policy(request.rel_url.query['value'])
ext_core.write_config()
else:
return web.Response(text=ext_core.get_config()['component_policy'], status=200)
return web.Response(status=200)
@routes.get("/manager/dbl_click/policy")
async def dbl_click_policy(request):
if "value" in request.rel_url.query:
set_double_click_policy(request.rel_url.query['value'])
ext_core.write_config()
else:
return web.Response(text=ext_core.get_config()['double_click_policy'], status=200)
return web.Response(status=200)
def add_target_blank(html_text):
pattern = r'(<a\s+href="[^"]*"\s*[^>]*)(>)'
def add_target(match):
if 'target=' not in match.group(1):
return match.group(1) + ' target="_blank"' + match.group(2)
return match.group(0)
modified_html = re.sub(pattern, add_target, html_text)
return modified_html
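# Illustrative example (not part of the original file):
#   '<a href="https://example.com">news</a>'
#     becomes '<a href="https://example.com" target="_blank">news</a>'
# while anchors that already specify target= are left unchanged.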
@routes.get("/manager/notice")
async def get_notice(request):
url = "github.com"
path = "/ltdrdata/ltdrdata.github.io/wiki/News"
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.get(f"https://{url}{path}") as response:
if response.status == 200:
# html_content = response.read().decode('utf-8')
html_content = await response.text()
pattern = re.compile(r'<div class="markdown-body">([\s\S]*?)</div>')
match = pattern.search(html_content)
if match:
markdown_content = match.group(1)
if core.comfyui_tag:
markdown_content += f"<HR>ComfyUI: <b>{core.comfyui_tag}</b><BR>Commit Date: {core.comfy_ui_commit_datetime.date()}"
else:
markdown_content += f"<HR>ComfyUI: <b>{core.comfy_ui_revision}</b>[{comfy_ui_hash[:6]}]({core.comfy_ui_commit_datetime.date()})"
# markdown_content += f"<BR>&nbsp; &nbsp; &nbsp; &nbsp; &nbsp;()"
markdown_content += f"<BR>manager-core: {core.version_str}"
markdown_content += f"<BR>manager-ext: {ext_core.version_str}"
markdown_content = add_target_blank(markdown_content)
try:
if core.comfy_ui_required_commit_datetime.date() > core.comfy_ui_commit_datetime.date():
markdown_content = f'<P style="text-align: center; color:red; background-color:white; font-weight:bold">Your ComfyUI is too OUTDATED!!!</P>' + markdown_content
except:
pass
return web.Response(text=markdown_content, status=200)
else:
return web.Response(text="Unable to retrieve Notice", status=200)
else:
return web.Response(text="Unable to retrieve Notice", status=200)
@routes.get("/manager/reboot")
def restart(self):
if not is_allowed_security_level('middle'):
print(SECURITY_MESSAGE_MIDDLE_OR_BELOW)
return web.Response(status=403)
try:
sys.stdout.close_log()
except Exception as e:
pass
if '__COMFY_CLI_SESSION__' in os.environ:
with open(os.path.join(os.environ['__COMFY_CLI_SESSION__'] + '.reboot'), 'w') as file:
pass
print(f"\nRestarting...\n\n")
exit(0)
print(f"\nRestarting... [Legacy Mode]\n\n")
if sys.platform.startswith('win32'):
return os.execv(sys.executable, ['"' + sys.executable + '"', '"' + sys.argv[0] + '"'] + sys.argv[1:])
else:
return os.execv(sys.executable, [sys.executable] + sys.argv)
def sanitize_filename(input_string):
result_string = re.sub(r'[^a-zA-Z0-9_]', '_', input_string)
return result_string
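# e.g. (illustrative, not part of the original file):
#   sanitize_filename("My Pack (v2)!") -> "My_Pack__v2__"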
@routes.post("/manager/component/save")
async def save_component(request):
try:
data = await request.json()
name = data['name']
workflow = data['workflow']
if not os.path.exists(components_path):
os.mkdir(components_path)
if 'packname' in workflow and workflow['packname'] != '':
sanitized_name = sanitize_filename(workflow['packname']) + '.pack'
else:
sanitized_name = sanitize_filename(name) + '.json'
filepath = os.path.join(components_path, sanitized_name)
components = {}
if os.path.exists(filepath):
with open(filepath) as f:
components = json.load(f)
components[name] = workflow
with open(filepath, 'w') as f:
json.dump(components, f, indent=4, sort_keys=True)
return web.Response(text=filepath, status=200)
except:
return web.Response(status=400)
@routes.post("/manager/component/loads")
async def load_components(request):
try:
json_files = [f for f in os.listdir(components_path) if f.endswith('.json')]
pack_files = [f for f in os.listdir(components_path) if f.endswith('.pack')]
components = {}
for json_file in json_files + pack_files:
file_path = os.path.join(components_path, json_file)
with open(file_path, 'r') as file:
try:
# When a component is defined in both a .pack and a .json file, the .pack entry takes precedence and overrides the .json one.
components.update(json.load(file))
except json.JSONDecodeError as e:
print(f"[ComfyUI-Manager] Error decoding component file in file {json_file}: {e}")
return web.json_response(components)
except Exception as e:
print(f"[ComfyUI-Manager] failed to load components\n{e}")
return web.Response(status=400)
args.enable_cors_header = "*"
if hasattr(PromptServer.instance, "app"):
app = PromptServer.instance.app
cors_middleware = server.create_cors_middleware(args.enable_cors_header)
app.middlewares.append(cors_middleware)
def sanitize(data):
return data.replace("<", "&lt;").replace(">", "&gt;")
if not os.path.exists(ext_core.manager_ext_config_path):
ext_core.get_config()
ext_core.write_config()
cm_global.register_extension('ComfyUI-Manager',
{'version': core.version,
'name': 'ComfyUI Manager (Extension)',
'nodes': {},
'description': 'ComfyUI-Manager (Extension)', })
cm_global.variables['manager-core.show_menu'] = False

modules/manager_ext_util.py  Normal file (+174 lines)
View File

@@ -0,0 +1,174 @@
import traceback
import aiohttp
import json
import threading
import os
from datetime import datetime
cache_lock = threading.Lock()
comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
cache_dir = os.path.join(comfyui_manager_path, '.cache')
# DON'T USE StrictVersion - cannot handle pre_release version
# try:
# from distutils.version import StrictVersion
# except:
# print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
class StrictVersion:
def __init__(self, version_string):
self.version_string = version_string
self.major = 0
self.minor = 0
self.patch = 0
self.pre_release = None
self.parse_version_string()
def parse_version_string(self):
parts = self.version_string.split('.')
if not parts:
raise ValueError("Version string must not be empty")
self.major = int(parts[0])
self.minor = int(parts[1]) if len(parts) > 1 else 0
self.patch = int(parts[2]) if len(parts) > 2 else 0
# Handling pre-release versions if present
if len(parts) > 3:
self.pre_release = parts[3]
def __str__(self):
version = f"{self.major}.{self.minor}.{self.patch}"
if self.pre_release:
version += f"-{self.pre_release}"
return version
def __eq__(self, other):
return (self.major, self.minor, self.patch, self.pre_release) == \
(other.major, other.minor, other.patch, other.pre_release)
def __lt__(self, other):
if (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch):
return self.pre_release_compare(self.pre_release, other.pre_release) < 0
return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
@staticmethod
def pre_release_compare(pre1, pre2):
if pre1 == pre2:
return 0
if pre1 is None:
return 1
if pre2 is None:
return -1
return -1 if pre1 < pre2 else 1
def __le__(self, other):
return self == other or self < other
def __gt__(self, other):
return not self <= other
def __ge__(self, other):
return not self < other
def __ne__(self, other):
return not self == other
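# Illustrative comparisons (not part of the original file):
#   StrictVersion("1.2")        == StrictVersion("1.2.0")    # missing fields default to 0
#   StrictVersion("1.2.3.beta") <  StrictVersion("1.2.3")    # a pre-release sorts before its release
#   StrictVersion("2.0.0")      >  StrictVersion("1.9.9")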
def simple_hash(input_string):
hash_value = 0
for char in input_string:
hash_value = (hash_value * 31 + ord(char)) % (2**32)
return hash_value
def is_file_created_within_one_day(file_path):
if not os.path.exists(file_path):
return False
file_creation_time = os.path.getctime(file_path)
current_time = datetime.now().timestamp()
time_difference = current_time - file_creation_time
return time_difference <= 86400
async def get_data(uri, silent=False):
if not silent:
print(f"FETCH DATA from: {uri}", end="")
if uri.startswith("http"):
async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.get(uri) as resp:
json_text = await resp.text()
else:
with cache_lock:
with open(uri, "r", encoding="utf-8") as f:
json_text = f.read()
json_obj = json.loads(json_text)
if not silent:
print(f" [DONE]")
return json_obj
async def get_data_with_cache(uri, silent=False, cache_mode=True):
cache_uri = str(simple_hash(uri)) + '_' + os.path.basename(uri).replace('&', "_").replace('?', "_").replace('=', "_")
cache_uri = os.path.join(cache_dir, cache_uri+'.json')
if cache_mode and is_file_created_within_one_day(cache_uri):
json_obj = await get_data(cache_uri, silent=silent)
else:
json_obj = await get_data(uri, silent=silent)
with cache_lock:
with open(cache_uri, "w", encoding='utf-8') as file:
json.dump(json_obj, file, indent=4, sort_keys=True)
if not silent:
print(f"[ComfyUI-Manager] default cache updated: {uri}")
return json_obj
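# Illustrative example (not part of the original file): a remote URI such as
#   https://example.com/custom-node-list.json?mode=cache
# is stored as .cache/<simple_hash(uri)>_custom-node-list.json_mode_cache.json
# and reused for up to one day (see is_file_created_within_one_day).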
def sanitize_tag(x):
return x.replace('<', '&lt;').replace('>', '&gt;')
def download_url(url, dest_folder, filename):
import requests
# Ensure the destination folder exists
if not os.path.exists(dest_folder):
os.makedirs(dest_folder)
# Full path to save the file
dest_path = os.path.join(dest_folder, filename)
# Download the file
response = requests.get(url, stream=True)
if response.status_code == 200:
with open(dest_path, 'wb') as file:
for chunk in response.iter_content(chunk_size=1024):
if chunk:
file.write(chunk)
else:
raise Exception(f"Failed to download file from {url}")
def extract_package_as_zip(file_path, extract_path):
import zipfile
try:
with zipfile.ZipFile(file_path, "r") as zip_ref:
zip_ref.extractall(extract_path)
extracted_files = zip_ref.namelist()
print(f"Extracted zip file to {extract_path}")
return extracted_files
except zipfile.BadZipFile:
print(f"File '{file_path}' is not a zip or is corrupted.")
return None

View File

@@ -0,0 +1,17 @@
import git
from aiohttp import web
from server import PromptServer
manager_core_path = None
manager_core_url = "https://github.com/Comfy-Org/manager-core"
@PromptServer.instance.routes.get("/manager/install_manager_core")
def install_manager_core(request):
if manager_core_path is not None:
repo = git.Repo.clone_from(manager_core_url, manager_core_path)
repo.git.clear_cache()
repo.close()
else:
print(f"[ComfyUI Manager] Failed to install `manager-core`")
return web.Response(status=200)

View File

@@ -1,7 +1,5 @@
import mimetypes
from ..common import context
from . import manager_core as core
import manager_core as core
import os
from aiohttp import web
import aiohttp
@@ -10,16 +8,6 @@ import hashlib
import folder_paths
from server import PromptServer
import logging
import sys
try:
from nio import AsyncClient, LoginResponse, UploadResponse
matrix_nio_is_available = True
except Exception:
logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
matrix_nio_is_available = False
def extract_model_file_names(json_data):
@@ -65,7 +53,7 @@ def compute_sha256_checksum(filepath):
return sha256.hexdigest()
@PromptServer.instance.routes.get("/v2/manager/share_option")
@PromptServer.instance.routes.get("/manager/share_option")
async def share_option(request):
if "value" in request.rel_url.query:
core.get_config()['share_option'] = request.rel_url.query['value']
@@ -77,21 +65,21 @@ async def share_option(request):
def get_openart_auth():
if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
if not os.path.exists(os.path.join(core.comfyui_manager_path, ".openart_key")):
return None
try:
with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
with open(os.path.join(core.comfyui_manager_path, ".openart_key"), "r") as f:
openart_key = f.read().strip()
return openart_key if openart_key else None
except Exception:
except:
return None
def get_matrix_auth():
if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
if not os.path.exists(os.path.join(core.comfyui_manager_path, "matrix_auth")):
return None
try:
with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
with open(os.path.join(core.comfyui_manager_path, "matrix_auth"), "r") as f:
matrix_auth = f.read()
homeserver, username, password = matrix_auth.strip().split("\n")
if not homeserver or not username or not password:
@@ -101,40 +89,40 @@ def get_matrix_auth():
"username": username,
"password": password,
}
except Exception:
except:
return None
def get_comfyworkflows_auth():
if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
if not os.path.exists(os.path.join(core.comfyui_manager_path, "comfyworkflows_sharekey")):
return None
try:
with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
with open(os.path.join(core.comfyui_manager_path, "comfyworkflows_sharekey"), "r") as f:
share_key = f.read()
if not share_key.strip():
return None
return share_key
except Exception:
except:
return None
def get_youml_settings():
if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
if not os.path.exists(os.path.join(core.comfyui_manager_path, ".youml")):
return None
try:
with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
with open(os.path.join(core.comfyui_manager_path, ".youml"), "r") as f:
youml_settings = f.read().strip()
return youml_settings if youml_settings else None
except Exception:
except:
return None
def set_youml_settings(settings):
with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
with open(os.path.join(core.comfyui_manager_path, ".youml"), "w") as f:
f.write(settings)
@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
@PromptServer.instance.routes.get("/manager/get_openart_auth")
async def api_get_openart_auth(request):
# print("Getting stored Matrix credentials...")
openart_key = get_openart_auth()
@@ -143,16 +131,16 @@ async def api_get_openart_auth(request):
return web.json_response({"openart_key": openart_key})
@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
@PromptServer.instance.routes.post("/manager/set_openart_auth")
async def api_set_openart_auth(request):
json_data = await request.json()
openart_key = json_data['openart_key']
with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
with open(os.path.join(core.comfyui_manager_path, ".openart_key"), "w") as f:
f.write(openart_key)
return web.Response(status=200)
@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
@PromptServer.instance.routes.get("/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
# print("Getting stored Matrix credentials...")
matrix_auth = get_matrix_auth()
@@ -161,7 +149,7 @@ async def api_get_matrix_auth(request):
return web.json_response(matrix_auth)
@PromptServer.instance.routes.get("/v2/manager/youml/settings")
@PromptServer.instance.routes.get("/manager/youml/settings")
async def api_get_youml_settings(request):
youml_settings = get_youml_settings()
if not youml_settings:
@@ -169,14 +157,14 @@ async def api_get_youml_settings(request):
return web.json_response(json.loads(youml_settings))
@PromptServer.instance.routes.post("/v2/manager/youml/settings")
@PromptServer.instance.routes.post("/manager/youml/settings")
async def api_set_youml_settings(request):
json_data = await request.json()
set_youml_settings(json.dumps(json_data))
return web.Response(status=200)
@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
@PromptServer.instance.routes.get("/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
# Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
# in the same directory as the ComfyUI base folder
@@ -187,39 +175,33 @@ async def api_get_comfyworkflows_auth(request):
return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})
@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
@PromptServer.instance.routes.post("/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
json_data = await request.json()
with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
current_workflow = json_data['workflow']
images = json_data['images']
with open(os.path.join(core.comfyui_manager_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
json.dump(json_data, file, indent=4)
return web.Response(status=200)
@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
@PromptServer.instance.routes.get("/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
with open(os.path.join(core.comfyui_manager_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
data = json.load(file)
return web.Response(status=200, text=json.dumps(data))
@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
async def get_matrix_dep_status(request):
if matrix_nio_is_available:
return web.Response(status=200, text='available')
else:
return web.Response(status=200, text='unavailable')
def set_matrix_auth(json_data):
homeserver = json_data['homeserver']
username = json_data['username']
password = json_data['password']
with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
with open(os.path.join(core.comfyui_manager_path, "matrix_auth"), "w") as f:
f.write("\n".join([homeserver, username, password]))
def set_comfyworkflows_auth(comfyworkflows_sharekey):
with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
with open(os.path.join(core.comfyui_manager_path, "comfyworkflows_sharekey"), "w") as f:
f.write(comfyworkflows_sharekey)
@@ -231,7 +213,7 @@ def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
return comfyworkflows_sharekey.strip()
@PromptServer.instance.routes.post("/v2/manager/share")
@PromptServer.instance.routes.post("/manager/share")
async def share_art(request):
# get json data
json_data = await request.json()
@@ -253,7 +235,7 @@ async def share_art(request):
try:
output_to_share = potential_outputs[int(selected_output_index)]
except Exception:
except:
# for now, pick the first output
output_to_share = potential_outputs[0]
@@ -337,7 +319,7 @@ async def share_art(request):
form.add_field("shareWorkflowTitle", title)
form.add_field("shareWorkflowDescription", description)
form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
form.add_field("currentSnapshot", json.dumps(await core.get_current_snapshot()))
form.add_field("currentSnapshot", json.dumps(core.get_current_snapshot()))
form.add_field("modelsInfo", json.dumps(models_info))
async with session.post(
@@ -349,12 +331,15 @@ async def share_art(request):
workflowId = upload_workflow_json["workflowId"]
# check if the user has provided Matrix credentials
if matrix_nio_is_available and "matrix" in share_destinations:
if "matrix" in share_destinations:
comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
filename = os.path.basename(asset_filepath)
content_type = assetFileType
try:
from matrix_client.api import MatrixHttpApi
from matrix_client.client import MatrixClient
homeserver = 'matrix.org'
if matrix_auth:
homeserver = matrix_auth.get('homeserver', 'matrix.org')
@@ -362,35 +347,20 @@ async def share_art(request):
if not homeserver.startswith("https://"):
homeserver = "https://" + homeserver
client = AsyncClient(homeserver, matrix_auth['username'])
# Login
login_resp = await client.login(matrix_auth['password'])
if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
await client.close()
client = MatrixClient(homeserver)
try:
token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
if not token:
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
except:
return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
# Upload asset
matrix = MatrixHttpApi(homeserver, token=token)
with open(asset_filepath, 'rb') as f:
upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
asset_data = f.seek(0) or f.read() # get size for info below
if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
await client.close()
return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
mxc_url = upload_resp.content_uri
mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']
# Upload workflow JSON
import io
workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
workflow_io = io.BytesIO(workflow_json_bytes)
upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
workflow_io.seek(0)
if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
await client.close()
return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
workflow_json_mxc_url = upload_workflow_resp.content_uri
workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']
# Send text message
text_content = ""
if title:
text_content += f"{title}\n"
@@ -398,44 +368,9 @@ async def share_art(request):
text_content += f"{description}\n"
if credits:
text_content += f"\ncredits: {credits}\n"
await client.room_send(
room_id=comfyui_share_room_id,
message_type="m.room.message",
content={"msgtype": "m.text", "body": text_content}
)
# Send image
await client.room_send(
room_id=comfyui_share_room_id,
message_type="m.room.message",
content={
"msgtype": "m.image",
"body": filename,
"url": mxc_url,
"info": {
"mimetype": content_type,
"size": len(asset_data)
}
}
)
# Send workflow JSON file
await client.room_send(
room_id=comfyui_share_room_id,
message_type="m.room.message",
content={
"msgtype": "m.file",
"body": "workflow.json",
"url": workflow_json_mxc_url,
"info": {
"mimetype": "application/json",
"size": len(workflow_json_bytes)
}
}
)
await client.close()
response = matrix.send_message(comfyui_share_room_id, text_content)
response = matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
response = matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
except:
import traceback
traceback.print_exc()

Some files were not shown because too many files have changed in this diff.